phoenix-commits mailing list archives

From sama...@apache.org
Subject [1/3] phoenix git commit: PHOENIX-914 Native HBase timestamp support to optimize date range queries in Phoenix
Date Sat, 03 Oct 2015 22:06:16 GMT
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 a0417a741 -> b4ec22fd1
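
For readers coming to this commit from the JIRA: the feature wired in here is a ROW_TIMESTAMP designation on a primary-key column. Per the new PColumn javadoc in this patch, such a column represents/stores the HBase cell timestamp, so date-range predicates on it can be pushed down as an HBase scan time range. A minimal usage sketch follows; the table, column names, and JDBC URL are illustrative and not taken from the patch, and the DDL form mirrors the test cases further down in this commit.

// Illustrative only: names and the connection URL are not from this patch.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class RowTimestampUsageSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            // CREATED_DATE doubles as the HBase cell timestamp because of ROW_TIMESTAMP.
            conn.createStatement().execute(
                "CREATE TABLE EVENTS (CREATED_DATE DATE NOT NULL, EVENT_ID BIGINT NOT NULL, " +
                "PAYLOAD VARCHAR CONSTRAINT PK PRIMARY KEY(CREATED_DATE ROW_TIMESTAMP, EVENT_ID))");
            // The date-range predicate below can be applied as an HBase scan time range.
            ResultSet rs = conn.createStatement().executeQuery(
                "SELECT count(*) FROM EVENTS WHERE CREATED_DATE >= TO_DATE('2015-10-01 00:00:00') " +
                "AND CREATED_DATE < TO_DATE('2015-10-03 00:00:00')");
            rs.next();
            System.out.println(rs.getLong(1));
        }
    }
}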


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index 7a7369b..341b07d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -44,6 +44,7 @@ import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INDEX_STATE;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INDEX_TYPE;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_ARRAY;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_CONSTANT;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_ROW_TIMESTAMP;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_VIEW_REFERENCED;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.JAR_PATH;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.KEY_SEQ;
@@ -80,6 +81,7 @@ import static org.apache.phoenix.query.QueryConstants.BASE_TABLE_BASE_COLUMN_COU
 import static org.apache.phoenix.query.QueryServices.DROP_METADATA_ATTRIB;
 import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_DROP_METADATA;
 import static org.apache.phoenix.schema.PTable.ViewType.MAPPED;
+import static org.apache.phoenix.schema.PTableType.TABLE;
 import static org.apache.phoenix.schema.PTableType.VIEW;
 
 import java.io.IOException;
@@ -115,7 +117,6 @@ import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.compile.ColumnResolver;
@@ -147,6 +148,7 @@ import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.parse.AddColumnStatement;
 import org.apache.phoenix.parse.AlterIndexStatement;
 import org.apache.phoenix.parse.ColumnDef;
+import org.apache.phoenix.parse.ColumnDefInPkConstraint;
 import org.apache.phoenix.parse.ColumnName;
 import org.apache.phoenix.parse.CreateFunctionStatement;
 import org.apache.phoenix.parse.CreateIndexStatement;
@@ -179,6 +181,8 @@ import org.apache.phoenix.schema.stats.PTableStats;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PInteger;
 import org.apache.phoenix.schema.types.PLong;
+import org.apache.phoenix.schema.types.PTimestamp;
+import org.apache.phoenix.schema.types.PUnsignedLong;
 import org.apache.phoenix.schema.types.PVarbinary;
 import org.apache.phoenix.schema.types.PVarchar;
 import org.apache.phoenix.util.ByteUtil;
@@ -187,6 +191,7 @@ import org.apache.phoenix.util.LogUtil;
 import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.ReadOnlyProps;
+import org.apache.phoenix.util.ScanUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.StringUtil;
 import org.apache.phoenix.util.UpgradeUtil;
@@ -296,8 +301,9 @@ public class MetaDataClient {
         IS_VIEW_REFERENCED + "," +
         PK_NAME + "," +  // write this both in the column and table rows for access by metadata APIs
         KEY_SEQ + "," +
-        COLUMN_DEF +
-        ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+        COLUMN_DEF + "," +
+        IS_ROW_TIMESTAMP + 
+        ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
     private static final String UPDATE_COLUMN_POSITION =
         "UPSERT INTO " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_CATALOG_TABLE + "\" ( " +
         TENANT_ID + "," +
@@ -675,6 +681,7 @@ public class MetaDataClient {
         } else {
             colUpsert.setString(18, column.getExpressionStr());
         }
+        colUpsert.setBoolean(19, column.isRowTimestamp());
         colUpsert.execute();
     }
 
@@ -697,11 +704,13 @@ public class MetaDataClient {
             ColumnName columnDefName = def.getColumnDefName();
             SortOrder sortOrder = def.getSortOrder();
             boolean isPK = def.isPK();
+            boolean isRowTimestamp = def.isRowTimestamp();
             if (pkConstraint != null) {
-                Pair<ColumnName, SortOrder> pkSortOrder = pkConstraint.getColumn(columnDefName);
+                Pair<ColumnName, SortOrder> pkSortOrder = pkConstraint.getColumnWithSortOrder(columnDefName);
                 if (pkSortOrder != null) {
                     isPK = true;
                     sortOrder = pkSortOrder.getSecond();
+                    isRowTimestamp = pkConstraint.isColumnRowTimestamp(columnDefName);
                 }
             }
             String columnName = columnDefName.getColumnName();
@@ -740,7 +749,7 @@ public class MetaDataClient {
             }
 
             PColumn column = new PColumnImpl(PNameFactory.newName(columnName), familyName, def.getDataType(),
-                    def.getMaxLength(), def.getScale(), isNull, position, sortOrder, def.getArraySize(), null, false, def.getExpression());
+                    def.getMaxLength(), def.getScale(), isNull, position, sortOrder, def.getArraySize(), null, false, def.getExpression(), isRowTimestamp);
             return column;
         } catch (IllegalArgumentException e) { // Based on precondition check in constructor
             throw new SQLException(e);
@@ -929,95 +938,95 @@ public class MetaDataClient {
             // the wire for the index rows, as we don't need to do that. Instead, we tap into our
             // region observer to generate the index rows based on the data rows as we scan
             if (index.getIndexType() == IndexType.LOCAL) {
-                final PhoenixStatement statement = new PhoenixStatement(connection);
-                String tableName = getFullTableName(dataTableRef);
-                String query = "SELECT count(*) FROM " + tableName;
-                final QueryPlan plan = statement.compileQuery(query);
-                TableRef tableRef = plan.getTableRef();
-                // Set attribute on scan that UngroupedAggregateRegionObserver will switch on.
-                // We'll detect that this attribute was set the server-side and write the index
-                // rows per region as a result. The value of the attribute will be our persisted
-                // index maintainers.
-                // Define the LOCAL_INDEX_BUILD as a new static in BaseScannerRegionObserver
-                Scan scan = plan.getContext().getScan();
-                try {
-                    if(plan.getContext().getScanTimeRange()==null) {
-                        Long scn = connection.getSCN();
-                        if (scn == null) {
-                            scn = plan.getContext().getCurrentTime();
-                            // Add one to server time since max of time range is exclusive
-                            // and we need to account of OSs with lower resolution clocks.
-                            if (scn < HConstants.LATEST_TIMESTAMP) {
-                                scn++;
+                try (final PhoenixStatement statement = new PhoenixStatement(connection)) {
+                    String tableName = getFullTableName(dataTableRef);
+                    String query = "SELECT count(*) FROM " + tableName;
+                    final QueryPlan plan = statement.compileQuery(query);
+                    TableRef tableRef = plan.getTableRef();
+                    // Set attribute on scan that UngroupedAggregateRegionObserver will switch on.
+                    // We'll detect that this attribute was set the server-side and write the index
+                    // rows per region as a result. The value of the attribute will be our persisted
+                    // index maintainers.
+                    // Define the LOCAL_INDEX_BUILD as a new static in BaseScannerRegionObserver
+                    Scan scan = plan.getContext().getScan();
+                    try {
+                        if(ScanUtil.isDefaultTimeRange(scan.getTimeRange())) {
+                            Long scn = connection.getSCN();
+                            if (scn == null) {
+                                scn = plan.getContext().getCurrentTime();
                             }
+                            scan.setTimeRange(dataTableRef.getLowerBoundTimeStamp(),scn);
+                        }
+                    } catch (IOException e) {
+                        throw new SQLException(e);
+                    }
+                    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
+                    PTable dataTable = tableRef.getTable();
+                    for(PTable idx: dataTable.getIndexes()) {
+                        if(idx.getName().equals(index.getName())) {
+                            index = idx;
+                            break;
                         }
-                        plan.getContext().setScanTimeRange(new TimeRange(dataTableRef.getLowerBoundTimeStamp(),scn));
                     }
-                } catch (IOException e) {
-                    throw new SQLException(e);
-                }
-                ImmutableBytesWritable ptr = new ImmutableBytesWritable();
-                PTable dataTable = tableRef.getTable();
-                for(PTable idx: dataTable.getIndexes()) {
-                    if(idx.getName().equals(index.getName())) {
-                        index = idx;
-                        break;
+                    List<PTable> indexes = Lists.newArrayListWithExpectedSize(1);
+                    // Only build newly created index.
+                    indexes.add(index);
+                    IndexMaintainer.serialize(dataTable, ptr, indexes, plan.getContext().getConnection());
+                    scan.setAttribute(BaseScannerRegionObserver.LOCAL_INDEX_BUILD, ByteUtil.copyKeyBytesIfNecessary(ptr));
+                    // By default, we'd use a FirstKeyOnly filter as nothing else needs to be projected for count(*).
+                    // However, in this case, we need to project all of the data columns that contribute to the index.
+                    IndexMaintainer indexMaintainer = index.getIndexMaintainer(dataTable, connection);
+                    for (ColumnReference columnRef : indexMaintainer.getAllColumns()) {
+                        scan.addColumn(columnRef.getFamily(), columnRef.getQualifier());
                     }
-                }
-                List<PTable> indexes = Lists.newArrayListWithExpectedSize(1);
-                // Only build newly created index.
-                indexes.add(index);
-                IndexMaintainer.serialize(dataTable, ptr, indexes, plan.getContext().getConnection());
-                scan.setAttribute(BaseScannerRegionObserver.LOCAL_INDEX_BUILD, ByteUtil.copyKeyBytesIfNecessary(ptr));
-                // By default, we'd use a FirstKeyOnly filter as nothing else needs to be projected for count(*).
-                // However, in this case, we need to project all of the data columns that contribute to the index.
-                IndexMaintainer indexMaintainer = index.getIndexMaintainer(dataTable, connection);
-                for (ColumnReference columnRef : indexMaintainer.getAllColumns()) {
-                    scan.addColumn(columnRef.getFamily(), columnRef.getQualifier());
-                }
 
-                // Go through MutationPlan abstraction so that we can create local indexes
-                // with a connectionless connection (which makes testing easier).
-                mutationPlan = new MutationPlan() {
+                    // Go through MutationPlan abstraction so that we can create local indexes
+                    // with a connectionless connection (which makes testing easier).
+                    mutationPlan = new MutationPlan() {
 
-                    @Override
-                    public StatementContext getContext() {
-                        return plan.getContext();
-                    }
+                        @Override
+                        public StatementContext getContext() {
+                            return plan.getContext();
+                        }
 
-                    @Override
-                    public ParameterMetaData getParameterMetaData() {
-                        return PhoenixParameterMetaData.EMPTY_PARAMETER_META_DATA;
-                    }
+                        @Override
+                        public ParameterMetaData getParameterMetaData() {
+                            return PhoenixParameterMetaData.EMPTY_PARAMETER_META_DATA;
+                        }
 
-                    @Override
-                    public ExplainPlan getExplainPlan() throws SQLException {
-                        return ExplainPlan.EMPTY_PLAN;
-                    }
+                        @Override
+                        public ExplainPlan getExplainPlan() throws SQLException {
+                            return ExplainPlan.EMPTY_PLAN;
+                        }
 
-                    @Override
-                    public PhoenixConnection getConnection() {
-                        return connection;
-                    }
+                        @Override
+                        public PhoenixConnection getConnection() {
+                            return connection;
+                        }
 
-                    @Override
-                    public MutationState execute() throws SQLException {
-                        Cell kv = plan.iterator().next().getValue(0);
-                        ImmutableBytesWritable tmpPtr = new ImmutableBytesWritable(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength());
-                        // A single Cell will be returned with the count(*) - we decode that here
-                        long rowCount = PLong.INSTANCE.getCodec().decodeLong(tmpPtr, SortOrder.getDefault());
-                        // The contract is to return a MutationState that contains the number of rows modified. In this
-                        // case, it's the number of rows in the data table which corresponds to the number of index
-                        // rows that were added.
-                        return new MutationState(0, connection, rowCount);
-                    }
+                        @Override
+                        public MutationState execute() throws SQLException {
+                            Cell kv = plan.iterator().next().getValue(0);
+                            ImmutableBytesWritable tmpPtr = new ImmutableBytesWritable(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength());
+                            // A single Cell will be returned with the count(*) - we decode that here
+                            long rowCount = PLong.INSTANCE.getCodec().decodeLong(tmpPtr, SortOrder.getDefault());
+                            // The contract is to return a MutationState that contains the number of rows modified. In this
+                            // case, it's the number of rows in the data table which corresponds to the number of index
+                            // rows that were added.
+                            return new MutationState(0, connection, rowCount);
+                        }
 
-                };
+                    };
+                }
             } else {
                 PostIndexDDLCompiler compiler = new PostIndexDDLCompiler(connection, dataTableRef);
                 mutationPlan = compiler.compile(index);
                 try {
-                    mutationPlan.getContext().setScanTimeRange(new TimeRange(dataTableRef.getLowerBoundTimeStamp(), Long.MAX_VALUE));
+                    Long scn = connection.getSCN();
+                    if (scn == null) {
+                        scn = mutationPlan.getContext().getCurrentTime();
+                    }
+                    mutationPlan.getContext().getScan().setTimeRange(dataTableRef.getLowerBoundTimeStamp(), scn);
                 } catch (IOException e) {
                     throw new SQLException(e);
                 }
@@ -1141,7 +1150,7 @@ public class MetaDataClient {
                     PColumn column = pkColumns.get(i);
 					unusedPkColumns.add(new RowKeyColumnExpression(column, new RowKeyValueAccessor(pkColumns, i), "\""+column.getName().getString()+"\""));
                 }
-                List<Pair<ColumnName, SortOrder>> allPkColumns = Lists.newArrayListWithExpectedSize(unusedPkColumns.size());
+                List<ColumnDefInPkConstraint> allPkColumns = Lists.newArrayListWithExpectedSize(unusedPkColumns.size());
                 List<ColumnDef> columnDefs = Lists.newArrayListWithExpectedSize(includedColumns.size() + indexParseNodeAndSortOrderList.size());
                 
                 if (dataTable.isMultiTenant()) {
@@ -1151,8 +1160,8 @@ public class MetaDataClient {
 					unusedPkColumns.remove(columnExpression);
                     PDataType dataType = IndexUtil.getIndexColumnDataType(col);
                     ColumnName colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(col));
-                    allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, col.getSortOrder()));
-                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, SortOrder.getDefault(), col.getName().getString()));
+                    allPkColumns.add(new ColumnDefInPkConstraint(colName, col.getSortOrder(), false));
+                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, SortOrder.getDefault(), col.getName().getString(), col.isRowTimestamp()));
                 }
                 /*
                  * Allocate an index ID in two circumstances:
@@ -1164,8 +1173,8 @@ public class MetaDataClient {
                     // Next add index ID column
                     PDataType dataType = MetaDataUtil.getViewIndexIdDataType();
                     ColumnName colName = ColumnName.caseSensitiveColumnName(MetaDataUtil.getViewIndexIdColumnName());
-                    allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, SortOrder.getDefault()));
-                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), false, null, null, false, SortOrder.getDefault(), null));
+                    allPkColumns.add(new ColumnDefInPkConstraint(colName, SortOrder.getDefault(), false));
+                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), false, null, null, false, SortOrder.getDefault(), null, false));
                 }
                 
                 PhoenixStatement phoenixStatment = new PhoenixStatement(connection);
@@ -1199,11 +1208,13 @@ public class MetaDataClient {
                     
                     ColumnName colName = null;
                     ColumnRef colRef = expressionIndexCompiler.getColumnRef();
-					if (colRef!=null) { 
+					boolean isRowTimestamp = false;
+                    if (colRef!=null) { 
 						// if this is a regular column
 					    PColumn column = colRef.getColumn();
 					    String columnFamilyName = column.getFamilyName()!=null ? column.getFamilyName().getString() : null;
 					    colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(columnFamilyName, column.getName().getString()));
+					    isRowTimestamp = column.isRowTimestamp();
 					}
 					else { 
 						// if this is an expression
@@ -1213,8 +1224,8 @@ public class MetaDataClient {
 					}
 					indexedColumnNames.add(colName);
                 	PDataType dataType = IndexUtil.getIndexColumnDataType(expression.isNullable(), expression.getDataType());
-                    allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, pair.getSecond()));
-                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), expression.isNullable(), expression.getMaxLength(), expression.getScale(), false, pair.getSecond(), expressionStr));
+                    allPkColumns.add(new ColumnDefInPkConstraint(colName, pair.getSecond(), isRowTimestamp));
+                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), expression.isNullable(), expression.getMaxLength(), expression.getScale(), false, pair.getSecond(), expressionStr, isRowTimestamp));
                 }
 
                 // Next all the PK columns from the data table that aren't indexed
@@ -1225,11 +1236,11 @@ public class MetaDataClient {
                         // we don't need these in the index
                         if (col.getViewConstant() == null) {
                             ColumnName colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(col));
-                            allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, colExpression.getSortOrder()));
+                            allPkColumns.add(new ColumnDefInPkConstraint(colName, colExpression.getSortOrder(), col.isRowTimestamp()));
                             PDataType dataType = IndexUtil.getIndexColumnDataType(colExpression.isNullable(), colExpression.getDataType());
                             columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(),
                                     colExpression.isNullable(), colExpression.getMaxLength(), colExpression.getScale(),
-                                    false, colExpression.getSortOrder(), colExpression.toString()));
+                                    false, colExpression.getSortOrder(), colExpression.toString(), col.isRowTimestamp()));
                         }
                     }
                 }
@@ -1245,7 +1256,7 @@ public class MetaDataClient {
                     if (!SchemaUtil.isPKColumn(col) && col.getViewConstant() == null) {
                         // Need to re-create ColumnName, since the above one won't have the column family name
                         colName = ColumnName.caseSensitiveColumnName(col.getFamilyName().getString(), IndexUtil.getIndexColumnName(col));
-                        columnDefs.add(FACTORY.columnDef(colName, col.getDataType().getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, col.getSortOrder(), null));
+                        columnDefs.add(FACTORY.columnDef(colName, col.getDataType().getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, col.getSortOrder(), null, col.isRowTimestamp()));
                     }
                 }
 
@@ -1424,7 +1435,47 @@ public class MetaDataClient {
         }
         return null;
     }
+    
+    private static boolean checkAndValidateRowTimestampCol(ColumnDef colDef, PrimaryKeyConstraint pkConstraint,
+            boolean rowTimeStampColAlreadyFound, PTableType tableType) throws SQLException {
 
+        ColumnName columnDefName = colDef.getColumnDefName();
+        if (tableType == VIEW && (pkConstraint.getNumColumnsWithRowTimestamp() > 0 || colDef.isRowTimestamp())) {
+            throw new SQLExceptionInfo.Builder(SQLExceptionCode.ROWTIMESTAMP_NOT_ALLOWED_ON_VIEW)
+            .setColumnName(columnDefName.getColumnName()).build().buildException();
+        }
+        /*
+         * For indexes we have already validated that the data table has the right kind and number of row_timestamp
+         * columns. So we don't need to perform any extra validations for them.
+         */
+        if (tableType == TABLE) {
+            boolean isColumnDeclaredRowTimestamp = colDef.isRowTimestamp() || pkConstraint.isColumnRowTimestamp(columnDefName);
+            if (isColumnDeclaredRowTimestamp) {
+                boolean isColumnPartOfPk = colDef.isPK() || pkConstraint.contains(columnDefName);
+                // A column can be declared as ROW_TIMESTAMP only if it is part of the primary key
+                if (isColumnDeclaredRowTimestamp && !isColumnPartOfPk) { 
+                    throw new SQLExceptionInfo.Builder(SQLExceptionCode.ROWTIMESTAMP_PK_COL_ONLY)
+                    .setColumnName(columnDefName.getColumnName()).build().buildException(); 
+                }
+
+                // A column can be declared as ROW_TIMESTAMP only if it can be represented as a long
+                PDataType dataType = colDef.getDataType();
+                if (isColumnDeclaredRowTimestamp && (dataType != PLong.INSTANCE && dataType != PUnsignedLong.INSTANCE && !dataType.isCoercibleTo(PTimestamp.INSTANCE))) { 
+                    throw new SQLExceptionInfo.Builder(SQLExceptionCode.ROWTIMESTAMP_COL_INVALID_TYPE)
+                    .setColumnName(columnDefName.getColumnName()).build().buildException(); 
+                }
+
+                // Only one column can be declared as a ROW_TIMESTAMP column
+                if (rowTimeStampColAlreadyFound && isColumnDeclaredRowTimestamp) {
+                    throw new SQLExceptionInfo.Builder(SQLExceptionCode.ROWTIMESTAMP_ONE_PK_COL_ONLY)
+                    .setColumnName(columnDefName.getColumnName()).build().buildException();
+                }
+                return true;
+            }
+        }
+        return false;
+    }
+    
     private PTable createTableInternal(CreateTableStatement statement, byte[][] splits, final PTable parent, String viewStatement, ViewType viewType, final byte[][] viewColumnConstants, final BitSet isViewColumnReferenced, Short indexId, IndexType indexType) throws SQLException {
         final PTableType tableType = statement.getTableType();
         boolean wasAutoCommit = connection.getAutoCommit();
@@ -1665,7 +1716,7 @@ public class MetaDataClient {
             PreparedStatement colUpsert = connection.prepareStatement(INSERT_COLUMN);
             Map<String, PName> familyNames = Maps.newLinkedHashMap();
             boolean isPK = false;
-
+            boolean rowTimeStampColumnAlreadyFound = false;
             int positionOffset = columns.size();
             if (saltBucketNum != null) {
                 positionOffset++;
@@ -1675,9 +1726,10 @@ public class MetaDataClient {
             }
             int pkPositionOffset = pkColumns.size();
             int position = positionOffset;
-
+            
             for (ColumnDef colDef : colDefs) {
-                if (colDef.isPK()) {
+                rowTimeStampColumnAlreadyFound = checkAndValidateRowTimestampCol(colDef, pkConstraint, rowTimeStampColumnAlreadyFound, tableType);
+                if (colDef.isPK()) { // i.e. the column is declared as CREATE TABLE COLNAME DATATYPE PRIMARY KEY...
                     if (isPK) {
                         throw new SQLExceptionInfo.Builder(SQLExceptionCode.PRIMARY_KEY_ALREADY_EXISTS)
                             .setColumnName(colDef.getColumnDefName().getColumnName()).build().buildException();
@@ -1693,7 +1745,6 @@ public class MetaDataClient {
                                 .setColumnName(colDef.getColumnDefName().getColumnName()).build().buildException();
                     }
                 }
-
                 PColumn column = newColumn(position++, colDef, pkConstraint, defaultFamilyName, false);
                 if (SchemaUtil.isPKColumn(column)) {
                     // TODO: remove this constraint?
@@ -2454,6 +2505,10 @@ public class MetaDataClient {
                         if (colDef != null && colDef.isPK() && table.getType() == VIEW && table.getViewType() != MAPPED) {
                             throwIfLastPKOfParentIsFixedLength(getParentOfView(table), schemaName, tableName, colDef);
                         }
+                        if (colDef != null && colDef.isRowTimestamp()) {
+                            throw new SQLExceptionInfo.Builder(SQLExceptionCode.ROWTIMESTAMP_CREATE_ONLY)
+                            .setColumnName(colDef.getColumnDefName().getColumnName()).build().buildException();
+                        }
                         PColumn column = newColumn(position++, colDef, PrimaryKeyConstraint.EMPTY, table.getDefaultFamilyName() == null ? null : table.getDefaultFamilyName().getString(), true);
                         columns.add(column);
                         String pkName = null;
@@ -2491,7 +2546,7 @@ public class MetaDataClient {
                                     PDataType indexColDataType = IndexUtil.getIndexColumnDataType(colDef.isNull(), colDef.getDataType());
                                     ColumnName indexColName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(null, colDef.getColumnDefName().getColumnName()));
                                     Expression expression = new RowKeyColumnExpression(columns.get(i), new RowKeyValueAccessor(pkColumns, ++pkSlotPosition));
-                                    ColumnDef indexColDef = FACTORY.columnDef(indexColName, indexColDataType.getSqlTypeName(), colDef.isNull(), colDef.getMaxLength(), colDef.getScale(), true, colDef.getSortOrder(), expression.toString());
+                                    ColumnDef indexColDef = FACTORY.columnDef(indexColName, indexColDataType.getSqlTypeName(), colDef.isNull(), colDef.getMaxLength(), colDef.getScale(), true, colDef.getSortOrder(), expression.toString(), colDef.isRowTimestamp());
                                     PColumn indexColumn = newColumn(indexPosition++, indexColDef, PrimaryKeyConstraint.EMPTY, null, true);
                                     addColumnMutation(schemaName, index.getTableName().getString(), indexColumn, colUpsert, index.getParentTableName().getString(), index.getPKName() == null ? null : index.getPKName().getString(), ++nextIndexKeySeq, index.getBucketNum() != null);
                                 }
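
To summarize the checkAndValidateRowTimestampCol() addition above without the exception plumbing: ROW_TIMESTAMP is rejected on views, is only allowed on a primary-key column, requires a type that is LONG, UNSIGNED_LONG, or coercible to TIMESTAMP, is limited to one column per table, and cannot be introduced later via ALTER TABLE ... ADD. A condensed restatement of those rules, not the patch code itself (the real method throws exceptions built from the SQLExceptionCode values noted in the comments):

// Condensed restatement of the ROW_TIMESTAMP validation rules added in MetaDataClient (illustrative).
final class RowTimestampRules {
    static boolean isAllowed(boolean isView, boolean isPkColumn, boolean isLongOrTimestampType,
            boolean rowTimestampColAlreadyDeclared) {
        return !isView                              // ROWTIMESTAMP_NOT_ALLOWED_ON_VIEW
                && isPkColumn                       // ROWTIMESTAMP_PK_COL_ONLY
                && isLongOrTimestampType            // ROWTIMESTAMP_COL_INVALID_TYPE
                && !rowTimestampColAlreadyDeclared; // ROWTIMESTAMP_ONE_PK_COL_ONLY
    }
}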

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java
index fbc737c..357ce6f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java
@@ -53,4 +53,9 @@ public interface PColumn extends PDatum {
     int getEstimatedSize();
     
     String getExpressionStr();
+    
+    /**
+     * @return whether this column represents/stores the hbase cell timestamp.
+     */
+    boolean isRowTimestamp();
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
index 4efb145..cff276b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
@@ -38,6 +38,7 @@ public class PColumnImpl implements PColumn {
     private byte[] viewConstant;
     private boolean isViewReferenced;
     private String expressionStr;
+    private boolean isRowTimestamp;
     
     public PColumnImpl() {
     }
@@ -49,13 +50,13 @@ public class PColumnImpl implements PColumn {
                        Integer scale,
                        boolean nullable,
                        int position,
-                       SortOrder sortOrder, Integer arrSize, byte[] viewConstant, boolean isViewReferenced, String expressionStr) {
-        init(name, familyName, dataType, maxLength, scale, nullable, position, sortOrder, arrSize, viewConstant, isViewReferenced, expressionStr);
+                       SortOrder sortOrder, Integer arrSize, byte[] viewConstant, boolean isViewReferenced, String expressionStr, boolean isRowTimestamp) {
+        init(name, familyName, dataType, maxLength, scale, nullable, position, sortOrder, arrSize, viewConstant, isViewReferenced, expressionStr, isRowTimestamp);
     }
 
     public PColumnImpl(PColumn column, int position) {
         this(column.getName(), column.getFamilyName(), column.getDataType(), column.getMaxLength(),
-                column.getScale(), column.isNullable(), position, column.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced(), column.getExpressionStr());
+                column.getScale(), column.isNullable(), position, column.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced(), column.getExpressionStr(), column.isRowTimestamp());
     }
 
     private void init(PName name,
@@ -67,7 +68,7 @@ public class PColumnImpl implements PColumn {
             int position,
             SortOrder sortOrder,
             Integer arrSize,
-            byte[] viewConstant, boolean isViewReferenced, String expressionStr) {
+            byte[] viewConstant, boolean isViewReferenced, String expressionStr, boolean isRowTimestamp) {
     	Preconditions.checkNotNull(sortOrder);
         this.dataType = dataType;
         if (familyName == null) {
@@ -90,6 +91,7 @@ public class PColumnImpl implements PColumn {
         this.viewConstant = viewConstant;
         this.isViewReferenced = isViewReferenced;
         this.expressionStr = expressionStr;
+        this.isRowTimestamp = isRowTimestamp;
     }
 
     @Override
@@ -191,6 +193,11 @@ public class PColumnImpl implements PColumn {
     public boolean isViewReferenced() {
         return isViewReferenced;
     }
+    
+    @Override
+    public boolean isRowTimestamp() {
+        return isRowTimestamp;
+    }
 
     /**
      * Create a PColumn instance from PBed PColumn instance
@@ -232,8 +239,9 @@ public class PColumnImpl implements PColumn {
         if (column.hasExpression()) {
 	        expressionStr = column.getExpression();
         }
+        boolean isRowTimestamp = column.getIsRowTimestamp();
         return new PColumnImpl(columnName, familyName, dataType, maxLength, scale, nullable, position, sortOrder,
-                arraySize, viewConstant, isViewReferenced, expressionStr);
+                arraySize, viewConstant, isViewReferenced, expressionStr, isRowTimestamp);
     }
 
     public static PTableProtos.PColumn toProto(PColumn column) {
@@ -263,6 +271,7 @@ public class PColumnImpl implements PColumn {
         if (column.getExpressionStr() != null) {
             builder.setExpression(column.getExpressionStr());
         }
+        builder.setIsRowTimestamp(column.isRowTimestamp());
         return builder.build();
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/main/java/org/apache/phoenix/schema/PDatum.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PDatum.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PDatum.java
index 915473a..d6082a7 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PDatum.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PDatum.java
@@ -45,4 +45,5 @@ public interface PDatum {
      * @return The SortOrder for this column, never null
      */
     SortOrder getSortOrder();
+    
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java
index c4e3674..688de9a 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java
@@ -421,7 +421,7 @@ public class PMetaDataImpl implements PMetaData {
             // Update position of columns that follow removed column
             for (int i = position+1; i < oldColumns.size(); i++) {
                 PColumn oldColumn = oldColumns.get(i);
-                PColumn newColumn = new PColumnImpl(oldColumn.getName(), oldColumn.getFamilyName(), oldColumn.getDataType(), oldColumn.getMaxLength(), oldColumn.getScale(), oldColumn.isNullable(), i-1+positionOffset, oldColumn.getSortOrder(), oldColumn.getArraySize(), oldColumn.getViewConstant(), oldColumn.isViewReferenced(), null);
+                PColumn newColumn = new PColumnImpl(oldColumn.getName(), oldColumn.getFamilyName(), oldColumn.getDataType(), oldColumn.getMaxLength(), oldColumn.getScale(), oldColumn.isNullable(), i-1+positionOffset, oldColumn.getSortOrder(), oldColumn.getArraySize(), oldColumn.getViewConstant(), oldColumn.isViewReferenced(), null, oldColumn.isRowTimestamp());
                 columns.add(newColumn);
             }
             

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
index 8da2206..a2979d4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
@@ -332,4 +332,10 @@ public interface PTable extends PMetaDataEntity {
      * @return true if optimizations row key order optimizations are possible
      */
     boolean rowKeyOrderOptimizable();
+    
+    /**
+     * @return Position of the column with {@link PColumn#isRowTimestamp()} as true. 
+     * -1 if there is no such column.
+     */
+    int getRowTimestampColPos();
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
index 8f9bf63..b591281 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
@@ -131,6 +131,7 @@ public class PTableImpl implements PTable {
     private int baseColumnCount;
     private boolean rowKeyOrderOptimizable; // TODO: remove when required that tables have been upgrade for PHOENIX-2067
     private boolean hasColumnsRequiringUpgrade; // TODO: remove when required that tables have been upgrade for PHOENIX-2067
+    private int rowTimestampColPos;
 
     public PTableImpl() {
         this.indexes = Collections.emptyList();
@@ -406,6 +407,7 @@ public class PTableImpl implements PTable {
         int maxExpectedSize = allColumns.length - numPKColumns;
         // Maintain iteration order so that column families are ordered as they are listed
         Map<PName, List<PColumn>> familyMap = Maps.newLinkedHashMap();
+        PColumn rowTimestampCol = null;
         for (PColumn column : allColumns) {
             PName familyName = column.getFamilyName();
             if (familyName == null) {
@@ -418,6 +420,9 @@ public class PTableImpl implements PTable {
                                 || column.getDataType() == PBinary.INSTANCE) )
                         || (column.getSortOrder() == SortOrder.ASC && column.getDataType() == PBinary.INSTANCE && column.getMaxLength() != null && column.getMaxLength() > 1);
             	pkColumns.add(column);
+            	if (column.isRowTimestamp()) {
+            	    rowTimestampCol = column;
+            	}
             }
             if (familyName == null) {
                 estimatedSize += column.getEstimatedSize(); // PK columns
@@ -432,6 +437,12 @@ public class PTableImpl implements PTable {
             }
         }
         this.pkColumns = ImmutableList.copyOf(pkColumns);
+        if (rowTimestampCol != null) {
+            this.rowTimestampColPos = this.pkColumns.indexOf(rowTimestampCol);
+        } else {
+            this.rowTimestampColPos = -1;
+        }
+        
         builder.rowKeyOrderOptimizable(this.rowKeyOrderOptimizable()); // after hasDescVarLengthColumns is calculated
         this.rowKeySchema = builder.build();
         estimatedSize += rowKeySchema.getEstimatedSize();
@@ -1152,4 +1163,9 @@ public class PTableImpl implements PTable {
     public boolean rowKeyOrderOptimizable() {
         return rowKeyOrderOptimizable || !hasColumnsRequiringUpgrade;
     }
+
+    @Override
+    public int getRowTimestampColPos() {
+        return rowTimestampColPos;
+    }
 }
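
The new getRowTimestampColPos() accessor resolves, at PTableImpl construction time, which primary-key slot (if any) holds the ROW_TIMESTAMP column, returning -1 when there is none. A hedged sketch of how calling code might use it; this helper is assumed usage for illustration and is not part of the patch:

import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;

// Illustrative helper (not part of this patch): look up the ROW_TIMESTAMP primary-key
// column via the position exposed by the new PTable#getRowTimestampColPos() accessor.
final class RowTimestampColumns {
    static PColumn rowTimestampColumnOrNull(PTable table) {
        int pos = table.getRowTimestampColPos();
        return pos == -1 ? null : table.getPKColumns().get(pos);
    }
}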

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java
index 964ac39..4ac54cb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java
@@ -38,7 +38,7 @@ public class SaltingUtil {
     public static final String SALTING_COLUMN_NAME = "_SALT";
     public static final String SALTED_ROW_KEY_NAME = "_SALTED_KEY";
     public static final PColumnImpl SALTING_COLUMN = new PColumnImpl(
-            PNameFactory.newName(SALTING_COLUMN_NAME), null, PBinary.INSTANCE, 1, 0, false, 0, SortOrder.getDefault(), 0, null, false, null);
+            PNameFactory.newName(SALTING_COLUMN_NAME), null, PBinary.INSTANCE, 1, 0, false, 0, SortOrder.getDefault(), 0, null, false, null, false);
     public static final RowKeySchema VAR_BINARY_SALTED_SCHEMA = new RowKeySchemaBuilder(2)
         .addField(SALTING_COLUMN, false, SortOrder.getDefault())
         .addField(SchemaUtil.VAR_BINARY_DATUM, false, SortOrder.getDefault()).build();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java
index 7b76a2b..641398f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/ScanUtil.java
@@ -742,4 +742,38 @@ public class ScanUtil {
     public static boolean shouldRowsBeInRowKeyOrder(OrderBy orderBy, StatementContext context) {
         return forceRowKeyOrder(context) || orderBy == FWD_ROW_KEY_ORDER_BY || orderBy == REV_ROW_KEY_ORDER_BY;
     }
+    
+    public static TimeRange intersectTimeRange(TimeRange rowTimestampColRange, TimeRange scanTimeRange, Long scn) throws IOException, SQLException {
+        long scnToUse = scn == null ? HConstants.LATEST_TIMESTAMP : scn;
+        long lowerRangeToBe = 0;
+        long upperRangeToBe = scnToUse;
+        if (rowTimestampColRange != null) {
+            long minRowTimestamp = rowTimestampColRange.getMin();
+            long maxRowTimestamp = rowTimestampColRange.getMax();
+            if ((lowerRangeToBe > maxRowTimestamp) || (upperRangeToBe < minRowTimestamp)) {
+                return null; // degenerate
+            } else {
+                // there is an overlap of ranges
+                lowerRangeToBe = Math.max(lowerRangeToBe, minRowTimestamp);
+                upperRangeToBe = Math.min(upperRangeToBe, maxRowTimestamp);
+            }
+        }
+        if (scanTimeRange != null) {
+            long minScanTimeRange = scanTimeRange.getMin();
+            long maxScanTimeRange = scanTimeRange.getMax();
+            if ((lowerRangeToBe > maxScanTimeRange) || (upperRangeToBe < lowerRangeToBe)) {
+                return null; // degenerate
+            } else {
+                // there is an overlap of ranges
+                lowerRangeToBe = Math.max(lowerRangeToBe, minScanTimeRange);
+                upperRangeToBe = Math.min(upperRangeToBe, maxScanTimeRange);
+            }
+        }
+        return new TimeRange(lowerRangeToBe, upperRangeToBe);
+    }
+    
+    public static boolean isDefaultTimeRange(TimeRange range) {
+        return range.getMin() == 0 && range.getMax() == Long.MAX_VALUE;
+    }
+
 }
\ No newline at end of file
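
To make intersectTimeRange() concrete: it starts from [0, SCN) (or [0, LATEST_TIMESTAMP) when no SCN is set), then narrows that interval by the ROW_TIMESTAMP column's range and any scan time range, returning null when the intervals are disjoint. A standalone worked example with plain long intervals; it does not use the HBase TimeRange class and the timestamp values are arbitrary:

import java.util.Arrays;

// Standalone illustration of the interval intersection performed by intersectTimeRange().
// A long[]{min, max} pair stands in for org.apache.hadoop.hbase.io.TimeRange.
public class TimeRangeIntersectionSketch {
    static long[] intersect(long[] a, long[] b) {
        long min = Math.max(a[0], b[0]);
        long max = Math.min(a[1], b[1]);
        return min > max ? null : new long[] { min, max }; // null == degenerate (no overlap)
    }

    public static void main(String[] args) {
        long scn = 1_443_900_000_000L;                      // connection SCN as the upper bound
        long[] defaultRange = { 0L, scn };
        long[] rowTimestampFilter = { 1_443_800_000_000L, 1_443_850_000_000L };
        System.out.println(Arrays.toString(intersect(defaultRange, rowTimestampFilter)));
        // Disjoint intervals collapse to a degenerate (null) range, i.e. the scan matches nothing.
        System.out.println(Arrays.toString(intersect(new long[] { 0L, 100L }, new long[] { 200L, 300L })));
    }
}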

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
index 076b49e..5ff63dc 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/QueryCompilerTest.java
@@ -2068,4 +2068,47 @@ public class QueryCompilerTest extends BaseConnectionlessQueryTest {
             conn.close();
         }
     }
+    
+    @Test
+    public void testAddingRowTimestampColumn() throws Exception {
+        Connection conn = DriverManager.getConnection(getUrl());
+        // Column of type VARCHAR cannot be declared as ROW_TIMESTAMP
+        try {
+            conn.createStatement().execute("CREATE TABLE T1 (PK1 VARCHAR NOT NULL, PK2 VARCHAR NOT NULL, KV1 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 ROW_TIMESTAMP)) ");
+            fail("Varchar column cannot be added as row_timestamp");
+        } catch(SQLException e) {
+            assertEquals(SQLExceptionCode.ROWTIMESTAMP_COL_INVALID_TYPE.getErrorCode(), e.getErrorCode());
+        }
+        // Column of type INTEGER cannot be declared as ROW_TIMESTAMP
+        try {
+            conn.createStatement().execute("CREATE TABLE T1 (PK1 VARCHAR NOT NULL, PK2 INTEGER NOT NULL, KV1 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 ROW_TIMESTAMP)) ");
+            fail("Integer column cannot be added as row_timestamp");
+        } catch(SQLException e) {
+            assertEquals(SQLExceptionCode.ROWTIMESTAMP_COL_INVALID_TYPE.getErrorCode(), e.getErrorCode());
+        }
+        // Column of type DOUBLE cannot be declared as ROW_TIMESTAMP
+        try {
+            conn.createStatement().execute("CREATE TABLE T1 (PK1 VARCHAR NOT NULL, PK2 DOUBLE NOT NULL, KV1 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1, PK2 ROW_TIMESTAMP)) ");
+            fail("Double column cannot be added as row_timestamp");
+        } catch(SQLException e) {
+            assertEquals(SQLExceptionCode.ROWTIMESTAMP_COL_INVALID_TYPE.getErrorCode(), e.getErrorCode());
+        }
+        // Invalid - two columns declared as row_timestamp in pk constraint
+        try {
+            conn.createStatement().execute("CREATE TABLE T2 (PK1 DATE NOT NULL, PK2 DATE NOT NULL, KV1 VARCHAR CONSTRAINT PK PRIMARY KEY(PK1 ROW_TIMESTAMP , PK2 ROW_TIMESTAMP)) ");
+            fail("Creating table with two row_timestamp columns should fail");
+        } catch (SQLException e) {
+            assertEquals(SQLExceptionCode.ROWTIMESTAMP_ONE_PK_COL_ONLY.getErrorCode(), e.getErrorCode());
+        }
+        
+        // Invalid because only (unsigned)date, time, long, (unsigned)timestamp are valid data types for column to be declared as row_timestamp
+        try {
+            conn.createStatement().execute("CREATE TABLE T5 (PK1 VARCHAR PRIMARY KEY ROW_TIMESTAMP, PK2 VARCHAR, KV1 VARCHAR)");
+            fail("Creating table with a key value column as row_timestamp should fail");
+        } catch (SQLException e) {
+            assertEquals(SQLExceptionCode.ROWTIMESTAMP_COL_INVALID_TYPE.getErrorCode(), e.getErrorCode());
+        }
+        
+        
+    }
 }
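
The new test above covers only rejections. For contrast, a statement along the following lines should compile cleanly, since DATE is among the accepted types and the column sits in the primary key; this hypothetical positive case is written in the same style as the tests above but is not part of the patch:

// Hypothetical positive case (not part of the patch).
@Test
public void testValidRowTimestampColumn() throws Exception {
    Connection conn = DriverManager.getConnection(getUrl());
    try {
        // DATE is coercible to TIMESTAMP and PK1 is a primary-key column, so this should succeed.
        conn.createStatement().execute(
            "CREATE TABLE T6 (PK1 DATE NOT NULL, PK2 VARCHAR NOT NULL, KV1 VARCHAR " +
            "CONSTRAINT PK PRIMARY KEY(PK1 ROW_TIMESTAMP, PK2))");
    } finally {
        conn.close();
    }
}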

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java
index 9344323..1b45dc4 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/compile/WhereOptimizerTest.java
@@ -36,6 +36,8 @@ import java.math.BigDecimal;
 import java.sql.Connection;
 import java.sql.Date;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/test/java/org/apache/phoenix/execute/CorrelatePlanTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/execute/CorrelatePlanTest.java b/phoenix-core/src/test/java/org/apache/phoenix/execute/CorrelatePlanTest.java
index 7ae3757..334ce8c 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/execute/CorrelatePlanTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/execute/CorrelatePlanTest.java
@@ -226,7 +226,7 @@ public class CorrelatePlanTest {
             Expression expr = LiteralExpression.newConstant(row[i]);
             columns.add(new PColumnImpl(PNameFactory.newName(name), PNameFactory.newName(TupleProjector.VALUE_COLUMN_FAMILY),
                     expr.getDataType(), expr.getMaxLength(), expr.getScale(), expr.isNullable(),
-                    i, expr.getSortOrder(), null, null, false, name));
+                    i, expr.getSortOrder(), null, null, false, name, false));
         }
         try {
             PTable pTable = PTableImpl.makePTable(null, PName.EMPTY_NAME, PName.EMPTY_NAME,

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/test/java/org/apache/phoenix/execute/UnnestArrayPlanTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/execute/UnnestArrayPlanTest.java b/phoenix-core/src/test/java/org/apache/phoenix/execute/UnnestArrayPlanTest.java
index 896f920..d508707 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/execute/UnnestArrayPlanTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/execute/UnnestArrayPlanTest.java
@@ -116,8 +116,8 @@ public class UnnestArrayPlanTest {
         LiteralExpression dummy = LiteralExpression.newConstant(null, arrayType);
         RowKeyValueAccessor accessor = new RowKeyValueAccessor(Arrays.asList(dummy), 0);
         UnnestArrayPlan plan = new UnnestArrayPlan(subPlan, new RowKeyColumnExpression(dummy, accessor), withOrdinality);
-        PColumn elemColumn = new PColumnImpl(PNameFactory.newName("ELEM"), PNameFactory.newName(TupleProjector.VALUE_COLUMN_FAMILY), baseType, null, null, true, 0, SortOrder.getDefault(), null, null, false, "");
-        PColumn indexColumn = withOrdinality ? new PColumnImpl(PNameFactory.newName("IDX"), PNameFactory.newName(TupleProjector.VALUE_COLUMN_FAMILY), PInteger.INSTANCE, null, null, true, 0, SortOrder.getDefault(), null, null, false, "") : null;
+        PColumn elemColumn = new PColumnImpl(PNameFactory.newName("ELEM"), PNameFactory.newName(TupleProjector.VALUE_COLUMN_FAMILY), baseType, null, null, true, 0, SortOrder.getDefault(), null, null, false, "", false);
+        PColumn indexColumn = withOrdinality ? new PColumnImpl(PNameFactory.newName("IDX"), PNameFactory.newName(TupleProjector.VALUE_COLUMN_FAMILY), PInteger.INSTANCE, null, null, true, 0, SortOrder.getDefault(), null, null, false, "", false) : null;
         List<PColumn> columns = withOrdinality ? Arrays.asList(elemColumn, indexColumn) : Arrays.asList(elemColumn);
         ProjectedColumnExpression elemExpr = new ProjectedColumnExpression(elemColumn, columns, 0, elemColumn.getName().getString());
         ProjectedColumnExpression indexExpr = withOrdinality ? new ProjectedColumnExpression(indexColumn, columns, 1, indexColumn.getName().getString()) : null;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java b/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java
index 06d21c2..7a299a9 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java
@@ -41,7 +41,7 @@ public class ColumnExpressionTest {
         int maxLen = 30;
         int scale = 5;
         PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PDecimal.INSTANCE, maxLen, scale,
-                true, 20, SortOrder.getDefault(), 0, null, false, null);
+                true, 20, SortOrder.getDefault(), 0, null, false, null, false);
         ColumnExpression colExp = new KeyValueColumnExpression(column);
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         DataOutputStream dOut = new DataOutputStream(baos);
@@ -61,7 +61,7 @@ public class ColumnExpressionTest {
     public void testSerializationWithNullScale() throws Exception {
         int maxLen = 30;
         PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PBinary.INSTANCE, maxLen, null,
-                true, 20, SortOrder.getDefault(), 0, null, false, null);
+                true, 20, SortOrder.getDefault(), 0, null, false, null, false);
         ColumnExpression colExp = new KeyValueColumnExpression(column);
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         DataOutputStream dOut = new DataOutputStream(baos);
@@ -81,7 +81,7 @@ public class ColumnExpressionTest {
     public void testSerializationWithNullMaxLength() throws Exception {
         int scale = 5;
         PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PVarchar.INSTANCE, null, scale,
-                true, 20, SortOrder.getDefault(), 0, null, false, null);
+                true, 20, SortOrder.getDefault(), 0, null, false, null, false);
         ColumnExpression colExp = new KeyValueColumnExpression(column);
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         DataOutputStream dOut = new DataOutputStream(baos);
@@ -100,7 +100,7 @@ public class ColumnExpressionTest {
     @Test
     public void testSerializationWithNullScaleAndMaxLength() throws Exception {
         PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PDecimal.INSTANCE, null, null, true,
-                20, SortOrder.getDefault(), 0, null, false, null);
+                20, SortOrder.getDefault(), 0, null, false, null, false);
         ColumnExpression colExp = new KeyValueColumnExpression(column);
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         DataOutputStream dOut = new DataOutputStream(baos);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java
index 5624b51..b6371f1 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java
@@ -129,6 +129,10 @@ public class AggregateResultScannerTest extends BaseConnectionlessQueryTest {
             public String getExpressionStr() {
                 return null;
             }
+            @Override
+            public boolean isRowTimestamp() {
+                return false;
+            }
         })), null);
         aggregationManager.setAggregators(new ClientAggregators(Collections.<SingleAggregateFunction>singletonList(func), 1));
         ResultIterators iterators = new ResultIterators() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixDriverTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixDriverTest.java b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixDriverTest.java
index be40702..61dc442 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixDriverTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/PhoenixDriverTest.java
@@ -24,11 +24,15 @@ import java.sql.Connection;
 import java.sql.Driver;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
+import java.sql.SQLException;
 import java.util.Properties;
 
+import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.query.BaseConnectionlessQueryTest;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.TestUtil;
 import org.junit.Test;
 
 public class PhoenixDriverTest extends BaseConnectionlessQueryTest {
@@ -71,4 +75,16 @@ public class PhoenixDriverTest extends BaseConnectionlessQueryTest {
             fail("Upsert should have failed since the number of upserts (200) is greater than the MAX_MUTATION_SIZE_ATTRIB (100)");
         } catch (IllegalArgumentException expected) {}
     }
+    
+    @Test
+    public void testDisallowNegativeScn() {
+        Properties props = PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES);
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, String.valueOf(-100));
+        try {
+            DriverManager.getConnection(getUrl(), props);
+            fail("Creating a phoenix connection with negative scn is not allowed");
+        } catch(SQLException e) {
+            assertEquals(SQLExceptionCode.INVALID_SCN.getErrorCode(), e.getErrorCode());
+        }
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java b/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java
index d651246..63c9e42 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java
@@ -408,7 +408,7 @@ public class QueryParserTest {
     		List<Pair<ColumnName,SortOrder>> columns = pkConstraint.getColumnNames();
     		assertEquals(2, columns.size());
     		for (Pair<ColumnName,SortOrder> pair : columns) {
-    			assertEquals(SortOrder.fromDDLValue(order), pkConstraint.getColumn(pair.getFirst()).getSecond());
+    			assertEquals(SortOrder.fromDDLValue(order), pkConstraint.getColumnWithSortOrder(pair.getFirst()).getSecond());
     		}    		
     	}
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b4ec22fd/phoenix-protocol/src/main/PTable.proto
----------------------------------------------------------------------
diff --git a/phoenix-protocol/src/main/PTable.proto b/phoenix-protocol/src/main/PTable.proto
index a327803..3257a70 100644
--- a/phoenix-protocol/src/main/PTable.proto
+++ b/phoenix-protocol/src/main/PTable.proto
@@ -47,6 +47,7 @@ message PColumn {
   optional bytes viewConstant = 10;
   optional bool viewReferenced = 11;
   optional string expression = 12;
+  optional bool isRowTimestamp = 13;
 }
 
 message PTableStats {

