hive-commits mailing list archives

From: ga...@apache.org
Subject: svn commit: r1673437 [1/8] - in /hive/branches/hbase-metastore: ./ beeline/src/java/org/apache/hive/beeline/ bin/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ ...
Date: Tue, 14 Apr 2015 14:47:33 GMT
Author: gates
Date: Tue Apr 14 14:47:30 2015
New Revision: 1673437

URL: http://svn.apache.org/r1673437
Log:
hbase-metastore merge trunk to branch.

Added:
    hive/branches/hbase-metastore/data/files/extrapolate_stats_partial_ndv.txt
      - copied unchanged from r1673435, hive/trunk/data/files/extrapolate_stats_partial_ndv.txt
    hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/repl/
      - copied from r1673435, hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/repl/
    hive/branches/hbase-metastore/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/
      - copied from r1673435, hive/trunk/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/
      - copied from r1673435, hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRowDynBatch.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRowDynBatch.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRowSameBatch.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRowSameBatch.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnMapping.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnMapping.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOrderedMap.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOutputMapping.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnOutputMapping.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSourceMapping.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSourceMapping.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRowDynBatch.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRowDynBatch.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRowSameBatch.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRowSameBatch.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java
      - copied unchanged from r1673435, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRowNoNulls.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/RandomRowObjectSource.java
      - copied unchanged from r1673435, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/RandomRowObjectSource.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorRowObject.java
      - copied unchanged from r1673435, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorRowObject.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSerDeRow.java
      - copied unchanged from r1673435, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSerDeRow.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
      - copied unchanged from r1673435, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetRowGroupFilter.java
    hive/branches/hbase-metastore/ql/src/test/queries/clientnegative/update_bucket_col.q
      - copied unchanged from r1673435, hive/trunk/ql/src/test/queries/clientnegative/update_bucket_col.q
    hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/columnstats_part_coltype.q
      - copied unchanged from r1673435, hive/trunk/ql/src/test/queries/clientpositive/columnstats_part_coltype.q
    hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial_ndv.q
      - copied unchanged from r1673435, hive/trunk/ql/src/test/queries/clientpositive/extrapolate_part_stats_partial_ndv.q
    hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/vectorized_parquet_types.q
      - copied unchanged from r1673435, hive/trunk/ql/src/test/queries/clientpositive/vectorized_parquet_types.q
    hive/branches/hbase-metastore/ql/src/test/results/clientnegative/update_bucket_col.q.out
      - copied unchanged from r1673435, hive/trunk/ql/src/test/results/clientnegative/update_bucket_col.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
      - copied unchanged from r1673435, hive/trunk/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out
      - copied unchanged from r1673435, hive/trunk/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/vector_date_1.q.out
      - copied unchanged from r1673435, hive/trunk/ql/src/test/results/clientpositive/tez/vector_date_1.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/vector_interval_1.q.out
      - copied unchanged from r1673435, hive/trunk/ql/src/test/results/clientpositive/tez/vector_interval_1.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/vector_interval_2.q.out
      - copied unchanged from r1673435, hive/trunk/ql/src/test/results/clientpositive/tez/vector_interval_2.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/vector_multi_insert.q.out
      - copied unchanged from r1673435, hive/trunk/ql/src/test/results/clientpositive/tez/vector_multi_insert.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/vectorized_parquet_types.q.out
      - copied unchanged from r1673435, hive/trunk/ql/src/test/results/clientpositive/vectorized_parquet_types.q.out
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/
      - copied from r1673435, hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/fast/
      - copied from r1673435, hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/fast/
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/
      - copied from r1673435, hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/
      - copied from r1673435, hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
      - copied unchanged from r1673435, hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
      - copied unchanged from r1673435, hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java
      - copied unchanged from r1673435, hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java
      - copied unchanged from r1673435, hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleFast.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java
      - copied unchanged from r1673435, hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java
Removed:
    hive/branches/hbase-metastore/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatEximInputFormat.java.broken
    hive/branches/hbase-metastore/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatEximOutputCommitter.java.broken
    hive/branches/hbase-metastore/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatEximOutputFormat.java.broken
    hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestEximSemanticAnalysis.java.broken
    hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestStorageHandlerProperties.java.broken
    hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapred/
    hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatEximInputFormat.java.broken
    hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatEximOutputFormat.java.broken
    hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileInputStorageDriver.java.broken
    hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/rcfile/TestRCFileOutputStorageDriver.java.broken
    hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatEximLoader.java.broken
    hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatEximStorer.java.broken
    hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatEximLoader.java.broken
    hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPermsInheritance.java.broken
    hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigStorageDriver.java.broken
    hive/branches/hbase-metastore/service/src/test/org/apache/hive/service/cli/operation/
Modified:
    hive/branches/hbase-metastore/   (props changed)
    hive/branches/hbase-metastore/beeline/src/java/org/apache/hive/beeline/Commands.java
    hive/branches/hbase-metastore/bin/beeline
    hive/branches/hbase-metastore/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
    hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java
    hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/hbase-metastore/common/src/java/org/apache/hive/common/util/DateUtils.java
    hive/branches/hbase-metastore/data/conf/hive-log4j.properties
    hive/branches/hbase-metastore/data/conf/hive-site.xml
    hive/branches/hbase-metastore/data/files/parquet_types.txt
    hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
    hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
    hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
    hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
    hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
    hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java
    hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java
    hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java
    hive/branches/hbase-metastore/hcatalog/webhcat/java-client/pom.xml
    hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
    hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
    hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java
    hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
    hive/branches/hbase-metastore/itests/src/test/resources/testconfiguration.properties
    hive/branches/hbase-metastore/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
    hive/branches/hbase-metastore/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
    hive/branches/hbase-metastore/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
    hive/branches/hbase-metastore/jdbc/src/java/org/apache/hive/jdbc/Utils.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/IExtrapolatePartStatus.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/LinearExtrapolatePartStatus.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/events/AddPartitionEvent.java
    hive/branches/hbase-metastore/pom.xml
    hive/branches/hbase-metastore/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumn.txt
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/AbstractRowContainer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/UnwrapRowContainer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/VectorizedParquetInputFormat.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
    hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
    hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
    hive/branches/hbase-metastore/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
    hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update.q
    hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update_own_table.q
    hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/dynpart_sort_optimization_acid.q
    hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/update_all_types.q
    hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/update_tmp_table.q
    hive/branches/hbase-metastore/ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/annotate_stats_part.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/authorization_update.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/authorization_update_own_table.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/dynpart_sort_optimization_acid.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/index_stale_partitioned.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/input42.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/input_part9.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/list_bucket_query_oneskew_3.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/update_all_types.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/update_tmp_table.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/vector_char_2.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/vector_decimal_round.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/vector_decimal_round_2.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/tez/vector_if_expr.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/union_view.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/update_all_types.q.out
    hive/branches/hbase-metastore/ql/src/test/results/clientpositive/update_tmp_table.q.out
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/WriteBuffers.java
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/InputByteBuffer.java
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java
    hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
    hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java
    hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
    hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
    hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/Operation.java
    hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java

Propchange: hive/branches/hbase-metastore/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Apr 14 14:47:30 2015
@@ -4,4 +4,4 @@
 /hive/branches/spark:1608589-1660298
 /hive/branches/tez:1494760-1622766
 /hive/branches/vectorization:1466908-1527856
-/hive/trunk:1654331-1671987
+/hive/trunk:1654331-1673435

Modified: hive/branches/hbase-metastore/beeline/src/java/org/apache/hive/beeline/Commands.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/beeline/src/java/org/apache/hive/beeline/Commands.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/beeline/src/java/org/apache/hive/beeline/Commands.java (original)
+++ hive/branches/hbase-metastore/beeline/src/java/org/apache/hive/beeline/Commands.java Tue Apr 14 14:47:30 2015
@@ -760,14 +760,22 @@ public class Commands {
       while (beeLine.getConsoleReader() != null && !(line.trim().endsWith(";"))
         && beeLine.getOpts().isAllowMultiLineCommand()) {
 
-        StringBuilder prompt = new StringBuilder(beeLine.getPrompt());
-        for (int i = 0; i < prompt.length() - 1; i++) {
-          if (prompt.charAt(i) != '>') {
-            prompt.setCharAt(i, i % 2 == 0 ? '.' : ' ');
+        if (!beeLine.getOpts().isSilent()) {
+          StringBuilder prompt = new StringBuilder(beeLine.getPrompt());
+          for (int i = 0; i < prompt.length() - 1; i++) {
+            if (prompt.charAt(i) != '>') {
+              prompt.setCharAt(i, i % 2 == 0 ? '.' : ' ');
+            }
           }
         }
 
-        String extra = beeLine.getConsoleReader().readLine(prompt.toString());
+        String extra = null;
+        if (beeLine.getOpts().isSilent() && beeLine.getOpts().getScriptFile() != null) {
+          extra = beeLine.getConsoleReader().readLine(null, jline.console.ConsoleReader.NULL_MASK);
+        } else {
+          extra = beeLine.getConsoleReader().readLine(beeLine.getPrompt());
+        }
+
         if (extra == null) { //it happens when using -f and the line of cmds does not end with ;
           break;
         }

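For context, a minimal sketch of the continuation-read behavior this hunk introduces, using only the jline2 API the hunk itself calls (class and variable names here are illustrative, not from the commit): when beeline is silent and running a script file, the next line is read with NULL_MASK, so no continuation prompt is printed and no input is echoed.

    import jline.console.ConsoleReader;

    public class ContinuationReadSketch {
      public static void main(String[] args) throws Exception {
        ConsoleReader reader = new ConsoleReader();
        // Stands in for beeLine.getOpts().isSilent() && getScriptFile() != null.
        boolean silentScript = true;
        String extra;
        if (silentScript) {
          // NULL_MASK suppresses both the prompt and the echo while still
          // consuming one line of input.
          extra = reader.readLine(null, ConsoleReader.NULL_MASK);
        } else {
          extra = reader.readLine("beeline> "); // interactive continuation prompt
        }
        System.out.println("continuation: " + extra);
      }
    }
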
Modified: hive/branches/hbase-metastore/bin/beeline
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/bin/beeline?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/bin/beeline (original)
+++ hive/branches/hbase-metastore/bin/beeline Tue Apr 14 14:47:30 2015
@@ -18,4 +18,8 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
+# Set Hadoop User classpath to true so that httpclient jars are taken from
+# hive lib instead of hadoop lib.
+export HADOOP_USER_CLASSPATH_FIRST=true
+
 . "$bin"/hive --service beeline "$@"

Modified: hive/branches/hbase-metastore/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hive/branches/hbase-metastore/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Tue Apr 14 14:47:30 2015
@@ -767,6 +767,8 @@ public class CliDriver {
       System.err.println(e.getMessage());
     }
 
+    System.out.println("WARNING: Hive CLI is deprecated and migration to Beeline is recommended.");
+
     // add shutdown hook to flush the history to history file
     Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
       @Override
@@ -785,6 +787,7 @@ public class CliDriver {
 
   protected void setupConsoleReader() throws IOException {
     reader = new ConsoleReader();
+    reader.setExpandEvents(false);
     reader.setBellEnabled(false);
     for (Completer completer : getCommandCompleter()) {
       reader.addCompleter(completer);

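The one-line addition above is behavioral, not cosmetic: jline2 performs '!'-style history expansion by default, which can rewrite or reject HiveQL containing '!'. A minimal sketch of the reader setup, limited to the jline2 calls shown in the hunk:

    import java.io.IOException;
    import jline.console.ConsoleReader;

    public class ReaderSetupSketch {
      public static void main(String[] args) throws IOException {
        ConsoleReader reader = new ConsoleReader();
        // Keep '!' literal; otherwise a line such as
        //   SELECT * FROM t WHERE s != 'x';
        // can trigger history expansion instead of being passed through.
        reader.setExpandEvents(false);
        reader.setBellEnabled(false);
      }
    }
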
Modified: hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java (original)
+++ hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/common/type/HiveChar.java Tue Apr 14 14:47:30 2015
@@ -55,7 +55,7 @@ public class HiveChar extends HiveBaseCh
     return StringUtils.stripEnd(value, " ");
   }
 
-  protected String getPaddedValue() {
+  public String getPaddedValue() {
     return value;
   }
 

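Widening getPaddedValue() from protected to public lets callers outside the type hierarchy read the blank-padded form directly. A small illustration, assuming the HiveChar(String, int) constructor pads the value to the declared length as HiveBaseChar does:

    import org.apache.hadoop.hive.common.type.HiveChar;

    public class PaddedValueSketch {
      public static void main(String[] args) {
        HiveChar c = new HiveChar("ab", 5);
        // Previously visible only to subclasses; now callable anywhere.
        System.out.println("[" + c.getPaddedValue() + "]"); // expected: [ab   ]
      }
    }
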
Modified: hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/hbase-metastore/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue Apr 14 14:47:30 2015
@@ -167,6 +167,7 @@ public class HiveConf extends Configurat
       HiveConf.ConfVars.HIVE_TXN_MANAGER,
       HiveConf.ConfVars.HIVE_TXN_TIMEOUT,
       HiveConf.ConfVars.HIVE_TXN_MAX_OPEN_BATCH,
+      HiveConf.ConfVars.HIVE_METASTORE_STATS_NDV_DENSITY_FUNCTION
       };
 
   /**
@@ -1288,6 +1289,8 @@ public class HiveConf extends Configurat
     HIVE_STATS_NDV_ERROR("hive.stats.ndv.error", (float)20.0,
         "Standard error expressed in percentage. Provides a tradeoff between accuracy and compute cost. \n" +
         "A lower value for error indicates higher accuracy and a higher compute cost."),
+    HIVE_METASTORE_STATS_NDV_DENSITY_FUNCTION("hive.metastore.stats.ndv.densityfunction", false,
+        "Whether to use density function to estimate the NDV for the whole table based on the NDV of partitions"),
     HIVE_STATS_KEY_PREFIX_MAX_LENGTH("hive.stats.key.prefix.max.length", 150,
         "Determines if when the prefix of the key used for intermediate stats collection\n" +
         "exceeds a certain length, a hash of the key is used instead.  If the value < 0 then hashing"),
@@ -1632,8 +1635,14 @@ public class HiveConf extends Configurat
         "${system:java.io.tmpdir}" + File.separator + "${system:user.name}" + File.separator +
             "operation_logs",
         "Top level directory where operation logs are stored if logging functionality is enabled"),
-    HIVE_SERVER2_LOGGING_OPERATION_VERBOSE("hive.server2.logging.operation.verbose", false,
-            "When true, HS2 operation logs available for clients will be verbose"),
+    HIVE_SERVER2_LOGGING_OPERATION_LEVEL("hive.server2.logging.operation.level", "EXECUTION",
+        new StringSet("NONE", "EXECUTION", "PERFORMANCE", "VERBOSE"),
+        "HS2 operation logging mode available to clients to be set at session level.\n" +
+        "For this to work, hive.server2.logging.operation.enabled should be set to true.\n" +
+        "  NONE: Ignore any logging\n" +
+        "  EXECUTION: Log completion of tasks\n" +
+        "  PERFORMANCE: Execution + Performance logs \n" +
+        "  VERBOSE: All logs" ),
     // logging configuration
     HIVE_LOG4J_FILE("hive.log4j.file", "",
         "Hive log4j configuration file.\n" +
@@ -1727,10 +1736,6 @@ public class HiveConf extends Configurat
         "Path component of URL endpoint when in HTTP mode."),
     HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE("hive.server2.thrift.max.message.size", 100*1024*1024,
         "Maximum message size in bytes a HS2 server will accept."),
-    HIVE_SERVER2_THRIFT_HTTP_MIN_WORKER_THREADS("hive.server2.thrift.http.min.worker.threads", 5,
-        "Minimum number of worker threads when in HTTP mode."),
-    HIVE_SERVER2_THRIFT_HTTP_MAX_WORKER_THREADS("hive.server2.thrift.http.max.worker.threads", 500,
-        "Maximum number of worker threads when in HTTP mode."),
     HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME("hive.server2.thrift.http.max.idle.time", "1800s",
         new TimeValidator(TimeUnit.MILLISECONDS),
         "Maximum idle time for a connection on the server when in HTTP mode."),

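Two of the HiveConf changes above are client-visible: the new hive.metastore.stats.ndv.densityfunction boolean and the hive.server2.logging.operation.level StringSet that replaces the old verbose flag. A minimal sketch of reading and setting them through the standard HiveConf accessors (nothing here beyond what the hunks declare):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class ConfSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Defaults to false; when true, table-level NDV is estimated from
        // per-partition NDVs via a density function.
        boolean useDensity = conf.getBoolVar(
            HiveConf.ConfVars.HIVE_METASTORE_STATS_NDV_DENSITY_FUNCTION);
        // Must be one of NONE, EXECUTION, PERFORMANCE, VERBOSE; the StringSet
        // validator rejects anything else.
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL, "VERBOSE");
        System.out.println("density function enabled: " + useDensity);
      }
    }
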
Modified: hive/branches/hbase-metastore/common/src/java/org/apache/hive/common/util/DateUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/common/src/java/org/apache/hive/common/util/DateUtils.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/common/src/java/org/apache/hive/common/util/DateUtils.java (original)
+++ hive/branches/hbase-metastore/common/src/java/org/apache/hive/common/util/DateUtils.java Tue Apr 14 14:47:30 2015
@@ -65,5 +65,12 @@ public class DateUtils {
       long totalNanos) {
     intervalDayTime.set(totalNanos / NANOS_PER_SEC, (int) (totalNanos % NANOS_PER_SEC));
   }
-}
 
+  public static long getIntervalDayTimeTotalSecondsFromTotalNanos(long totalNanos) {
+    return totalNanos / NANOS_PER_SEC;
+  }
+
+  public static int getIntervalDayTimeNanosFromTotalNanos(long totalNanos) {
+    return (int) (totalNanos % NANOS_PER_SEC);
+  }
+}

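The two new static helpers expose the same quotient/remainder split the setter above performs, without requiring an interval object. A worked example:

    import org.apache.hive.common.util.DateUtils;

    public class IntervalSplitSketch {
      public static void main(String[] args) {
        long totalNanos = 90000000123L; // 90 seconds plus 123 nanoseconds
        long seconds = DateUtils.getIntervalDayTimeTotalSecondsFromTotalNanos(totalNanos); // 90
        int nanos = DateUtils.getIntervalDayTimeNanosFromTotalNanos(totalNanos);           // 123
        System.out.println(seconds + "s + " + nanos + "ns");
      }
    }
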
Modified: hive/branches/hbase-metastore/data/conf/hive-log4j.properties
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/data/conf/hive-log4j.properties?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/data/conf/hive-log4j.properties (original)
+++ hive/branches/hbase-metastore/data/conf/hive-log4j.properties Tue Apr 14 14:47:30 2015
@@ -91,7 +91,7 @@ log4j.logger.org.apache.zookeeper.server
 log4j.logger.org.apache.zookeeper.ClientCnxn=WARN,DRFA
 log4j.logger.org.apache.zookeeper.ClientCnxnSocket=WARN,DRFA
 log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA
-log4j.logger.org.apache.hadoop.hive.ql.log.PerfLogger=WARN,DRFA
+log4j.logger.org.apache.hadoop.hive.ql.log.PerfLogger=${hive.ql.log.PerfLogger.level}
 log4j.logger.org.apache.hadoop.hive.ql.exec.Operator=INFO,DRFA
 log4j.logger.org.apache.hadoop.hive.serde2.lazy=INFO,DRFA
 log4j.logger.org.apache.hadoop.hive.metastore.ObjectStore=INFO,DRFA

Modified: hive/branches/hbase-metastore/data/conf/hive-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/data/conf/hive-site.xml?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/data/conf/hive-site.xml (original)
+++ hive/branches/hbase-metastore/data/conf/hive-site.xml Tue Apr 14 14:47:30 2015
@@ -240,6 +240,11 @@
   <description>Using dummy param to test server specific configuration</description>
 </property>
 
+<property>
+  <name>hive.ql.log.PerfLogger.level</name>
+  <value>WARN,DRFA</value>
+  <description>Used to change the perflogger level</description>
+</property>
 
 <property>
   <name>hive.fetch.task.conversion</name>

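This property pairs with the hive-log4j.properties change above: the PerfLogger line now reads ${hive.ql.log.PerfLogger.level}, and log4j 1.x resolves such placeholders against system properties when it loads the configuration. A hedged sketch of how a test might override the level, assuming the property is set before log4j initialization:

    public class PerfLoggerLevelSketch {
      public static void main(String[] args) {
        // The hive-site.xml default above is WARN,DRFA; overriding the system
        // property changes what ${hive.ql.log.PerfLogger.level} expands to.
        System.setProperty("hive.ql.log.PerfLogger.level", "INFO,DRFA");
      }
    }
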
Modified: hive/branches/hbase-metastore/data/files/parquet_types.txt
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/data/files/parquet_types.txt?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/data/files/parquet_types.txt (original)
+++ hive/branches/hbase-metastore/data/files/parquet_types.txt Tue Apr 14 14:47:30 2015
@@ -1,22 +1,22 @@
-100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111|a   |a  |B4F3CAFDBEDD|k1:v1|101,200|10,abc|2011-01-01
-101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222|ab  |ab |68692CCAC0BDE7|k2:v2|102,200|10,def|2012-02-02
-102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333|abc|abc|B4F3CAFDBEDD|k3:v3|103,200|10,ghi|2013-03-03
-103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444|abcd|abcd|68692CCAC0BDE7|k4:v4|104,200|10,jkl|2014-04-04
-104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555|abcde|abcde|B4F3CAFDBEDD|k5:v5|105,200|10,mno|2015-05-05
-105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666|abcdef|abcdef|68692CCAC0BDE7|k6:v6|106,200|10,pqr|2016-06-06
-106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777|abcdefg|abcdefg|B4F3CAFDBEDD|k7:v7|107,200|10,stu|2017-07-07
-107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888|bcdefg|abcdefgh|68692CCAC0BDE7|k8:v8|108,200|10,vwx|2018-08-08
-108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999|cdefg|B4F3CAFDBEDD|68656C6C6F|k9:v9|109,200|10,yza|2019-09-09
-109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101|klmno|abcdedef|68692CCAC0BDE7|k10:v10|110,200|10,bcd|2020-10-10
-110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111|pqrst|abcdede|B4F3CAFDBEDD|k11:v11|111,200|10,efg|2021-11-11
-111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121|nopqr|abcded|68692CCAC0BDE7|k12:v12|112,200|10,hij|2022-12-12
-112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131|opqrs|abcdd|B4F3CAFDBEDD|k13:v13|113,200|10,klm|2023-01-02
-113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141|pqrst|abc|68692CCAC0BDE7|k14:v14|114,200|10,nop|2024-02-02
-114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151|qrstu|b|B4F3CAFDBEDD|k15:v15|115,200|10,qrs|2025-03-03
-115|1|1|1.0|4.5|qrs|2026-04-04 16:16:16.161616161|rstuv|abcded|68692CCAC0BDE7|k16:v16|116,200|10,qrs|2026-04-04
-116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171|stuvw|abcded|B4F3CAFDBEDD|k17:v17|117,200|10,wxy|2027-05-05
-117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181|tuvwx|abcded|68692CCAC0BDE7|k18:v18|118,200|10,zab|2028-06-06
-118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191|uvwzy|abcdede|B4F3CAFDBEDD|k19:v19|119,200|10,cde|2029-07-07
-119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202|vwxyz|abcdede|68692CCAC0BDE7|k20:v20|120,200|10,fgh|2030-08-08
-120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212|wxyza|abcde|B4F3CAFDBEDD|k21:v21|121,200|10,ijk|2031-09-09
-121|1|2|1.1|6.3|lmn|2032-10-10 22:22:22.222222222|bcdef|abcde||k22:v22|122,200|10,lmn|2032-10-10
\ No newline at end of file
+100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111|a   |a  |B4F3CAFDBEDD|k1:v1|101,200|10,abc|2011-01-01|48.88
+101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222|ab  |ab |68692CCAC0BDE7|k2:v2|102,200|10,def|2012-02-02|8.72
+102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333|abc|abc|B4F3CAFDBEDD|k3:v3|103,200|10,ghi|2013-03-03|90.21
+103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444|abcd|abcd|68692CCAC0BDE7|k4:v4|104,200|10,jkl|2014-04-04|3.89
+104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555|abcde|abcde|B4F3CAFDBEDD|k5:v5|105,200|10,mno|2015-05-05|56.23
+105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666|abcdef|abcdef|68692CCAC0BDE7|k6:v6|106,200|10,pqr|2016-06-06|90.21
+106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777|abcdefg|abcdefg|B4F3CAFDBEDD|k7:v7|107,200|10,stu|2017-07-07|6.09
+107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888|bcdefg|abcdefgh|68692CCAC0BDE7|k8:v8|108,200|10,vwx|2018-08-08|9.44
+108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999|cdefg|B4F3CAFDBEDD|68656C6C6F|k9:v9|109,200|10,yza|2019-09-09|77.54
+109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101|klmno|abcdedef|68692CCAC0BDE7|k10:v10|110,200|10,bcd|2020-10-10|25.42
+110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111|pqrst|abcdede|B4F3CAFDBEDD|k11:v11|111,200|10,efg|2021-11-11|60.12
+111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121|nopqr|abcded|68692CCAC0BDE7|k12:v12|112,200|10,hij|2022-12-12|49.56
+112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131|opqrs|abcdd|B4F3CAFDBEDD|k13:v13|113,200|10,klm|2023-01-02|80.76
+113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141|pqrst|abc|68692CCAC0BDE7|k14:v14|114,200|10,nop|2024-02-02|23.23
+114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151|qrstu|b|B4F3CAFDBEDD|k15:v15|115,200|10,qrs|2025-03-03|1.01
+115|1|1|1.0|4.5|qrs|2026-04-04 16:16:16.161616161|rstuv|abcded|68692CCAC0BDE7|k16:v16|116,200|10,qrs|2026-04-04|5.98
+116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171|stuvw|abcded|B4F3CAFDBEDD|k17:v17|117,200|10,wxy|2027-05-05|11.22
+117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181|tuvwx|abcded|68692CCAC0BDE7|k18:v18|118,200|10,zab|2028-06-06|9.88
+118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191|uvwzy|abcdede|B4F3CAFDBEDD|k19:v19|119,200|10,cde|2029-07-07|4.76
+119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202|vwxyz|abcdede|68692CCAC0BDE7|k20:v20|120,200|10,fgh|2030-08-08|12.83
+120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212|wxyza|abcde|B4F3CAFDBEDD|k21:v21|121,200|10,ijk|2031-09-09|73.04
+121|1|2|1.1|6.3|lmn|2032-10-10 22:22:22.222222222|bcdef|abcde||k22:v22|122,200|10,lmn|2032-10-10|90.33
\ No newline at end of file

Modified: hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java (original)
+++ hive/branches/hbase-metastore/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java Tue Apr 14 14:47:30 2015
@@ -24,7 +24,9 @@ import org.apache.hadoop.hive.cli.CliSes
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.util.Shell;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
@@ -81,6 +83,10 @@ public class HCatBaseTest {
     hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, "");
     hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR);
+
+    if (Shell.WINDOWS) {
+      WindowsPathUtil.convertPathsFromWindowsToHdfs(hiveConf);
+    }
   }
 
   protected void logAndRegister(PigServer server, String query) throws IOException {

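The same Shell.WINDOWS guard recurs in the three TestHCatLoader* diffs that follow; isolated, the pattern is (identifiers exactly as in the hunks, the surrounding class hypothetical):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.WindowsPathUtil;
    import org.apache.hadoop.util.Shell;

    public class WindowsPathSketch {
      public static void main(String[] args) {
        HiveConf hiveConf = new HiveConf();
        if (Shell.WINDOWS) {
          // Rewrite Windows drive-letter paths held in the conf into a form
          // the HDFS-backed test setup can consume.
          WindowsPathUtil.convertPathsFromWindowsToHdfs(hiveConf);
        }
      }
    }
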
Modified: hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java (original)
+++ hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java Tue Apr 14 14:47:30 2015
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.cli.CliSes
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -51,6 +52,7 @@ import org.apache.hadoop.hive.ql.session
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapreduce.Job;
 
+import org.apache.hadoop.util.Shell;
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.common.HCatConstants;
@@ -179,6 +181,11 @@ public class TestHCatLoader {
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
+
+    if (Shell.WINDOWS) {
+      WindowsPathUtil.convertPathsFromWindowsToHdfs(hiveConf);
+    }
+
     driver = new Driver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
 

Modified: hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java (original)
+++ hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java Tue Apr 14 14:47:30 2015
@@ -33,11 +33,13 @@ import org.apache.hadoop.hive.cli.CliSes
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
+import org.apache.hadoop.util.Shell;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -123,6 +125,11 @@ public class TestHCatLoaderComplexSchema
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+    if (Shell.WINDOWS) {
+      WindowsPathUtil.convertPathsFromWindowsToHdfs(hiveConf);
+    }
+
     driver = new Driver(hiveConf);
     SessionState.start(new CliSessionState(hiveConf));
     //props = new Properties();

Modified: hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java (original)
+++ hive/branches/hbase-metastore/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java Tue Apr 14 14:47:30 2015
@@ -42,6 +42,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
@@ -58,6 +59,7 @@ import org.apache.hadoop.io.WritableComp
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.util.Shell;
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.data.HCatRecord;
@@ -177,6 +179,11 @@ public class TestHCatLoaderEncryption {
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
     hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
+
+    if (Shell.WINDOWS) {
+      WindowsPathUtil.convertPathsFromWindowsToHdfs(hiveConf);
+    }
+
     driver = new Driver(hiveConf);
 
     checkShimLoaderVersion();

Modified: hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java (original)
+++ hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java Tue Apr 14 14:47:30 2015
@@ -29,7 +29,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
@@ -44,7 +43,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.messaging.MessageFactory;
 
-import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 /**
@@ -146,11 +144,9 @@ public class DbNotificationListener exte
     NotificationEvent event = new NotificationEvent(0, now(),
         HCatConstants.HCAT_ALTER_TABLE_EVENT,
         msgFactory.buildAlterTableMessage(before, after).toString());
-    if (event != null) {
-      event.setDbName(after.getDbName());
-      event.setTableName(after.getTableName());
-      enqueue(event);
-    }
+    event.setDbName(after.getDbName());
+    event.setTableName(after.getTableName());
+    enqueue(event);
   }
 
   /**
@@ -162,7 +158,7 @@ public class DbNotificationListener exte
     Table t = partitionEvent.getTable();
     NotificationEvent event = new NotificationEvent(0, now(),
         HCatConstants.HCAT_ADD_PARTITION_EVENT,
-        msgFactory.buildAddPartitionMessage(t, partitionEvent.getPartitions()).toString());
+        msgFactory.buildAddPartitionMessage(t, partitionEvent.getPartitionIterator()).toString());
     event.setDbName(t.getDbName());
     event.setTableName(t.getTableName());
     enqueue(event);
@@ -192,11 +188,9 @@ public class DbNotificationListener exte
     NotificationEvent event = new NotificationEvent(0, now(),
         HCatConstants.HCAT_ALTER_PARTITION_EVENT,
         msgFactory.buildAlterPartitionMessage(before, after).toString());
-    if (event != null) {
-      event.setDbName(before.getDbName());
-      event.setTableName(before.getTableName());
-      enqueue(event);
-    }
+    event.setDbName(before.getDbName());
+    event.setTableName(before.getTableName());
+    enqueue(event);
   }
 
   /**

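The first two hunks above delete guards that could never fire: event is assigned from a constructor call immediately beforehand, and new either returns an object or throws, so the 'if (event != null)' check was dead code. A minimal sketch (constructor arguments abbreviated from the hunk; values hypothetical):

    import org.apache.hadoop.hive.metastore.api.NotificationEvent;

    public class DeadGuardSketch {
      public static void main(String[] args) {
        NotificationEvent event = new NotificationEvent(
            0, (int) (System.currentTimeMillis() / 1000), "HCAT_ALTER_TABLE_EVENT", "{}");
        // No null check needed: 'new' cannot yield null.
        event.setDbName("default");
        event.setTableName("t");
        System.out.println(event);
      }
    }
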
Modified: hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java (original)
+++ hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/NotificationListener.java Tue Apr 14 14:47:30 2015
@@ -21,8 +21,6 @@ package org.apache.hive.hcatalog.listene
 
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 
 import javax.jms.Connection;
 import javax.jms.ConnectionFactory;
@@ -130,15 +128,14 @@ public class NotificationListener extend
     // and message selector string as "HCAT_EVENT = HCAT_ADD_PARTITION"
     if (partitionEvent.getStatus()) {
       Table table = partitionEvent.getTable();
-      List<Partition> partitions = partitionEvent.getPartitions();
       String topicName = getTopicName(table);
       if (topicName != null && !topicName.equals("")) {
-        send(messageFactory.buildAddPartitionMessage(table, partitions), topicName);
+        send(messageFactory.buildAddPartitionMessage(table, partitionEvent.getPartitionIterator()), topicName);
       } else {
         LOG.info("Topic name not found in metastore. Suppressing HCatalog notification for "
-            + partitions.get(0).getDbName()
+            + partitionEvent.getTable().getDbName()
             + "."
-            + partitions.get(0).getTableName()
+            + partitionEvent.getTable().getTableName()
             + " To enable notifications for this table, please do alter table set properties ("
             + HCatConstants.HCAT_MSGBUS_TOPIC_NAME
             + "=<dbname>.<tablename>) or whatever you want topic name to be.");

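For readers following the suppression message above: notifications are enabled per table by setting the topic-name table property. A hedged sketch of the command, assuming HCatConstants.HCAT_MSGBUS_TOPIC_NAME resolves to the key 'hcat.msgbus.topic.name' (the constant's value is not shown in this diff):

    ALTER TABLE myDb.myTable SET TBLPROPERTIES ('hcat.msgbus.topic.name' = 'myDb.myTable');
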
Modified: hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java (original)
+++ hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java Tue Apr 14 14:47:30 2015
@@ -20,16 +20,14 @@
 package org.apache.hive.hcatalog.messaging;
 
 import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.common.classification.InterfaceAudience;
-import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hive.hcatalog.messaging.json.JSONMessageFactory;
 
+import java.util.Iterator;
 import java.util.List;
 
 /**
@@ -140,20 +138,10 @@ public abstract class MessageFactory {
     /**
      * Factory method for AddPartitionMessage.
      * @param table The Table to which the partitions are added.
-     * @param partitions The set of Partitions being added.
+     * @param partitions An iterator over the Partitions being added.
      * @return AddPartitionMessage instance.
      */
-    public abstract AddPartitionMessage buildAddPartitionMessage(Table table, List<Partition> partitions);
-
-  /**
-   * Factory method for AddPartitionMessage.
-   * @param table The Table to which the partitions are added.
-   * @param partitionSpec The set of Partitions being added.
-   * @return AddPartitionMessage instance.
-   */
-  @InterfaceAudience.LimitedPrivate({"Hive"})
-  @InterfaceStability.Evolving
-  public abstract AddPartitionMessage buildAddPartitionMessage(Table table, PartitionSpecProxy partitionSpec);
+  public abstract AddPartitionMessage buildAddPartitionMessage(Table table, Iterator<Partition> partitions);
 
   /**
    * Factory method for building AlterPartitionMessage

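The net effect of this hunk is an API consolidation: the List-based and PartitionSpecProxy-based overloads collapse into a single Iterator-based buildAddPartitionMessage. A hedged sketch of how a List-based call site adapts; the helper method below is illustrative and only the new signature is taken from the diff. PartitionSpecProxy callers can likewise pass partitionSpec.getPartitionIterator(), which iterates Partitions, as the removed JSONMessageFactory helper further down shows.

    // Illustrative adaptor, not part of the patch: funnel an existing
    // List<Partition> through the new single Iterator<Partition> overload.
    static AddPartitionMessage toAddPartitionMessage(MessageFactory factory, Table table,
                                                     List<Partition> added) {
      return factory.buildAddPartitionMessage(table, added.iterator());
    }
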
Modified: hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java (original)
+++ hive/branches/hbase-metastore/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java Tue Apr 14 14:47:30 2015
@@ -19,14 +19,14 @@
 
 package org.apache.hive.hcatalog.messaging.json;
 
+import com.google.common.base.Function;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.classification.InterfaceAudience;
-import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 import org.apache.hive.hcatalog.messaging.AddPartitionMessage;
 import org.apache.hive.hcatalog.messaging.AlterPartitionMessage;
 import org.apache.hive.hcatalog.messaging.AlterTableMessage;
@@ -39,7 +39,12 @@ import org.apache.hive.hcatalog.messagin
 import org.apache.hive.hcatalog.messaging.MessageDeserializer;
 import org.apache.hive.hcatalog.messaging.MessageFactory;
 
-import java.util.*;
+import javax.annotation.Nullable;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
 
 /**
  * The JSON implementation of the MessageFactory. Constructs JSON implementations of
@@ -98,17 +103,9 @@ public class JSONMessageFactory extends
   }
 
   @Override
-  public AddPartitionMessage buildAddPartitionMessage(Table table, List<Partition> partitions) {
+  public AddPartitionMessage buildAddPartitionMessage(Table table, Iterator<Partition> partitionsIterator) {
     return new JSONAddPartitionMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL, table.getDbName(),
-        table.getTableName(), getPartitionKeyValues(table, partitions), now());
-  }
-
-  @Override
-  @InterfaceAudience.LimitedPrivate({"Hive"})
-  @InterfaceStability.Evolving
-  public AddPartitionMessage buildAddPartitionMessage(Table table, PartitionSpecProxy partitionSpec) {
-    return new JSONAddPartitionMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL, table.getDbName(),
-        table.getTableName(), getPartitionKeyValues(table, partitionSpec), now());
+        table.getTableName(), getPartitionKeyValues(table, partitionsIterator), now());
   }
 
   @Override
@@ -142,22 +139,12 @@ public class JSONMessageFactory extends
     return partitionKeys;
   }
 
-  private static List<Map<String, String>> getPartitionKeyValues(Table table, List<Partition> partitions) {
-    List<Map<String, String>> partitionList = new ArrayList<Map<String, String>>(partitions.size());
-    for (Partition partition : partitions)
-      partitionList.add(getPartitionKeyValues(table, partition));
-    return partitionList;
-  }
-
-  @InterfaceAudience.LimitedPrivate({"Hive"})
-  @InterfaceStability.Evolving
-  private static List<Map<String, String>> getPartitionKeyValues(Table table, PartitionSpecProxy partitionSpec) {
-    List<Map<String, String>> partitionList = new ArrayList<Map<String, String>>();
-    PartitionSpecProxy.PartitionIterator iterator = partitionSpec.getPartitionIterator();
-    while (iterator.hasNext()) {
-      Partition partition = iterator.next();
-      partitionList.add(getPartitionKeyValues(table, partition));
-    }
-    return partitionList;
+  private static List<Map<String, String>> getPartitionKeyValues(final Table table, Iterator<Partition> iterator) {
+    return Lists.newArrayList(Iterators.transform(iterator, new Function<Partition, Map<String, String>>() {
+      @Override
+      public Map<String, String> apply(@Nullable Partition partition) {
+        return getPartitionKeyValues(table, partition);
+      }
+    }));
   }
 }

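The rewritten getPartitionKeyValues() above leans on Guava: Iterators.transform() wraps the source iterator lazily, and Lists.newArrayList() materializes the transformed elements once. A self-contained illustration of the same pattern (stand-in types only; nothing below beyond the Guava calls is taken from Hive):

    import com.google.common.base.Function;
    import com.google.common.collect.Iterators;
    import com.google.common.collect.Lists;

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class TransformDemo {
      public static void main(String[] args) {
        // Transform lazily, then copy into a concrete list, mirroring
        // Lists.newArrayList(Iterators.transform(...)) in the patch.
        Iterator<String> regions = Arrays.asList("us", "uk", "in").iterator();
        List<Integer> lengths = Lists.newArrayList(
            Iterators.transform(regions, new Function<String, Integer>() {
              @Override
              public Integer apply(String region) {
                // Stands in for getPartitionKeyValues(table, partition).
                return region.length();
              }
            }));
        System.out.println(lengths); // prints [2, 2, 2]
      }
    }
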
Modified: hive/branches/hbase-metastore/hcatalog/webhcat/java-client/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/webhcat/java-client/pom.xml?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/webhcat/java-client/pom.xml (original)
+++ hive/branches/hbase-metastore/hcatalog/webhcat/java-client/pom.xml Tue Apr 14 14:47:30 2015
@@ -46,6 +46,11 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-server-extensions</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-exec</artifactId>
       <version>${project.version}</version>

Modified: hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java (original)
+++ hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java Tue Apr 14 14:47:30 2015
@@ -18,6 +18,7 @@
  */
 package org.apache.hive.hcatalog.api;
 
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
@@ -27,6 +28,7 @@ import org.apache.hadoop.hive.common.cla
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hive.hcatalog.api.repl.ReplicationTask;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 
@@ -379,6 +381,24 @@ public abstract class HCatClient {
     throws HCatException;
 
   /**
+   * Drops partition(s) that match the specified (and possibly partial) partition specification.
+   * A partial partition-specification is one where not all partition-keys have associated values. For example,
+   * for a table ('myDb.myTable') with 2 partition keys (dt string, region string),
+   * if for each dt ('20120101', '20120102', etc.) there can exist 3 regions ('us', 'uk', 'in'), then,
+   *  1. Complete partition spec: dropPartitions('myDb', 'myTable', {dt='20120101', region='us'}) would drop 1 partition.
+   *  2. Partial  partition spec: dropPartitions('myDb', 'myTable', {dt='20120101'}) would drop all 3 partitions,
+   *                              with dt='20120101' (i.e. region = 'us', 'uk' and 'in').
+   * @param dbName The database name.
+   * @param tableName The table name.
+   * @param partitionSpec The partition specification, {[col_name,value],[col_name2,value2]}.
+   * @param ifExists Hive returns an error if the partition specified does not exist, unless ifExists is set to true.
+   * @param deleteData Whether to delete the underlying data.
+   * @throws HCatException, ConnectionFailureException
+   */
+   public abstract void dropPartitions(String dbName, String tableName,
+                    Map<String, String> partitionSpec, boolean ifExists, boolean deleteData)
+    throws HCatException;
+  /**
    * List partitions by filter.
    *
    * @param dbName The database name.
@@ -467,6 +487,23 @@ public abstract class HCatClient {
    */
   public abstract String getMessageBusTopicName(String dbName, String tableName) throws HCatException;
 
+
+  /**
+   * Get an iterator that iterates over a list of replication tasks needed to replicate all the
+   * events that have taken place for a given db/table.
+   * @param lastEventId : The last event id that was processed for this reader. The returned
+   *                    replication tasks will start from this point forward
+   * @param maxEvents : Maximum number of events to consider for generating the
+   *                  replication tasks. If < 1, then all available events will be considered.
+   * @param dbName : The database name whose events we are interested in.
+   * @param tableName : The table name whose events we are interested in; if null,
+   *                  this function behaves as if it were running at the db level.
+   * @return an iterator over a list of replication events that can be processed one by one.
+   * @throws HCatException
+   */
+  public abstract Iterator<ReplicationTask> getReplicationTasks(
+      long lastEventId, int maxEvents, String dbName, String tableName) throws HCatException;
+
   /**
    * Get a list of notifications
    * @param lastEventId The last event id that was consumed by this reader.  The returned

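A hedged usage sketch for the new five-argument dropPartitions() overload documented above, reusing the names from the javadoc's own example ('myDb', 'myTable', partition key dt); the client variable is assumed to be an already-connected HCatClient:

    // Partial spec: drops all regions' partitions for dt='20120101',
    // but keeps the underlying data (deleteData = false).
    Map<String, String> partialSpec = new HashMap<String, String>();
    partialSpec.put("dt", "20120101");
    client.dropPartitions("myDb", "myTable", partialSpec,
        true,    // ifExists: do not fail if no partition matches
        false);  // deleteData: leave the files in place
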
Modified: hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java (original)
+++ hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java Tue Apr 14 14:47:30 2015
@@ -21,9 +21,11 @@ package org.apache.hive.hcatalog.api;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
+import com.google.common.base.Function;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.apache.commons.lang.StringUtils;
@@ -63,6 +65,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.hcatalog.api.repl.HCatReplicationTaskIterator;
+import org.apache.hive.hcatalog.api.repl.ReplicationTask;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.common.HCatUtil;
@@ -72,6 +76,8 @@ import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.annotation.Nullable;
+
 /**
  * The HCatClientHMSImpl is the Hive Metastore client based implementation of
  * HCatClient.
@@ -567,33 +573,35 @@ public class HCatClientHMSImpl extends H
         && "TRUE".equalsIgnoreCase(table.getParameters().get("EXTERNAL"));
   }
 
-  private void dropPartitionsUsingExpressions(Table table, Map<String, String> partitionSpec, boolean ifExists)
-    throws SemanticException, TException {
+  private void dropPartitionsUsingExpressions(Table table, Map<String, String> partitionSpec,
+                                              boolean ifExists, boolean deleteData)
+      throws SemanticException, TException {
     LOG.info("HCatClient: Dropping partitions using partition-predicate Expressions.");
     ExprNodeGenericFuncDesc partitionExpression = new ExpressionBuilder(table, partitionSpec).build();
     ObjectPair<Integer, byte[]> serializedPartitionExpression =
         new ObjectPair<Integer, byte[]>(partitionSpec.size(),
             Utilities.serializeExpressionToKryo(partitionExpression));
     hmsClient.dropPartitions(table.getDbName(), table.getTableName(), Arrays.asList(serializedPartitionExpression),
-        !isExternal(table),  // Delete data?
-        false,               // Ignore Protection?
-        ifExists,            // Fail if table doesn't exist?
-        false);              // Need results back?
+        deleteData && !isExternal(table),  // Delete data?
+        false,                             // Ignore Protection?
+        ifExists,                          // Fail if table doesn't exist?
+        false);                            // Need results back?
   }
 
   private void dropPartitionsIteratively(String dbName, String tableName,
-                                         Map<String, String> partitionSpec, boolean ifExists) throws HCatException, TException {
+                                         Map<String, String> partitionSpec, boolean ifExists, boolean deleteData)
+      throws HCatException, TException {
     LOG.info("HCatClient: Dropping partitions iteratively.");
     List<Partition> partitions = hmsClient.listPartitionsByFilter(dbName, tableName,
         getFilterString(partitionSpec), (short) -1);
     for (Partition partition : partitions) {
-      dropPartition(partition, ifExists);
+      dropPartition(partition, ifExists, deleteData);
     }
   }
 
   @Override
   public void dropPartitions(String dbName, String tableName,
-                 Map<String, String> partitionSpec, boolean ifExists)
+                 Map<String, String> partitionSpec, boolean ifExists, boolean deleteData)
     throws HCatException {
     LOG.info("HCatClient dropPartitions(db=" + dbName + ",table=" + tableName + ", partitionSpec: ["+ partitionSpec + "]).");
     try {
@@ -602,17 +610,17 @@ public class HCatClientHMSImpl extends H
 
       if (hiveConfig.getBoolVar(HiveConf.ConfVars.METASTORE_CLIENT_DROP_PARTITIONS_WITH_EXPRESSIONS)) {
         try {
-          dropPartitionsUsingExpressions(table, partitionSpec, ifExists);
+          dropPartitionsUsingExpressions(table, partitionSpec, ifExists, deleteData);
         }
         catch (SemanticException parseFailure) {
           LOG.warn("Could not push down partition-specification to back-end, for dropPartitions(). Resorting to iteration.",
               parseFailure);
-          dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists);
+          dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists, deleteData);
         }
       }
       else {
         // Not using expressions.
-        dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists);
+        dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists, deleteData);
       }
     } catch (NoSuchObjectException e) {
       throw new ObjectNotFoundException(
@@ -627,10 +635,16 @@ public class HCatClientHMSImpl extends H
     }
   }
 
-  private void dropPartition(Partition partition, boolean ifExists)
+  @Override
+  public void dropPartitions(String dbName, String tableName,
+                             Map<String, String> partitionSpec, boolean ifExists) throws HCatException {
+    dropPartitions(dbName, tableName, partitionSpec, ifExists, true);
+  }
+
+  private void dropPartition(Partition partition, boolean ifExists, boolean deleteData)
     throws HCatException, MetaException, TException {
     try {
-      hmsClient.dropPartition(partition.getDbName(), partition.getTableName(), partition.getValues());
+      hmsClient.dropPartition(partition.getDbName(), partition.getTableName(), partition.getValues(), deleteData);
     } catch (NoSuchObjectException e) {
       if (!ifExists) {
         throw new ObjectNotFoundException(
@@ -965,18 +979,27 @@ public class HCatClientHMSImpl extends H
   }
 
   @Override
+  public Iterator<ReplicationTask> getReplicationTasks(
+      long lastEventId, int maxEvents, String dbName, String tableName) throws HCatException {
+    return new HCatReplicationTaskIterator(this, lastEventId, maxEvents, dbName, tableName);
+  }
+
+  @Override
   public List<HCatNotificationEvent> getNextNotification(long lastEventId, int maxEvents,
                                                          IMetaStoreClient.NotificationFilter filter)
       throws HCatException {
     try {
-      List<HCatNotificationEvent> events = new ArrayList<HCatNotificationEvent>();
       NotificationEventResponse rsp = hmsClient.getNextNotification(lastEventId, maxEvents, filter);
       if (rsp != null && rsp.getEvents() != null) {
-        for (NotificationEvent event : rsp.getEvents()) {
-          events.add(new HCatNotificationEvent(event));
-        }
+        return Lists.transform(rsp.getEvents(), new Function<NotificationEvent, HCatNotificationEvent>() {
+          @Override
+          public HCatNotificationEvent apply(@Nullable NotificationEvent notificationEvent) {
+            return new HCatNotificationEvent(notificationEvent);
+          }
+        });
+      } else {
+        return new ArrayList<HCatNotificationEvent>();
       }
-      return events;
     } catch (TException e) {
       throw new ConnectionFailureException("TException while getting notifications", e);
     }

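One behavioral nuance in the getNextNotification() rewrite above: unlike the old eager copy loop, Guava's Lists.transform() returns a lazy view of the backing list, so each HCatNotificationEvent is constructed on access and re-constructed on repeated access. A hedged fragment illustrating the view semantics (generic types, not Hive code):

    List<String> source = Arrays.asList("a", "b");
    List<Integer> view = Lists.transform(source, new Function<String, Integer>() {
      @Override
      public Integer apply(String s) {
        System.out.println("transforming " + s); // fires on every access
        return s.length();
      }
    });
    view.get(0); // prints "transforming a"
    view.get(0); // prints "transforming a" again: the view caches nothing
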
Modified: hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java (original)
+++ hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java Tue Apr 14 14:47:30 2015
@@ -32,6 +32,8 @@ public class HCatNotificationEvent {
   private String tableName;
   private String message;
 
+  public enum Scope { DB, TABLE, UNKNOWN };
+
   HCatNotificationEvent(NotificationEvent event) {
     eventId = event.getEventId();
     eventTime = event.getEventTime();
@@ -45,6 +47,20 @@ public class HCatNotificationEvent {
     return eventId;
   }
 
+  public Scope getEventScope() {
+    // Eventually, we want this to be a richer description covering
+    // DB, TABLE, ROLE, etc. scopes. For now, we have a trivial impl
+    // that distinguishes only DB and TABLE scopes, as determined by
+    // whether or not the tableName is null.
+    if (dbName != null){
+      if (tableName != null){
+        return Scope.TABLE;
+      }
+      return Scope.DB;
+    }
+    return Scope.UNKNOWN;
+  }
+
   public int getEventTime() {
     return eventTime;
   }

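A hedged sketch of dispatching on the new Scope enum; handleDbEvent and handleTableEvent are hypothetical handlers, not part of this patch:

    // event obtained from HCatClient.getNextNotification(...)
    switch (event.getEventScope()) {
      case DB:
        handleDbEvent(event);    // hypothetical
        break;
      case TABLE:
        handleTableEvent(event); // hypothetical
        break;
      default:
        LOG.warn("Event " + event.getEventId() + " has no recognizable scope.");
    }
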
Modified: hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java (original)
+++ hive/branches/hbase-metastore/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java Tue Apr 14 14:47:30 2015
@@ -18,18 +18,24 @@
  */
 package org.apache.hive.hcatalog.api;
 
+import java.io.IOException;
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
 
+import com.google.common.base.Function;
+import com.google.common.collect.Iterables;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
 import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
@@ -42,12 +48,17 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hive.hcatalog.api.repl.Command;
+import org.apache.hive.hcatalog.api.repl.ReplicationTask;
+import org.apache.hive.hcatalog.api.repl.ReplicationUtils;
+import org.apache.hive.hcatalog.api.repl.StagingDirectoryProvider;
 import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;
 import org.apache.hive.hcatalog.NoExitSecurityManager;
+import org.apache.hive.hcatalog.listener.DbNotificationListener;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -63,6 +74,8 @@ import static org.junit.Assert.assertArr
 
 import org.apache.hadoop.util.Shell;
 
+import javax.annotation.Nullable;
+
 public class TestHCatClient {
   private static final Logger LOG = LoggerFactory.getLogger(TestHCatClient.class);
   private static final String msPort = "20101";
@@ -71,6 +84,8 @@ public class TestHCatClient {
   private static final String replicationTargetHCatPort = "20102";
   private static HiveConf replicationTargetHCatConf;
   private static SecurityManager securityManager;
+  private static boolean useExternalMS = false;
+  private static boolean useExternalMSForReplication = false;
 
   private static class RunMS implements Runnable {
 
@@ -101,18 +116,28 @@ public class TestHCatClient {
 
   @AfterClass
   public static void tearDown() throws Exception {
-    LOG.info("Shutting down metastore.");
-    System.setSecurityManager(securityManager);
+    if (!useExternalMS) {
+      LOG.info("Shutting down metastore.");
+      System.setSecurityManager(securityManager);
+    }
   }
 
   @BeforeClass
   public static void startMetaStoreServer() throws Exception {
 
     hcatConf = new HiveConf(TestHCatClient.class);
+    String metastoreUri = System.getProperty(HiveConf.ConfVars.METASTOREURIS.varname);
+    if (metastoreUri != null) {
+      hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUri);
+      useExternalMS = true;
+      return;
+    }
     if (Shell.WINDOWS) {
       WindowsPathUtil.convertPathsFromWindowsToHdfs(hcatConf);
     }
 
+    System.setProperty(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS.varname,
+        DbNotificationListener.class.getName()); // turn on db notification listener on metastore
     Thread t = new Thread(new RunMS(msPort));
     t.start();
     Thread.sleep(10000);
@@ -162,8 +187,12 @@ public class TestHCatClient {
     assertTrue(testDb.getProperties().size() == 0);
     String warehouseDir = System
       .getProperty("test.warehouse.dir", "/user/hive/warehouse");
-    String expectedDir = warehouseDir.replaceFirst("pfile:///", "pfile:/");
-    assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
+    if (useExternalMS) {
+      assertTrue(testDb.getLocation().matches(".*" + "/" + db + ".db"));
+    } else {
+      String expectedDir = warehouseDir.replaceFirst("pfile:///", "pfile:/");
+      assertEquals(expectedDir + "/" + db + ".db", testDb.getLocation());
+    }
     ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
     cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
     cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
@@ -213,7 +242,7 @@ public class TestHCatClient {
     assertEquals("checking " + serdeConstants.SERIALIZATION_NULL_FORMAT, Character.toString('\006'),
       table2.getSerdeParams().get(serdeConstants.SERIALIZATION_NULL_FORMAT));
     
-    assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase());
+    assertTrue(table2.getLocation().toLowerCase().matches(".*" + ("/" + db + ".db/" + tableTwo).toLowerCase()));
 
     HCatCreateTableDesc tableDesc3 = HCatCreateTableDesc.create(db,
       tableThree, cols).fileFormat("orcfile").build();
@@ -372,7 +401,7 @@ public class TestHCatClient {
       .ifNotExists(true).location("/tmp/" + dbName).build();
     client.createDatabase(dbDesc);
     HCatDatabase newDB = client.getDatabase(dbName);
-    assertTrue(newDB.getLocation().equalsIgnoreCase("file:/tmp/" + dbName));
+    assertTrue(newDB.getLocation().matches(".*/tmp/" + dbName));
     client.close();
   }
 
@@ -792,6 +821,113 @@ public class TestHCatClient {
     }
   }
 
+  /**
+   * Test for the event-based replication scenario.
+   *
+   * Does not test whether replication actually happened; it merely verifies that we can consume a
+   * replication-task iterator appropriately, calling all the functions expected of the interface, without errors.
+   */
+  @Test
+  public void testReplicationTaskIter() throws Exception {
+
+    HCatClient sourceMetastore = HCatClient.create(new Configuration(hcatConf));
+
+    List<HCatNotificationEvent> notifs = sourceMetastore.getNextNotification(
+        0, 0, new IMetaStoreClient.NotificationFilter() {
+      @Override
+      public boolean accept(NotificationEvent event) {
+        return true;
+      }
+    });
+    for(HCatNotificationEvent n : notifs){
+      LOG.info("notif from dblistener:" + n.getEventId()
+          + ":" + n.getEventTime() + ",t:" + n.getEventType() + ",o:" + n.getDbName() + "." + n.getTableName());
+    }
+
+    Iterator<ReplicationTask> taskIter = sourceMetastore.getReplicationTasks(0, 0, "mydb", null);
+    while(taskIter.hasNext()){
+      ReplicationTask task = taskIter.next();
+      HCatNotificationEvent n = task.getEvent();
+      LOG.info("notif from tasks:" + n.getEventId()
+          + ":" + n.getEventTime() + ",t:" + n.getEventType() + ",o:" + n.getDbName() + "." + n.getTableName()
+          + ",s:" + n.getEventScope());
+      LOG.info("task :" + task.getClass().getName());
+      if (task.needsStagingDirs()){
+        StagingDirectoryProvider provider = new StagingDirectoryProvider() {
+          @Override
+          public String getStagingDirectory(String key) {
+            LOG.info("getStagingDirectory(" + key + ") called!");
+            return "/tmp/" + key.replaceAll(" ","_");
+          }
+        };
+        task
+            .withSrcStagingDirProvider(provider)
+            .withDstStagingDirProvider(provider);
+      }
+      if (task.isActionable()){
+        LOG.info("task was actionable!");
+        Function<Command, String> commandDebugPrinter = new Function<Command, String>() {
+          @Override
+          public String apply(@Nullable Command cmd) {
+            StringBuilder sb = new StringBuilder();
+            String serializedCmd = null;
+            try {
+              serializedCmd = ReplicationUtils.serializeCommand(cmd);
+            } catch (IOException e) {
+              e.printStackTrace();
+              throw new RuntimeException(e);
+            }
+            sb.append("SERIALIZED:"+serializedCmd+"\n");
+            Command command = null;
+            try {
+              command = ReplicationUtils.deserializeCommand(serializedCmd);
+            } catch (IOException e) {
+              e.printStackTrace();
+              throw new RuntimeException(e);
+            }
+            sb.append("CMD:[" + command.getClass().getName() + "]\n");
+            sb.append("EVENTID:[" +command.getEventId()+"]\n");
+            for (String s : command.get()) {
+              sb.append("CMD:" + s);
+              sb.append("\n");
+            }
+            sb.append("Retriable:" + command.isRetriable() + "\n");
+            sb.append("Undoable:" + command.isUndoable() + "\n");
+            if (command.isUndoable()) {
+              for (String s : command.getUndo()) {
+                sb.append("UNDO:" + s);
+                sb.append("\n");
+              }
+            }
+            List<String> locns = command.cleanupLocationsPerRetry();
+            sb.append("cleanupLocationsPerRetry entries :" + locns.size());
+            for (String s : locns){
+              sb.append("RETRY_CLEANUP:"+s);
+              sb.append("\n");
+            }
+            locns = command.cleanupLocationsAfterEvent();
+            sb.append("cleanupLocationsAfterEvent entries :" + locns.size());
+            for (String s : locns){
+              sb.append("AFTER_EVENT_CLEANUP:"+s);
+              sb.append("\n");
+            }
+            return sb.toString();
+          }
+        };
+        LOG.info("On src:");
+        for (String s : Iterables.transform(task.getSrcWhCommands(), commandDebugPrinter)){
+          LOG.info(s);
+        }
+        LOG.info("On dest:");
+        for (String s : Iterables.transform(task.getDstWhCommands(), commandDebugPrinter)){
+          LOG.info(s);
+        }
+      } else {
+        LOG.info("task was not actionable.");
+      }
+    }
+  }
+
   /**
    * Test for detecting schema-changes for an HCatalog table, across 2 different HCat instances.
    * A table is created with the same schema on 2 HCat instances. The table-schema is modified on the source HCat


