hive-commits mailing list archives

From hashut...@apache.org
Subject svn commit: r1672173 [1/8] - in /hive/branches/cbo: ./ cli/src/java/org/apache/hadoop/hive/cli/ cli/src/test/org/apache/hadoop/hive/cli/ common/ common/src/java/org/apache/hadoop/hive/common/jsonexplain/ common/src/java/org/apache/hadoop/hive/conf/ hca...
Date Wed, 08 Apr 2015 20:49:47 GMT
Author: hashutosh
Date: Wed Apr  8 20:49:44 2015
New Revision: 1672173

URL: http://svn.apache.org/r1672173
Log:
Merged latest trunk into branch (Ashutosh Chauhan)

Added:
    hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/jsonexplain/
      - copied from r1672144, hive/trunk/common/src/java/org/apache/hadoop/hive/common/jsonexplain/
    hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/repl/
      - copied from r1672144, hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/repl/
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
      - copied unchanged from r1672144, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
    hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
      - copied unchanged from r1672144, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
    hive/branches/cbo/ql/src/test/queries/clientpositive/columnstats_part_coltype.q
      - copied unchanged from r1672144, hive/trunk/ql/src/test/queries/clientpositive/columnstats_part_coltype.q
    hive/branches/cbo/ql/src/test/queries/clientpositive/explainuser_1.q
      - copied unchanged from r1672144, hive/trunk/ql/src/test/queries/clientpositive/explainuser_1.q
    hive/branches/cbo/ql/src/test/queries/clientpositive/explainuser_2.q
      - copied unchanged from r1672144, hive/trunk/ql/src/test/queries/clientpositive/explainuser_2.q
    hive/branches/cbo/ql/src/test/queries/clientpositive/interval_udf.q
      - copied unchanged from r1672144, hive/trunk/ql/src/test/queries/clientpositive/interval_udf.q
    hive/branches/cbo/ql/src/test/queries/clientpositive/non_native_window_udf.q
      - copied unchanged from r1672144, hive/trunk/ql/src/test/queries/clientpositive/non_native_window_udf.q
    hive/branches/cbo/ql/src/test/queries/clientpositive/udf_months_between.q
      - copied unchanged from r1672144, hive/trunk/ql/src/test/queries/clientpositive/udf_months_between.q
    hive/branches/cbo/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
      - copied unchanged from r1672144, hive/trunk/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/interval_udf.q.out
      - copied unchanged from r1672144, hive/trunk/ql/src/test/results/clientpositive/interval_udf.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/non_native_window_udf.q.out
      - copied unchanged from r1672144, hive/trunk/ql/src/test/results/clientpositive/non_native_window_udf.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
      - copied unchanged from r1672144, hive/trunk/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/tez/explainuser_2.q.out
      - copied unchanged from r1672144, hive/trunk/ql/src/test/results/clientpositive/tez/explainuser_2.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/udf_months_between.q.out
      - copied unchanged from r1672144, hive/trunk/ql/src/test/results/clientpositive/udf_months_between.q.out
Modified:
    hive/branches/cbo/   (props changed)
    hive/branches/cbo/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
    hive/branches/cbo/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
    hive/branches/cbo/common/pom.xml
    hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/cbo/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
    hive/branches/cbo/hcatalog/webhcat/java-client/pom.xml
    hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
    hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
    hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java
    hive/branches/cbo/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
    hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/branches/cbo/itests/src/test/resources/testconfiguration.properties
    hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
    hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
    hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
    hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
    hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
    hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Context.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/CalciteSemanticException.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/reloperators/HiveProject.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/SqlFunctionConverter.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DemuxDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DependencyCollectionWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DescDatabaseDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DropDatabaseDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DummyStoreDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/DynamicPruningEventDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/GrantRevokeRoleDDL.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/LockDatabaseDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/LockTableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MergeJoinWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MuxDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/OrcFileMergeDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PrincipalDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RCFileMergeDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/SMBJoinDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionWork.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
    hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
    hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
    hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
    hive/branches/cbo/ql/src/test/results/clientpositive/interval_arithmetic.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/show_functions.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/spark/vector_between_in.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/tez/update_all_types.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/tez/vector_between_in.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/udf_day.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/udf_dayofmonth.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/udf_hour.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/udf_minute.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/udf_month.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/udf_second.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/update_all_types.q.out
    hive/branches/cbo/ql/src/test/results/clientpositive/vector_between_in.q.out
    hive/branches/cbo/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
    hive/branches/cbo/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
    hive/branches/cbo/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
    hive/branches/cbo/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
    hive/branches/cbo/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
    hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
    hive/branches/cbo/service/if/TCLIService.thrift
    hive/branches/cbo/service/src/gen/thrift/gen-cpp/TCLIService_constants.cpp
    hive/branches/cbo/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
    hive/branches/cbo/service/src/gen/thrift/gen-cpp/TCLIService_types.h
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBinaryColumn.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TBoolColumn.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TByteColumn.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCLIServiceConstants.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TDoubleColumn.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TGetTablesReq.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI16Column.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI32Column.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TI64Column.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionReq.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TOpenSessionResp.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TProtocolVersion.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRow.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TRowSet.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStatus.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TStringColumn.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTableSchema.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeDesc.java
    hive/branches/cbo/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TTypeId.java
    hive/branches/cbo/service/src/gen/thrift/gen-py/TCLIService/constants.py
    hive/branches/cbo/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
    hive/branches/cbo/service/src/gen/thrift/gen-rb/t_c_l_i_service_constants.rb
    hive/branches/cbo/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
    hive/branches/cbo/service/src/java/org/apache/hive/service/cli/ColumnValue.java
    hive/branches/cbo/service/src/java/org/apache/hive/service/cli/Type.java

Propchange: hive/branches/cbo/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Apr  8 20:49:44 2015
@@ -3,4 +3,4 @@
 /hive/branches/spark:1608589-1660298
 /hive/branches/tez:1494760-1622766
 /hive/branches/vectorization:1466908-1527856
-/hive/trunk:1605012-1671958
+/hive/trunk:1605012-1672144

Modified: hive/branches/cbo/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hive/branches/cbo/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Wed Apr  8 20:49:44 2015
@@ -37,9 +37,11 @@ import java.util.Map;
 import java.util.Set;
 
 import com.google.common.base.Splitter;
+
 import jline.console.ConsoleReader;
 import jline.console.completer.Completer;
 import jline.console.history.FileHistory;
+import jline.console.history.History;
 import jline.console.history.PersistentHistory;
 import jline.console.completer.StringsCompleter;
 import jline.console.completer.ArgumentCompleter;
@@ -93,12 +95,16 @@ public class CliDriver {
   public static final String HIVERCFILE = ".hiverc";
 
   private final LogHelper console;
+  protected ConsoleReader reader;
   private Configuration conf;
 
   public CliDriver() {
     SessionState ss = SessionState.get();
     conf = (ss != null) ? ss.getConf() : new Configuration();
     Log LOG = LogFactory.getLog("CliDriver");
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("CliDriver inited with classpath " + System.getProperty("java.class.path"));
+    }
     console = new LogHelper(LOG);
   }
 
@@ -712,34 +718,10 @@ public class CliDriver {
       return 3;
     }
 
-    ConsoleReader reader =  getConsoleReader();
-    reader.setBellEnabled(false);
-    // reader.setDebug(new PrintWriter(new FileWriter("writer.debug", true)));
-    for (Completer completer : getCommandCompleter()) {
-      reader.addCompleter(completer);
-    }
+    setupConsoleReader();
 
     String line;
-    final String HISTORYFILE = ".hivehistory";
-    String historyDirectory = System.getProperty("user.home");
-    PersistentHistory history = null;
-    try {
-      if ((new File(historyDirectory)).exists()) {
-        String historyFile = historyDirectory + File.separator + HISTORYFILE;
-        history = new FileHistory(new File(historyFile));
-        reader.setHistory(history);
-      } else {
-        System.err.println("WARNING: Directory for Hive history file: " + historyDirectory +
-                           " does not exist.   History will not be available during this session.");
-      }
-    } catch (Exception e) {
-      System.err.println("WARNING: Encountered an error while trying to initialize Hive's " +
-                         "history file.  History will not be available during this session.");
-      System.err.println(e.getMessage());
-    }
-
     int ret = 0;
-
     String prefix = "";
     String curDB = getFormattedDb(conf, ss);
     String curPrompt = prompt + curDB;
@@ -763,15 +745,53 @@ public class CliDriver {
       }
     }
 
-    if (history != null) {
-      history.flush();
-    }
     return ret;
   }
 
-  protected ConsoleReader getConsoleReader() throws IOException{
-    return new ConsoleReader();
+  private void setupCmdHistory() {
+    final String HISTORYFILE = ".hivehistory";
+    String historyDirectory = System.getProperty("user.home");
+    PersistentHistory history = null;
+    try {
+      if ((new File(historyDirectory)).exists()) {
+        String historyFile = historyDirectory + File.separator + HISTORYFILE;
+        history = new FileHistory(new File(historyFile));
+        reader.setHistory(history);
+      } else {
+        System.err.println("WARNING: Directory for Hive history file: " + historyDirectory +
+                           " does not exist.   History will not be available during this session.");
+      }
+    } catch (Exception e) {
+      System.err.println("WARNING: Encountered an error while trying to initialize Hive's " +
+                         "history file.  History will not be available during this session.");
+      System.err.println(e.getMessage());
+    }
+
+    // add shutdown hook to flush the history to history file
+    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
+      @Override
+      public void run() {
+        History h = reader.getHistory();
+        if (h instanceof FileHistory) {
+          try {
+            ((FileHistory) h).flush();
+          } catch (IOException e) {
+            System.err.println("WARNING: Failed to write command history file: " + e.getMessage());
+          }
+        }
+      }
+    }));
   }
+
+  protected void setupConsoleReader() throws IOException {
+    reader = new ConsoleReader();
+    reader.setBellEnabled(false);
+    for (Completer completer : getCommandCompleter()) {
+      reader.addCompleter(completer);
+    }
+    setupCmdHistory();
+  }
+
   /**
    * Retrieve the current database name string to display, based on the
    * configuration value.

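The CliDriver change above replaces the explicit history.flush() at the end of the read loop with a JVM shutdown hook, so command history is persisted even when the CLI exits abnormally. A minimal standalone sketch of that pattern, assuming only jline2's ConsoleReader and FileHistory (the class name and history path here are illustrative):

    import java.io.File;
    import java.io.IOException;

    import jline.console.ConsoleReader;
    import jline.console.history.FileHistory;
    import jline.console.history.History;

    public class HistoryFlushSketch {
      public static void main(String[] args) throws IOException {
        final ConsoleReader reader = new ConsoleReader();
        // Persist history under the user's home directory, as CliDriver does.
        reader.setHistory(new FileHistory(
            new File(System.getProperty("user.home"), ".hivehistory")));
        // Flush on JVM exit rather than at the end of the read loop, so an
        // abnormal exit still writes the history file.
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
          @Override
          public void run() {
            History h = reader.getHistory();
            if (h instanceof FileHistory) {
              try {
                ((FileHistory) h).flush();
              } catch (IOException e) {
                System.err.println("WARNING: Failed to write history: " + e.getMessage());
              }
            }
          }
        }));
      }
    }
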
Modified: hive/branches/cbo/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java (original)
+++ hive/branches/cbo/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java Wed Apr  8 20:49:44 2015
@@ -68,11 +68,13 @@ public class TestCliDriverMethods extend
   // Some of these tests require intercepting System.exit() using the SecurityManager.
   // It is safer to  register/unregister our SecurityManager during setup/teardown instead
   // of doing it within the individual test cases.
+  @Override
   public void setUp() {
     securityManager = System.getSecurityManager();
     System.setSecurityManager(new NoExitSecurityManager(securityManager));
   }
 
+  @Override
   public void tearDown() {
     System.setSecurityManager(securityManager);
   }
@@ -322,7 +324,7 @@ public class TestCliDriverMethods extend
 
   private static void setEnvLinux(String key, String value) throws Exception {
     Class[] classes = Collections.class.getDeclaredClasses();
-    Map<String, String> env = (Map<String, String>) System.getenv();
+    Map<String, String> env = System.getenv();
     for (Class cl : classes) {
       if ("java.util.Collections$UnmodifiableMap".equals(cl.getName())) {
         Field field = cl.getDeclaredField("m");
@@ -362,9 +364,8 @@ public class TestCliDriverMethods extend
   private static class FakeCliDriver extends CliDriver {
 
     @Override
-    protected ConsoleReader getConsoleReader() throws IOException {
-      ConsoleReader reslt = new FakeConsoleReader();
-      return reslt;
+    protected void setupConsoleReader() throws IOException {
+      reader = new FakeConsoleReader();
     }
 
   }

Modified: hive/branches/cbo/common/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/pom.xml?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/common/pom.xml (original)
+++ hive/branches/cbo/common/pom.xml Wed Apr  8 20:49:44 2015
@@ -93,6 +93,11 @@
       <version>${junit.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>${json.version}</version>
+    </dependency>
   </dependencies>
 
   <profiles>

Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Apr  8 20:49:44 2015
@@ -1623,6 +1623,9 @@ public class HiveConf extends Configurat
     HIVE_LOG_EXPLAIN_OUTPUT("hive.log.explain.output", false,
         "Whether to log explain output for every query.\n" +
         "When enabled, will log EXPLAIN EXTENDED output for the query at INFO log4j log level."),
+    HIVE_EXPLAIN_USER("hive.explain.user", false,
+        "Whether to show explain result at user level.\n" +
+        "When enabled, will log EXPLAIN output for the query at user level."),
 
     // prefix used to auto generated column aliases (this should be started with '_')
     HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL("hive.autogen.columnalias.prefix.label", "_c",

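The new flag is read through the standard HiveConf accessors. A minimal sketch, assuming hive-common is on the classpath (the enum constant matches the definition added above; a bare HiveConf() falls back to defaults if no hive-site.xml is found):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class ExplainUserCheck {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Defaults to false per the definition above; sessions can flip it
        // with "SET hive.explain.user=true".
        boolean userLevel = conf.getBoolVar(HiveConf.ConfVars.HIVE_EXPLAIN_USER);
        System.out.println("hive.explain.user = " + userLevel);
      }
    }
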
Modified: hive/branches/cbo/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java (original)
+++ hive/branches/cbo/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java Wed Apr  8 20:49:44 2015
@@ -29,7 +29,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
@@ -146,11 +145,9 @@ public class DbNotificationListener exte
     NotificationEvent event = new NotificationEvent(0, now(),
         HCatConstants.HCAT_ALTER_TABLE_EVENT,
         msgFactory.buildAlterTableMessage(before, after).toString());
-    if (event != null) {
-      event.setDbName(after.getDbName());
-      event.setTableName(after.getTableName());
-      enqueue(event);
-    }
+    event.setDbName(after.getDbName());
+    event.setTableName(after.getTableName());
+    enqueue(event);
   }
 
   /**
@@ -192,11 +189,9 @@ public class DbNotificationListener exte
     NotificationEvent event = new NotificationEvent(0, now(),
         HCatConstants.HCAT_ALTER_PARTITION_EVENT,
         msgFactory.buildAlterPartitionMessage(before, after).toString());
-    if (event != null) {
-      event.setDbName(before.getDbName());
-      event.setTableName(before.getTableName());
-      enqueue(event);
-    }
+    event.setDbName(before.getDbName());
+    event.setTableName(before.getTableName());
+    enqueue(event);
   }
 
   /**

Modified: hive/branches/cbo/hcatalog/webhcat/java-client/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/java-client/pom.xml?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/java-client/pom.xml (original)
+++ hive/branches/cbo/hcatalog/webhcat/java-client/pom.xml Wed Apr  8 20:49:44 2015
@@ -46,6 +46,11 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive.hcatalog</groupId>
+      <artifactId>hive-hcatalog-server-extensions</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-exec</artifactId>
       <version>${project.version}</version>

Modified: hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java (original)
+++ hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java Wed Apr  8 20:49:44 2015
@@ -18,6 +18,7 @@
  */
 package org.apache.hive.hcatalog.api;
 
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
@@ -27,6 +28,7 @@ import org.apache.hadoop.hive.common.cla
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hive.hcatalog.api.repl.ReplicationTask;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 
@@ -379,6 +381,24 @@ public abstract class HCatClient {
     throws HCatException;
 
   /**
+   * Drops partition(s) that match the specified (and possibly partial) partition specification.
+   * A partial partition-specification is one where not all partition-keys have associated values. For example,
+   * for a table ('myDb.myTable') with 2 partition keys (dt string, region string),
+   * if for each dt ('20120101', '20120102', etc.) there can exist 3 regions ('us', 'uk', 'in'), then,
+   *  1. Complete partition spec: dropPartitions('myDb', 'myTable', {dt='20120101', region='us'}) would drop 1 partition.
+   *  2. Partial  partition spec: dropPartitions('myDb', 'myTable', {dt='20120101'}) would drop all 3 partitions,
+   *                              with dt='20120101' (i.e. region = 'us', 'uk' and 'in').
+   * @param dbName The database name.
+   * @param tableName The table name.
+   * @param partitionSpec The partition specification, {[col_name,value],[col_name2,value2]}.
+   * @param ifExists Hive returns an error if the partition specified does not exist, unless ifExists is set to true.
+   * @param deleteData Whether to delete the underlying data.
+   * @throws HCatException,ConnectionFailureException
+   */
+   public abstract void dropPartitions(String dbName, String tableName,
+                    Map<String, String> partitionSpec, boolean ifExists, boolean deleteData)
+    throws HCatException;
+  /**
    * List partitions by filter.
    *
    * @param dbName The database name.
@@ -467,6 +487,23 @@ public abstract class HCatClient {
    */
   public abstract String getMessageBusTopicName(String dbName, String tableName) throws HCatException;
 
+
+  /**
+   * Get an iterator that iterates over a list of replication tasks needed to replicate all the
+   * events that have taken place for a given db/table.
+   * @param lastEventId : The last event id that was processed for this reader. The returned
+   *                    replication tasks will start from this point forward
+   * @param maxEvents : Maximum number of events to consider for generating the
+   *                  replication tasks. If < 1, then all available events will be considered.
+   * @param dbName : The database name whose events we are interested in.
+   * @param tableName : The table name whose events we are interested in - if null,
+   *                  then this function will behave as if it were running at a db level.
+   * @return an iterator over a list of replication events that can be processed one by one.
+   * @throws HCatException
+   */
+  public abstract Iterator<ReplicationTask> getReplicationTasks(
+      long lastEventId, int maxEvents, String dbName, String tableName) throws HCatException;
+
   /**
    * Get a list of notifications
    * @param lastEventId The last event id that was consumed by this reader.  The returned

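A hedged usage sketch of the two additions above ('myDb', 'myTable', and the partition values are placeholders; per the javadoc, deleteData=false drops only the partition metadata, and maxEvents < 1 means all available events):

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hive.hcatalog.api.HCatClient;
    import org.apache.hive.hcatalog.api.repl.ReplicationTask;

    public class ReplClientSketch {
      public static void main(String[] args) throws Exception {
        HCatClient client = HCatClient.create(new Configuration());
        // Partial spec: only dt is bound, so every region under that dt is dropped.
        Map<String, String> spec = new HashMap<String, String>();
        spec.put("dt", "20120101");
        client.dropPartitions("myDb", "myTable", spec,
            true,    // ifExists: don't fail if nothing matches
            false);  // deleteData: keep the underlying files
        // Db-level replication tasks (tableName == null), starting after event 0.
        Iterator<ReplicationTask> tasks = client.getReplicationTasks(0, -1, "myDb", null);
        while (tasks.hasNext()) {
          System.out.println("replicating event " + tasks.next().getEvent().getEventId());
        }
        client.close();
      }
    }
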
Modified: hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java (original)
+++ hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java Wed Apr  8 20:49:44 2015
@@ -21,9 +21,11 @@ package org.apache.hive.hcatalog.api;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
+import com.google.common.base.Function;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.apache.commons.lang.StringUtils;
@@ -63,6 +65,8 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hive.hcatalog.api.repl.HCatReplicationTaskIterator;
+import org.apache.hive.hcatalog.api.repl.ReplicationTask;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.common.HCatUtil;
@@ -72,6 +76,8 @@ import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.annotation.Nullable;
+
 /**
  * The HCatClientHMSImpl is the Hive Metastore client based implementation of
  * HCatClient.
@@ -567,33 +573,35 @@ public class HCatClientHMSImpl extends H
         && "TRUE".equalsIgnoreCase(table.getParameters().get("EXTERNAL"));
   }
 
-  private void dropPartitionsUsingExpressions(Table table, Map<String, String> partitionSpec, boolean ifExists)
-    throws SemanticException, TException {
+  private void dropPartitionsUsingExpressions(Table table, Map<String, String> partitionSpec,
+                                              boolean ifExists, boolean deleteData)
+      throws SemanticException, TException {
     LOG.info("HCatClient: Dropping partitions using partition-predicate Expressions.");
     ExprNodeGenericFuncDesc partitionExpression = new ExpressionBuilder(table, partitionSpec).build();
     ObjectPair<Integer, byte[]> serializedPartitionExpression =
         new ObjectPair<Integer, byte[]>(partitionSpec.size(),
             Utilities.serializeExpressionToKryo(partitionExpression));
     hmsClient.dropPartitions(table.getDbName(), table.getTableName(), Arrays.asList(serializedPartitionExpression),
-        !isExternal(table),  // Delete data?
-        false,               // Ignore Protection?
-        ifExists,            // Fail if table doesn't exist?
-        false);              // Need results back?
+        deleteData && !isExternal(table),  // Delete data?
+        false,                             // Ignore Protection?
+        ifExists,                          // Fail if table doesn't exist?
+        false);                            // Need results back?
   }
 
   private void dropPartitionsIteratively(String dbName, String tableName,
-                                         Map<String, String> partitionSpec, boolean ifExists) throws HCatException, TException {
+                                         Map<String, String> partitionSpec, boolean ifExists, boolean deleteData)
+      throws HCatException, TException {
     LOG.info("HCatClient: Dropping partitions iteratively.");
     List<Partition> partitions = hmsClient.listPartitionsByFilter(dbName, tableName,
         getFilterString(partitionSpec), (short) -1);
     for (Partition partition : partitions) {
-      dropPartition(partition, ifExists);
+      dropPartition(partition, ifExists, deleteData);
     }
   }
 
   @Override
   public void dropPartitions(String dbName, String tableName,
-                 Map<String, String> partitionSpec, boolean ifExists)
+                 Map<String, String> partitionSpec, boolean ifExists, boolean deleteData)
     throws HCatException {
     LOG.info("HCatClient dropPartitions(db=" + dbName + ",table=" + tableName + ", partitionSpec: ["+ partitionSpec + "]).");
     try {
@@ -602,17 +610,17 @@ public class HCatClientHMSImpl extends H
 
       if (hiveConfig.getBoolVar(HiveConf.ConfVars.METASTORE_CLIENT_DROP_PARTITIONS_WITH_EXPRESSIONS)) {
         try {
-          dropPartitionsUsingExpressions(table, partitionSpec, ifExists);
+          dropPartitionsUsingExpressions(table, partitionSpec, ifExists, deleteData);
         }
         catch (SemanticException parseFailure) {
           LOG.warn("Could not push down partition-specification to back-end, for dropPartitions(). Resorting to iteration.",
               parseFailure);
-          dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists);
+          dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists, deleteData);
         }
       }
       else {
         // Not using expressions.
-        dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists);
+        dropPartitionsIteratively(dbName, tableName, partitionSpec, ifExists, deleteData);
       }
     } catch (NoSuchObjectException e) {
       throw new ObjectNotFoundException(
@@ -627,10 +635,16 @@ public class HCatClientHMSImpl extends H
     }
   }
 
-  private void dropPartition(Partition partition, boolean ifExists)
+  @Override
+  public void dropPartitions(String dbName, String tableName,
+                             Map<String, String> partitionSpec, boolean ifExists) throws HCatException {
+    dropPartitions(dbName, tableName, partitionSpec, ifExists, true);
+  }
+
+  private void dropPartition(Partition partition, boolean ifExists, boolean deleteData)
     throws HCatException, MetaException, TException {
     try {
-      hmsClient.dropPartition(partition.getDbName(), partition.getTableName(), partition.getValues());
+      hmsClient.dropPartition(partition.getDbName(), partition.getTableName(), partition.getValues(), deleteData);
     } catch (NoSuchObjectException e) {
       if (!ifExists) {
         throw new ObjectNotFoundException(
@@ -965,18 +979,27 @@ public class HCatClientHMSImpl extends H
   }
 
   @Override
+  public Iterator<ReplicationTask> getReplicationTasks(
+      long lastEventId, int maxEvents, String dbName, String tableName) throws HCatException {
+    return new HCatReplicationTaskIterator(this,lastEventId,maxEvents,dbName,tableName);
+  }
+
+  @Override
   public List<HCatNotificationEvent> getNextNotification(long lastEventId, int maxEvents,
                                                          IMetaStoreClient.NotificationFilter filter)
       throws HCatException {
     try {
-      List<HCatNotificationEvent> events = new ArrayList<HCatNotificationEvent>();
       NotificationEventResponse rsp = hmsClient.getNextNotification(lastEventId, maxEvents, filter);
       if (rsp != null && rsp.getEvents() != null) {
-        for (NotificationEvent event : rsp.getEvents()) {
-          events.add(new HCatNotificationEvent(event));
-        }
+        return Lists.transform(rsp.getEvents(), new Function<NotificationEvent, HCatNotificationEvent>() {
+          @Override
+          public HCatNotificationEvent apply(@Nullable NotificationEvent notificationEvent) {
+            return new HCatNotificationEvent(notificationEvent);
+          }
+        });
+      } else {
+        return new ArrayList<HCatNotificationEvent>();
       }
-      return events;
     } catch (TException e) {
       throw new ConnectionFailureException("TException while getting notifications", e);
     }

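Note that the rewritten getNextNotification above now returns a Guava Lists.transform view instead of eagerly copying events into a new list, so each HCatNotificationEvent wrapper is created lazily on access. A minimal illustration of that behavior (values here are illustrative):

    import java.util.Arrays;
    import java.util.List;

    import com.google.common.base.Function;
    import com.google.common.collect.Lists;

    public class TransformSketch {
      public static void main(String[] args) {
        List<Integer> ids = Arrays.asList(1, 2, 3);
        // The returned list is a live view: apply() runs on each get(),
        // so no wrapper objects are allocated up front.
        List<String> view = Lists.transform(ids, new Function<Integer, String>() {
          @Override
          public String apply(Integer id) {
            return "event-" + id;
          }
        });
        System.out.println(view.get(1)); // prints "event-2"
      }
    }
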
Modified: hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java (original)
+++ hive/branches/cbo/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatNotificationEvent.java Wed Apr  8 20:49:44 2015
@@ -32,6 +32,8 @@ public class HCatNotificationEvent {
   private String tableName;
   private String message;
 
+  public enum Scope { DB, TABLE, UNKNOWN };
+
   HCatNotificationEvent(NotificationEvent event) {
     eventId = event.getEventId();
     eventTime = event.getEventTime();
@@ -45,6 +47,20 @@ public class HCatNotificationEvent {
     return eventId;
   }
 
+  public Scope getEventScope() {
+    // Eventually, we want this to be a richer description of having
+    // a DB, TABLE, ROLE, etc scope. For now, we have a trivial impl
+    // of having only DB and TABLE scopes, as determined by whether
+    // or not the tableName is null.
+    if (dbName != null){
+      if (tableName != null){
+        return Scope.TABLE;
+      }
+      return Scope.DB;
+    }
+    return Scope.UNKNOWN;
+  }
+
   public int getEventTime() {
     return eventTime;
   }

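Consumers can branch on the new Scope enum; as the comment above notes, the scope is inferred from which of dbName/tableName are set. A brief sketch of dispatching on it (the helper class and method are illustrative; the event is assumed to come from HCatClient.getNextNotification()):

    import org.apache.hive.hcatalog.api.HCatNotificationEvent;

    public class ScopeSketch {
      static String describe(HCatNotificationEvent event) {
        switch (event.getEventScope()) {
          case TABLE:
            // TABLE scope implies both dbName and tableName are set.
            return "table-level: " + event.getDbName() + "." + event.getTableName();
          case DB:
            return "db-level: " + event.getDbName();
          default:
            return "unknown scope for event " + event.getEventId();
        }
      }
    }
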
Modified: hive/branches/cbo/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java (original)
+++ hive/branches/cbo/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java Wed Apr  8 20:49:44 2015
@@ -18,18 +18,24 @@
  */
 package org.apache.hive.hcatalog.api;
 
+import java.io.IOException;
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
 
+import com.google.common.base.Function;
+import com.google.common.collect.Iterables;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
 import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
@@ -42,12 +48,17 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hive.hcatalog.api.repl.Command;
+import org.apache.hive.hcatalog.api.repl.ReplicationTask;
+import org.apache.hive.hcatalog.api.repl.ReplicationUtils;
+import org.apache.hive.hcatalog.api.repl.StagingDirectoryProvider;
 import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema.Type;
 import org.apache.hive.hcatalog.NoExitSecurityManager;
+import org.apache.hive.hcatalog.listener.DbNotificationListener;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -63,6 +74,8 @@ import static org.junit.Assert.assertArr
 
 import org.apache.hadoop.util.Shell;
 
+import javax.annotation.Nullable;
+
 public class TestHCatClient {
   private static final Logger LOG = LoggerFactory.getLogger(TestHCatClient.class);
   private static final String msPort = "20101";
@@ -113,6 +126,8 @@ public class TestHCatClient {
       WindowsPathUtil.convertPathsFromWindowsToHdfs(hcatConf);
     }
 
+    System.setProperty(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS.varname,
+        DbNotificationListener.class.getName()); // turn on db notification listener on metastore
     Thread t = new Thread(new RunMS(msPort));
     t.start();
     Thread.sleep(10000);
@@ -792,6 +807,113 @@ public class TestHCatClient {
     }
   }
 
+  /**
+   * Test for event-based replication scenario
+   *
+   * Does not test whether replication actually happened; merely tests that we can consume a
+   * replication-task iterator appropriately, calling all the functions expected of the interface,
+   * without errors.
+   */
+  @Test
+  public void testReplicationTaskIter() throws Exception {
+
+    HCatClient sourceMetastore = HCatClient.create(new Configuration(hcatConf));
+
+    List<HCatNotificationEvent> notifs = sourceMetastore.getNextNotification(
+        0, 0, new IMetaStoreClient.NotificationFilter() {
+      @Override
+      public boolean accept(NotificationEvent event) {
+        return true;
+      }
+    });
+    for(HCatNotificationEvent n : notifs){
+      LOG.info("notif from dblistener:" + n.getEventId()
+          + ":" + n.getEventTime() + ",t:" + n.getEventType() + ",o:" + n.getDbName() + "." + n.getTableName());
+    }
+
+    Iterator<ReplicationTask> taskIter = sourceMetastore.getReplicationTasks(0, 0, "mydb", null);
+    while(taskIter.hasNext()){
+      ReplicationTask task = taskIter.next();
+      HCatNotificationEvent n = task.getEvent();
+      LOG.info("notif from tasks:" + n.getEventId()
+          + ":" + n.getEventTime() + ",t:" + n.getEventType() + ",o:" + n.getDbName() + "." + n.getTableName()
+          + ",s:" + n.getEventScope());
+      LOG.info("task :" + task.getClass().getName());
+      if (task.needsStagingDirs()){
+        StagingDirectoryProvider provider = new StagingDirectoryProvider() {
+          @Override
+          public String getStagingDirectory(String key) {
+            LOG.info("getStagingDirectory(" + key + ") called!");
+            return "/tmp/" + key.replaceAll(" ","_");
+          }
+        };
+        task
+            .withSrcStagingDirProvider(provider)
+            .withDstStagingDirProvider(provider);
+      }
+      if (task.isActionable()){
+        LOG.info("task was actionable!");
+        Function<Command, String> commandDebugPrinter = new Function<Command, String>() {
+          @Override
+          public String apply(@Nullable Command cmd) {
+            StringBuilder sb = new StringBuilder();
+            String serializedCmd = null;
+            try {
+              serializedCmd = ReplicationUtils.serializeCommand(cmd);
+            } catch (IOException e) {
+              e.printStackTrace();
+              throw new RuntimeException(e);
+            }
+            sb.append("SERIALIZED:"+serializedCmd+"\n");
+            Command command = null;
+            try {
+              command = ReplicationUtils.deserializeCommand(serializedCmd);
+            } catch (IOException e) {
+              e.printStackTrace();
+              throw new RuntimeException(e);
+            }
+            sb.append("CMD:[" + command.getClass().getName() + "]\n");
+            sb.append("EVENTID:[" +command.getEventId()+"]\n");
+            for (String s : command.get()) {
+              sb.append("CMD:" + s);
+              sb.append("\n");
+            }
+            sb.append("Retriable:" + command.isRetriable() + "\n");
+            sb.append("Undoable:" + command.isUndoable() + "\n");
+            if (command.isUndoable()) {
+              for (String s : command.getUndo()) {
+                sb.append("UNDO:" + s);
+                sb.append("\n");
+              }
+            }
+            List<String> locns = command.cleanupLocationsPerRetry();
+            sb.append("cleanupLocationsPerRetry entries :" + locns.size());
+            for (String s : locns){
+              sb.append("RETRY_CLEANUP:"+s);
+              sb.append("\n");
+            }
+            locns = command.cleanupLocationsAfterEvent();
+            sb.append("cleanupLocationsAfterEvent entries :" + locns.size());
+            for (String s : locns){
+              sb.append("AFTER_EVENT_CLEANUP:"+s);
+              sb.append("\n");
+            }
+            return sb.toString();
+          }
+        };
+        LOG.info("On src:");
+        for (String s : Iterables.transform(task.getSrcWhCommands(), commandDebugPrinter)){
+          LOG.info(s);
+        }
+        LOG.info("On dest:");
+        for (String s : Iterables.transform(task.getDstWhCommands(), commandDebugPrinter)){
+          LOG.info(s);
+        }
+      } else {
+        LOG.info("task was not actionable.");
+      }
+    }
+  }
+
   /**
    * Test for detecting schema-changes for an HCatalog table, across 2 different HCat instances.
    * A table is created with the same schema on 2 HCat instances. The table-schema is modified on the source HCat

Modified: hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Wed Apr  8 20:49:44 2015
@@ -50,6 +50,8 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.TableType;
@@ -893,6 +895,54 @@ public class TestJdbcDriver2 {
     assertFalse(res.next());
   }
 
+  @Test
+  public void testIntervalTypes() throws Exception {
+    Statement stmt = con.createStatement();
+
+    // Since interval types not currently supported as table columns, need to create them
+    // as expressions.
+    ResultSet res = stmt.executeQuery(
+        "select case when c17 is null then null else interval '1' year end as col1,"
+        + " c17 -  c17 as col2 from " + dataTypeTableName + " order by col1");
+    ResultSetMetaData meta = res.getMetaData();
+
+    assertEquals("col1", meta.getColumnLabel(1));
+    assertEquals(java.sql.Types.OTHER, meta.getColumnType(1));
+    assertEquals("interval_year_month", meta.getColumnTypeName(1));
+    assertEquals(11, meta.getColumnDisplaySize(1));
+    assertEquals(11, meta.getPrecision(1));
+    assertEquals(0, meta.getScale(1));
+    assertEquals(HiveIntervalYearMonth.class.getName(), meta.getColumnClassName(1));
+
+    assertEquals("col2", meta.getColumnLabel(2));
+    assertEquals(java.sql.Types.OTHER, meta.getColumnType(2));
+    assertEquals("interval_day_time", meta.getColumnTypeName(2));
+    assertEquals(29, meta.getColumnDisplaySize(2));
+    assertEquals(29, meta.getPrecision(2));
+    assertEquals(0, meta.getScale(2));
+    assertEquals(HiveIntervalDayTime.class.getName(), meta.getColumnClassName(2));
+
+    // row 1 - results should be null
+    assertTrue(res.next());
+    // skip the last (partitioning) column since it is always non-null
+    for (int i = 1; i < meta.getColumnCount(); i++) {
+      assertNull("Column " + i + " should be null", res.getObject(i));
+    }
+
+    // row 2 - results should be null
+    assertTrue(res.next());
+    for (int i = 1; i < meta.getColumnCount(); i++) {
+      assertNull("Column " + i + " should be null", res.getObject(i));
+    }
+
+    // row 3
+    assertTrue(res.next());
+    assertEquals("1-0", res.getString(1));
+    assertEquals(1, ((HiveIntervalYearMonth) res.getObject(1)).getYears());
+    assertEquals("0 00:00:00.000000000", res.getString(2));
+    assertEquals(0, ((HiveIntervalDayTime) res.getObject(2)).getDays());
+  }
+
   private void doTestSelectAll(String tableName, int maxRows, int fetchSize) throws Exception {
     boolean isPartitionTable = tableName.equals(partitionedTableName);
 

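For reference, a hedged sketch of how a JDBC client might consume interval expressions
outside the test harness. The localhost endpoint is hypothetical, and the query assumes
FROM-less SELECT support (available since Hive 0.13):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class IntervalReadSketch {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "", "");
    Statement stmt = con.createStatement();
    // Intervals cannot yet be table column types, so produce them as expressions.
    ResultSet rs = stmt.executeQuery("select interval '1' year as ym, interval '1' day as dt");
    while (rs.next()) {
      // getObject returns the typed interval classes; getString the canonical string form.
      HiveIntervalYearMonth ym = (HiveIntervalYearMonth) rs.getObject(1);
      HiveIntervalDayTime dt = (HiveIntervalDayTime) rs.getObject(2);
      System.out.println(ym.getYears() + " year(s); " + dt.getDays() + " day(s); "
          + rs.getString(2));
    }
    con.close();
  }
}
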
Modified: hive/branches/cbo/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/src/test/resources/testconfiguration.properties?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/cbo/itests/src/test/resources/testconfiguration.properties Wed Apr  8 20:49:44 2015
@@ -29,6 +29,7 @@ minimr.query.files=auto_sortmerge_join_1
   list_bucket_dml_10.q,\
   load_fs2.q,\
   load_hdfs_file_with_space_in_the_name.q,\
+  non_native_window_udf.q, \
   optrstat_groupby.q,\
   parallel_orderby.q,\
   ql_rewrite_gbtoidx.q,\
@@ -287,6 +288,8 @@ minitez.query.files=bucket_map_join_tez1
   bucket_map_join_tez2.q,\
   dynamic_partition_pruning.q,\
   dynamic_partition_pruning_2.q,\
+  explainuser_1.q,\
+  explainuser_2.q,\
   hybridhashjoin.q,\
   mapjoin_decimal.q,\
   lvj_mapjoin.q, \

Modified: hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java (original)
+++ hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java Wed Apr  8 20:49:44 2015
@@ -44,6 +44,8 @@ import java.util.Calendar;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.Type;
 
@@ -443,6 +445,10 @@ public abstract class HiveBaseResultSet
         return new BigDecimal((String)value);
       case DATE_TYPE:
         return Date.valueOf((String) value);
+      case INTERVAL_YEAR_MONTH_TYPE:
+        return HiveIntervalYearMonth.valueOf((String) value);
+      case INTERVAL_DAY_TIME_TYPE:
+        return HiveIntervalDayTime.valueOf((String) value);
       case ARRAY_TYPE:
       case MAP_TYPE:
       case STRUCT_TYPE:

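The two new cases turn the server's canonical string form into the typed interval classes.
A small sketch, assuming the string formats shown in the TestJdbcDriver2 hunk above and
that HiveIntervalYearMonth exposes getMonths() alongside getYears():

import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class IntervalParseSketch {
  public static void main(String[] args) {
    // "1-0" == 1 year, 0 months; the day-time form is d hh:mm:ss.nnnnnnnnn.
    HiveIntervalYearMonth ym = HiveIntervalYearMonth.valueOf("1-0");
    HiveIntervalDayTime dt = HiveIntervalDayTime.valueOf("0 00:00:00.000000000");
    System.out.println(ym.getYears() + " years, " + ym.getMonths() + " months");
    System.out.println(dt.getDays() + " days");
  }
}
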
Modified: hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Wed Apr  8 20:49:44 2015
@@ -173,6 +173,7 @@ public class HiveConnection implements j
     supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V5);
     supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
     supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V7);
+    supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V8);
 
     // open client session
     openSession();

Modified: hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/HiveResultSetMetaData.java Wed Apr  8 20:49:44 2015
@@ -21,6 +21,7 @@ package org.apache.hive.jdbc;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.util.List;
+import org.apache.hive.service.cli.Type;
 
 /**
  * HiveResultSetMetaData.
@@ -43,9 +44,13 @@ public class HiveResultSetMetaData imple
     throw new SQLException("Method not supported");
   }
 
+  private Type getHiveType(int column) throws SQLException {
+    return JdbcColumn.typeStringToHiveType(columnTypes.get(toZeroIndex(column)));
+  }
+
   public String getColumnClassName(int column) throws SQLException {
-    int columnType = getColumnType(column);
-    return JdbcColumn.columnClassName(columnType, columnAttributes.get(toZeroIndex(column)));
+    return JdbcColumn.columnClassName(getHiveType(column),
+        columnAttributes.get(toZeroIndex(column)));
   }
 
   public int getColumnCount() throws SQLException {
@@ -53,9 +58,8 @@ public class HiveResultSetMetaData imple
   }
 
   public int getColumnDisplaySize(int column) throws SQLException {
-    int columnType = getColumnType(column);
-
-    return JdbcColumn.columnDisplaySize(columnType, columnAttributes.get(toZeroIndex(column)));
+    return JdbcColumn.columnDisplaySize(getHiveType(column),
+        columnAttributes.get(toZeroIndex(column)));
   }
 
   public String getColumnLabel(int column) throws SQLException {
@@ -79,15 +83,13 @@ public class HiveResultSetMetaData imple
   }
 
   public int getPrecision(int column) throws SQLException {
-    int columnType = getColumnType(column);
-
-    return JdbcColumn.columnPrecision(columnType, columnAttributes.get(toZeroIndex(column)));
+    return JdbcColumn.columnPrecision(getHiveType(column),
+        columnAttributes.get(toZeroIndex(column)));
   }
 
   public int getScale(int column) throws SQLException {
-    int columnType = getColumnType(column);
-
-    return JdbcColumn.columnScale(columnType, columnAttributes.get(toZeroIndex(column)));
+    return JdbcColumn.columnScale(getHiveType(column),
+        columnAttributes.get(toZeroIndex(column)));
   }
 
   public String getSchemaName(int column) throws SQLException {

Modified: hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java (original)
+++ hive/branches/cbo/jdbc/src/java/org/apache/hive/jdbc/JdbcColumn.java Wed Apr  8 20:49:44 2015
@@ -18,7 +18,10 @@
 
 package org.apache.hive.jdbc;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hive.service.cli.Type;
 
 import java.math.BigInteger;
 import java.sql.Date;
@@ -64,10 +67,12 @@ public class JdbcColumn {
     return type;
   }
 
-  static String columnClassName(int columnType, JdbcColumnAttributes columnAttributes)
+  static String columnClassName(Type hiveType, JdbcColumnAttributes columnAttributes)
       throws SQLException {
-    // according to hiveTypeToSqlType possible options are:
+    int columnType = hiveTypeToSqlType(hiveType);
     switch(columnType) {
+      case Types.NULL:
+        return "null";
       case Types.BOOLEAN:
         return Boolean.class.getName();
       case Types.CHAR:
@@ -93,7 +98,17 @@ public class JdbcColumn {
         return BigInteger.class.getName();
       case Types.BINARY:
         return byte[].class.getName();
-      case Types.JAVA_OBJECT:
+      case Types.OTHER:
+      case Types.JAVA_OBJECT: {
+        switch (hiveType) {
+          case INTERVAL_YEAR_MONTH_TYPE:
+            return HiveIntervalYearMonth.class.getName();
+          case INTERVAL_DAY_TIME_TYPE:
+            return HiveIntervalDayTime.class.getName();
+          default:
+            return String.class.getName();
+        }
+      }
       case Types.ARRAY:
       case Types.STRUCT:
         return String.class.getName();
@@ -102,45 +117,61 @@ public class JdbcColumn {
     }
   }
 
-  public static int hiveTypeToSqlType(String type) throws SQLException {
+  static Type typeStringToHiveType(String type) throws SQLException {
     if ("string".equalsIgnoreCase(type)) {
-      return Types.VARCHAR;
+      return Type.STRING_TYPE;
     } else if ("varchar".equalsIgnoreCase(type)) {
-      return Types.VARCHAR;
+      return Type.VARCHAR_TYPE;
     } else if ("char".equalsIgnoreCase(type)) {
-      return Types.CHAR;
+      return Type.CHAR_TYPE;
     } else if ("float".equalsIgnoreCase(type)) {
-      return Types.FLOAT;
+      return Type.FLOAT_TYPE;
     } else if ("double".equalsIgnoreCase(type)) {
-      return Types.DOUBLE;
+      return Type.DOUBLE_TYPE;
     } else if ("boolean".equalsIgnoreCase(type)) {
-      return Types.BOOLEAN;
+      return Type.BOOLEAN_TYPE;
     } else if ("tinyint".equalsIgnoreCase(type)) {
-      return Types.TINYINT;
+      return Type.TINYINT_TYPE;
     } else if ("smallint".equalsIgnoreCase(type)) {
-      return Types.SMALLINT;
+      return Type.SMALLINT_TYPE;
     } else if ("int".equalsIgnoreCase(type)) {
-      return Types.INTEGER;
+      return Type.INT_TYPE;
     } else if ("bigint".equalsIgnoreCase(type)) {
-      return Types.BIGINT;
+      return Type.BIGINT_TYPE;
     } else if ("date".equalsIgnoreCase(type)) {
-      return Types.DATE;
+      return Type.DATE_TYPE;
     } else if ("timestamp".equalsIgnoreCase(type)) {
-      return Types.TIMESTAMP;
+      return Type.TIMESTAMP_TYPE;
+    } else if ("interval_year_month".equalsIgnoreCase(type)) {
+      return Type.INTERVAL_YEAR_MONTH_TYPE;
+    } else if ("interval_day_time".equalsIgnoreCase(type)) {
+      return Type.INTERVAL_DAY_TIME_TYPE;
     } else if ("decimal".equalsIgnoreCase(type)) {
-      return Types.DECIMAL;
+      return Type.DECIMAL_TYPE;
     } else if ("binary".equalsIgnoreCase(type)) {
-      return Types.BINARY;
+      return Type.BINARY_TYPE;
     } else if ("map".equalsIgnoreCase(type)) {
-      return Types.JAVA_OBJECT;
+      return Type.MAP_TYPE;
     } else if ("array".equalsIgnoreCase(type)) {
-      return Types.ARRAY;
+      return Type.ARRAY_TYPE;
     } else if ("struct".equalsIgnoreCase(type)) {
-      return Types.STRUCT;
+      return Type.STRUCT_TYPE;
     }
     throw new SQLException("Unrecognized column type: " + type);
   }
 
+  public static int hiveTypeToSqlType(Type hiveType) throws SQLException {
+    return hiveType.toJavaSQLType();
+  }
+
+  public static int hiveTypeToSqlType(String type) throws SQLException {
+    if ("void".equalsIgnoreCase(type) || "null".equalsIgnoreCase(type)) {
+      return Types.NULL;
+    } else {
+      return hiveTypeToSqlType(typeStringToHiveType(type));
+    }
+  }
+
   static String getColumnTypeName(String type) throws SQLException {
     // we need to convert the Hive type to the SQL type name
     // TODO: this would be better handled in an enum
@@ -168,11 +199,15 @@ public class JdbcColumn {
       return serdeConstants.TIMESTAMP_TYPE_NAME;
     } else if ("date".equalsIgnoreCase(type)) {
       return serdeConstants.DATE_TYPE_NAME;
+    } else if ("interval_year_month".equalsIgnoreCase(type)) {
+      return serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME;
+    } else if ("interval_day_time".equalsIgnoreCase(type)) {
+      return serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME;
     } else if ("decimal".equalsIgnoreCase(type)) {
       return serdeConstants.DECIMAL_TYPE_NAME;
     } else if ("binary".equalsIgnoreCase(type)) {
       return serdeConstants.BINARY_TYPE_NAME;
-    } else if ("void".equalsIgnoreCase(type)) {
+    } else if ("void".equalsIgnoreCase(type) || "null".equalsIgnoreCase(type)) {
       return serdeConstants.VOID_TYPE_NAME;
     } else if (type.equalsIgnoreCase("map")) {
       return serdeConstants.MAP_TYPE_NAME;
@@ -185,26 +220,27 @@ public class JdbcColumn {
     throw new SQLException("Unrecognized column type: " + type);
   }
 
-  static int columnDisplaySize(int columnType, JdbcColumnAttributes columnAttributes)
+  static int columnDisplaySize(Type hiveType, JdbcColumnAttributes columnAttributes)
       throws SQLException {
     // according to hiveTypeToSqlType possible options are:
+    int columnType = hiveTypeToSqlType(hiveType);
     switch(columnType) {
     case Types.BOOLEAN:
-      return columnPrecision(columnType, columnAttributes);
+      return columnPrecision(hiveType, columnAttributes);
     case Types.CHAR:
     case Types.VARCHAR:
-      return columnPrecision(columnType, columnAttributes);
+      return columnPrecision(hiveType, columnAttributes);
     case Types.BINARY:
       return Integer.MAX_VALUE; // hive has no max limit for binary
     case Types.TINYINT:
     case Types.SMALLINT:
     case Types.INTEGER:
     case Types.BIGINT:
-      return columnPrecision(columnType, columnAttributes) + 1; // allow +/-
+      return columnPrecision(hiveType, columnAttributes) + 1; // allow +/-
     case Types.DATE:
       return 10;
     case Types.TIMESTAMP:
-      return columnPrecision(columnType, columnAttributes);
+      return columnPrecision(hiveType, columnAttributes);
 
     // see http://download.oracle.com/javase/6/docs/api/constant-values.html#java.lang.Float.MAX_EXPONENT
     case Types.FLOAT:
@@ -213,8 +249,10 @@ public class JdbcColumn {
     case Types.DOUBLE:
       return 25; // e.g. -(17#).e-####
     case Types.DECIMAL:
-      return columnPrecision(columnType, columnAttributes) + 2;  // '-' sign and '.'
+      return columnPrecision(hiveType, columnAttributes) + 2;  // '-' sign and '.'
+    case Types.OTHER:
     case Types.JAVA_OBJECT:
+      return columnPrecision(hiveType, columnAttributes);
     case Types.ARRAY:
     case Types.STRUCT:
       return Integer.MAX_VALUE;
@@ -223,8 +261,9 @@ public class JdbcColumn {
     }
   }
 
-  static int columnPrecision(int columnType, JdbcColumnAttributes columnAttributes)
+  static int columnPrecision(Type hiveType, JdbcColumnAttributes columnAttributes)
       throws SQLException {
+    int columnType = hiveTypeToSqlType(hiveType);
     // according to hiveTypeToSqlType possible options are:
     switch(columnType) {
     case Types.BOOLEAN:
@@ -255,7 +294,19 @@ public class JdbcColumn {
       return 29;
     case Types.DECIMAL:
       return columnAttributes.precision;
-    case Types.JAVA_OBJECT:
+    case Types.OTHER:
+    case Types.JAVA_OBJECT: {
+      switch (hiveType) {
+        case INTERVAL_YEAR_MONTH_TYPE:
+          // -yyyyyyy-mm  : should be more than enough
+          return 11;
+        case INTERVAL_DAY_TIME_TYPE:
+          // -ddddddddd hh:mm:ss.nnnnnnnnn
+          return 29;
+        default:
+          return Integer.MAX_VALUE;
+      }
+    }
     case Types.ARRAY:
     case Types.STRUCT:
       return Integer.MAX_VALUE;
@@ -264,8 +315,9 @@ public class JdbcColumn {
     }
   }
 
-  static int columnScale(int columnType, JdbcColumnAttributes columnAttributes)
+  static int columnScale(Type hiveType, JdbcColumnAttributes columnAttributes)
       throws SQLException {
+    int columnType = hiveTypeToSqlType(hiveType);
     // according to hiveTypeToSqlType possible options are:
     switch(columnType) {
     case Types.BOOLEAN:
@@ -286,6 +338,7 @@ public class JdbcColumn {
       return 9;
     case Types.DECIMAL:
       return columnAttributes.scale;
+    case Types.OTHER:
     case Types.JAVA_OBJECT:
     case Types.ARRAY:
     case Types.STRUCT:

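The JdbcColumn refactor replaces the old one-step string-to-java.sql.Types mapping with a
two-step mapping through the Thrift Type enum, so interval columns can report Types.OTHER
while still carrying a precise Hive type for class names and precision. A self-contained
toy showing the shape of that pipeline; HiveType below is a stand-in, not the real
org.apache.hive.service.cli.Type:

import java.sql.Types;

public class TypeMappingSketch {
  enum HiveType {
    INT_TYPE(Types.INTEGER),
    STRING_TYPE(Types.VARCHAR),
    INTERVAL_YEAR_MONTH_TYPE(Types.OTHER),
    INTERVAL_DAY_TIME_TYPE(Types.OTHER);

    private final int sqlType;

    HiveType(int sqlType) {
      this.sqlType = sqlType;
    }

    // The patched hiveTypeToSqlType(Type) delegates to Type.toJavaSQLType() like this.
    int toJavaSQLType() {
      return sqlType;
    }
  }

  // Mirrors the string dispatch in typeStringToHiveType, truncated to four types.
  static HiveType typeStringToHiveType(String type) {
    if ("int".equalsIgnoreCase(type)) {
      return HiveType.INT_TYPE;
    } else if ("string".equalsIgnoreCase(type)) {
      return HiveType.STRING_TYPE;
    } else if ("interval_year_month".equalsIgnoreCase(type)) {
      return HiveType.INTERVAL_YEAR_MONTH_TYPE;
    } else if ("interval_day_time".equalsIgnoreCase(type)) {
      return HiveType.INTERVAL_DAY_TIME_TYPE;
    }
    throw new IllegalArgumentException("Unrecognized column type: " + type);
  }

  public static void main(String[] args) {
    // string -> Hive type -> JDBC type: prints 1111, the value of Types.OTHER.
    System.out.println(typeStringToHiveType("interval_day_time").toJavaSQLType());
  }
}
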
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Wed Apr  8 20:49:44 2015
@@ -620,10 +620,10 @@ public class MetaStoreUtils {
    */
   static public boolean validateColumnType(String type) {
     int last = 0;
-    boolean lastAlphaDigit = Character.isLetterOrDigit(type.charAt(last));
+    boolean lastAlphaDigit = isValidTypeChar(type.charAt(last));
     for (int i = 1; i <= type.length(); i++) {
       if (i == type.length()
-          || Character.isLetterOrDigit(type.charAt(i)) != lastAlphaDigit) {
+          || isValidTypeChar(type.charAt(i)) != lastAlphaDigit) {
         String token = type.substring(last, i);
         last = i;
         if (!hiveThriftTypeMap.contains(token)) {
@@ -635,6 +635,10 @@ public class MetaStoreUtils {
     return true;
   }
 
+  private static boolean isValidTypeChar(char c) {
+    return Character.isLetterOrDigit(c) || c == '_';
+  }
+
   public static String validateSkewedColNames(List<String> cols) {
     if (null == cols) {
       return null;
@@ -720,6 +724,12 @@ public class MetaStoreUtils {
             "timestamp");
     typeToThriftTypeMap.put(
         org.apache.hadoop.hive.serde.serdeConstants.DECIMAL_TYPE_NAME, "decimal");
+    typeToThriftTypeMap.put(
+        org.apache.hadoop.hive.serde.serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME,
+        org.apache.hadoop.hive.serde.serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
+    typeToThriftTypeMap.put(
+        org.apache.hadoop.hive.serde.serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME,
+        org.apache.hadoop.hive.serde.serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
   }
 
   static Set<String> hiveThriftTypeMap; //for validation

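Why the underscore matters: validateColumnType tokenizes a type string into alternating
runs of type characters and separators and checks each token against the known set, so
without the new predicate "interval_day_time" would split at each '_' and fail. A
self-contained reconstruction; the run-flag flip after each token is assumed, since the
hunk elides the loop tail, and VALID is a toy stand-in for hiveThriftTypeMap:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class TypeTokenSketch {
  static final Set<String> VALID = new HashSet<String>(
      Arrays.asList("map", "<", "string", ",", "interval_day_time", ">"));

  static boolean isValidTypeChar(char c) {
    // The patched predicate: an underscore no longer ends a token.
    return Character.isLetterOrDigit(c) || c == '_';
  }

  static boolean validateColumnType(String type) {
    int last = 0;
    boolean lastAlphaDigit = isValidTypeChar(type.charAt(last));
    for (int i = 1; i <= type.length(); i++) {
      if (i == type.length() || isValidTypeChar(type.charAt(i)) != lastAlphaDigit) {
        String token = type.substring(last, i);
        last = i;
        lastAlphaDigit = !lastAlphaDigit; // assumed; elided from the hunk above
        if (!VALID.contains(token)) {
          return false;
        }
      }
    }
    return true;
  }

  public static void main(String[] args) {
    System.out.println(validateColumnType("map<string,interval_day_time>")); // true
  }
}
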
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java Wed Apr  8 20:49:44 2015
@@ -24,13 +24,13 @@ import java.lang.reflect.InvocationTarge
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.lang.reflect.UndeclaredThrowableException;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TApplicationException;
 import org.apache.thrift.TException;
@@ -51,14 +51,17 @@ public class RetryingMetaStoreClient imp
   private final IMetaStoreClient base;
   private final int retryLimit;
   private final long retryDelaySeconds;
+  private final Map<String, Long> metaCallTimeMap;
+
 
 
 
   protected RetryingMetaStoreClient(HiveConf hiveConf, HiveMetaHookLoader hookLoader,
-      Class<? extends IMetaStoreClient> msClientClass) throws MetaException {
+      Map<String, Long> metaCallTimeMap, Class<? extends IMetaStoreClient> msClientClass) throws MetaException {
     this.retryLimit = hiveConf.getIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES);
     this.retryDelaySeconds = hiveConf.getTimeVar(
         HiveConf.ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY, TimeUnit.SECONDS);
+    this.metaCallTimeMap = metaCallTimeMap;
 
     reloginExpiringKeytabUser();
     this.base = MetaStoreUtils.newInstance(msClientClass, new Class[] {
@@ -67,14 +70,20 @@ public class RetryingMetaStoreClient imp
 
   public static IMetaStoreClient getProxy(HiveConf hiveConf, HiveMetaHookLoader hookLoader,
       String mscClassName) throws MetaException {
+    return getProxy(hiveConf, hookLoader, null, mscClassName);
+  }
+
+  public static IMetaStoreClient getProxy(HiveConf hiveConf, HiveMetaHookLoader hookLoader,
+      Map<String, Long> metaCallTimeMap, String mscClassName) throws MetaException {
 
-    Class<? extends IMetaStoreClient> baseClass = (Class<? extends IMetaStoreClient>)
-        MetaStoreUtils.getClass(mscClassName);
+    Class<? extends IMetaStoreClient> baseClass = (Class<? extends IMetaStoreClient>) MetaStoreUtils
+        .getClass(mscClassName);
 
-    RetryingMetaStoreClient handler = new RetryingMetaStoreClient(hiveConf, hookLoader, baseClass);
+    RetryingMetaStoreClient handler = new RetryingMetaStoreClient(hiveConf, hookLoader,
+        metaCallTimeMap, baseClass);
 
-    return (IMetaStoreClient) Proxy.newProxyInstance(RetryingMetaStoreClient.class.getClassLoader(),
-        baseClass.getInterfaces(), handler);
+    return (IMetaStoreClient) Proxy.newProxyInstance(
+        RetryingMetaStoreClient.class.getClassLoader(), baseClass.getInterfaces(), handler);
   }
 
   @Override
@@ -88,7 +97,15 @@ public class RetryingMetaStoreClient imp
         if(retriesMade > 0){
           base.reconnect();
         }
-        ret = method.invoke(base, args);
+        if (metaCallTimeMap == null) {
+          ret = method.invoke(base, args);
+        } else {
+          // need to capture the timing
+          long startTime = System.currentTimeMillis();
+          ret = method.invoke(base, args);
+          long timeTaken = System.currentTimeMillis() - startTime;
+          addMethodTime(method, timeTaken);
+        }
         break;
       } catch (UndeclaredThrowableException e) {
         throw e.getCause();
@@ -116,6 +133,30 @@ public class RetryingMetaStoreClient imp
     return ret;
   }
 
+  private void addMethodTime(Method method, long timeTaken) {
+    String methodStr = getMethodString(method);
+    Long curTime = metaCallTimeMap.get(methodStr);
+    if (curTime != null) {
+      timeTaken += curTime;
+    }
+    metaCallTimeMap.put(methodStr, timeTaken);
+  }
+
+  /**
+   * @param method the metastore client method that was invoked
+   * @return String representation with arg types, e.g. getDatabase_(String, )
+   */
+  private String getMethodString(Method method) {
+    StringBuilder methodSb = new StringBuilder(method.getName());
+    methodSb.append("_(");
+    for (Class<?> paramClass : method.getParameterTypes()) {
+      methodSb.append(paramClass.getSimpleName());
+      methodSb.append(", ");
+    }
+    methodSb.append(")");
+    return methodSb.toString();
+  }
+
   /**
    * Relogin if login user is logged in using keytab
    * Relogin is actually done by ugi code only if sufficient time has passed

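The timing capture rides the same java.lang.reflect.Proxy mechanism the retry logic
already uses. A minimal standalone sketch of an accumulating timing proxy, with a
hypothetical Store interface standing in for IMetaStoreClient:

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.HashMap;
import java.util.Map;

public class TimingProxySketch {
  interface Store {
    String getDatabase(String name);
  }

  public static void main(String[] args) {
    final Store base = new Store() {
      public String getDatabase(String name) {
        return name;
      }
    };
    final Map<String, Long> callTimes = new HashMap<String, Long>();

    Store timed = (Store) Proxy.newProxyInstance(
        Store.class.getClassLoader(), new Class[] { Store.class },
        new InvocationHandler() {
          public Object invoke(Object proxy, Method method, Object[] a) throws Throwable {
            long start = System.currentTimeMillis();
            Object ret = method.invoke(base, a);
            long taken = System.currentTimeMillis() - start;
            // Accumulate per-method totals, as addMethodTime does above.
            String key = method.getName();
            Long cur = callTimes.get(key);
            callTimes.put(key, cur == null ? taken : cur + taken);
            return ret;
          }
        });

    timed.getDatabase("default");
    System.out.println(callTimes);
  }
}
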
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Context.java Wed Apr  8 20:49:44 2015
@@ -23,6 +23,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
@@ -84,6 +85,7 @@ public class Context {
   private final Configuration conf;
   protected int pathid = 10000;
   protected boolean explain = false;
+  protected String cboInfo;
   protected boolean explainLogical = false;
   protected String cmd = "";
   // number of previous attempts
@@ -695,4 +697,13 @@ public class Context {
   public AcidUtils.Operation getAcidOperation() {
     return acidOperation;
   }
+
+  public String getCboInfo() {
+    return cboInfo;
+  }
+
+  public void setCboInfo(String cboInfo) {
+    this.cboInfo = cboInfo;
+  }
+
 }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Wed Apr  8 20:49:44 2015
@@ -63,7 +63,6 @@ import org.apache.hadoop.hive.ql.hooks.H
 import org.apache.hadoop.hive.ql.hooks.PostExecute;
 import org.apache.hadoop.hive.ql.hooks.PreExecute;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.Redactor;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
@@ -485,7 +484,6 @@ public class Driver implements CommandPr
               + explainOutput);
         }
       }
-
       return 0;
     } catch (Exception e) {
       ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
@@ -508,10 +506,19 @@ public class Driver implements CommandPr
       return error.getErrorCode();
     } finally {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.COMPILE);
+      dumpMetaCallTimingWithoutEx("compilation");
       restoreSession(queryState);
     }
   }
 
+  private void dumpMetaCallTimingWithoutEx(String phase) {
+    try {
+      Hive.get().dumpAndClearMetaCallTiming(phase);
+    } catch (HiveException he) {
+      LOG.warn("Caught exception attempting to write metadata call information " + he, he);
+    }
+  }
+
   /**
    * Returns EXPLAIN EXTENDED output for a semantically
    * analyzed query.
@@ -1182,7 +1189,6 @@ public class Driver implements CommandPr
         return createProcessorResponse(ret);
       }
     }
-
     ret = execute();
     if (ret != 0) {
       //if needRequireLock is false, the release here will do nothing because there is no lock
@@ -1307,7 +1313,6 @@ public class Driver implements CommandPr
   public int execute() throws CommandNeedRetryException {
     PerfLogger perfLogger = PerfLogger.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DRIVER_EXECUTE);
-
     boolean noName = StringUtils.isEmpty(conf.getVar(HiveConf.ConfVars.HADOOPJOBNAME));
     int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
 
@@ -1318,6 +1323,9 @@ public class Driver implements CommandPr
 
     try {
       LOG.info("Starting command: " + queryStr);
+      // compile and execute can get called from different threads in case of HS2
+      // so clear timing in this thread's Hive object before proceeding.
+      Hive.get().clearMetaCallTiming();
 
       plan.setStarted();
 
@@ -1548,6 +1556,7 @@ public class Driver implements CommandPr
       if (noName) {
         conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, "");
       }
+      dumpMetaCallTimingWithoutEx("execution");
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DRIVER_EXECUTE);
 
       Map<String, MapRedStats> stats = SessionState.get().getMapRedStats();

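A sketch of the per-phase bookkeeping these Driver changes imply. The method names mirror
the Hive.clearMetaCallTiming and dumpAndClearMetaCallTiming calls in the hunks above, but
the ThreadLocal body is an assumption made to show why execute() must clear its own
thread's state when HiveServer2 compiles and executes on different threads:

import java.util.HashMap;
import java.util.Map;

public class PhaseTimingSketch {
  private static final ThreadLocal<Map<String, Long>> TIMINGS =
      new ThreadLocal<Map<String, Long>>() {
        @Override
        protected Map<String, Long> initialValue() {
          return new HashMap<String, Long>();
        }
      };

  static void clearMetaCallTiming() {
    TIMINGS.get().clear();
  }

  static void dumpAndClearMetaCallTiming(String phase) {
    System.out.println("Metastore call times (" + phase + "): " + TIMINGS.get());
    TIMINGS.get().clear();
  }

  public static void main(String[] args) {
    // Key format follows getMethodString above: name, underscore, arg simple names.
    TIMINGS.get().put("getTable_(String, String, )", 42L);
    dumpAndClearMetaCallTiming("compilation");
  }
}
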
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1672173&r1=1672172&r2=1672173&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Wed Apr  8 20:49:44 2015
@@ -40,6 +40,8 @@ import java.util.Set;
 import java.util.TreeMap;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.jsonexplain.JsonParser;
+import org.apache.hadoop.hive.common.jsonexplain.JsonParserFactory;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -47,7 +49,9 @@ import org.apache.hadoop.hive.ql.hooks.R
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -288,9 +292,24 @@ public class ExplainTask extends Task<Ex
           JSONObject jsonDependencies = getJSONDependencies(work);
           out.print(jsonDependencies);
         } else {
-          JSONObject jsonPlan = getJSONPlan(out, work);
-          if (work.isFormatted()) {
-            out.print(jsonPlan);
+          if (work.isUserLevelExplain()) {
+            JsonParser jsonParser = JsonParserFactory.getParser(conf);
+            if (jsonParser != null) {
+              work.setFormatted(true);
+              JSONObject jsonPlan = getJSONPlan(out, work);
+              if (work.getCboInfo() != null) {
+                jsonPlan.put("cboInfo", work.getCboInfo());
+              }
+              jsonParser.print(jsonPlan, out);
+            } else {
+              throw new SemanticException(
+                  "Hive UserLevelExplain only supports tez engine right now.");
+            }
+          } else {
+            JSONObject jsonPlan = getJSONPlan(out, work);
+            if (work.isFormatted()) {
+              out.print(jsonPlan);
+            }
           }
         }
       }
@@ -566,7 +585,17 @@ public class ExplainTask extends Task<Ex
 
     if (note instanceof Explain) {
       Explain xpl_note = (Explain) note;
-      if (extended || xpl_note.normalExplain()) {
+      boolean invokeFlag = false;
+      if (this.work.isUserLevelExplain()) {
+        invokeFlag = Level.USER.in(xpl_note.explainLevels());
+      } else {
+        if (extended) {
+          invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels());
+        } else {
+          invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels());
+        }
+      }
+      if (invokeFlag) {
         keyJSONObject = xpl_note.displayName();
         if (out != null) {
           out.print(indentString(indent));
@@ -589,6 +618,12 @@ public class ExplainTask extends Task<Ex
         String appender = isLogical ? " (" + operator.getOperatorId() + ")" : "";
         JSONObject jsonOut = outputPlan(operator.getConf(), out, extended,
             jsonOutput, jsonOutput ? 0 : indent, appender);
+        if (this.work.isUserLevelExplain()) {
+          if (jsonOut != null && jsonOut.length() > 0) {
+            ((JSONObject) jsonOut.get(JSONObject.getNames(jsonOut)[0])).put("OperatorId:",
+                operator.getOperatorId());
+          }
+        }
         if (jsonOutput) {
             json = jsonOut;
         }
@@ -623,8 +658,17 @@ public class ExplainTask extends Task<Ex
 
       if (note instanceof Explain) {
         Explain xpl_note = (Explain) note;
-
-        if (extended || xpl_note.normalExplain()) {
+        boolean invokeFlag = false;
+        if (this.work.isUserLevelExplain()) {
+          invokeFlag = Level.USER.in(xpl_note.explainLevels());
+        } else {
+          if (extended) {
+            invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels());
+          } else {
+            invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels());
+          }
+        }
+        if (invokeFlag) {
 
           Object val = null;
           try {


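The invokeFlag checks key off Level membership declared on each @Explain annotation. A toy
sketch of such an enum helper; the real Level lives in org.apache.hadoop.hive.ql.plan.Explain
and its exact shape (including the signature of in()) is assumed here:

public class ExplainLevelSketch {
  enum Level {
    USER, DEFAULT, EXTENDED;

    boolean in(Level[] levels) {
      if (levels == null) {
        return false;
      }
      for (Level level : levels) {
        if (level == this) {
          return true;
        }
      }
      return false;
    }
  }

  public static void main(String[] args) {
    Level[] annotated = { Level.DEFAULT, Level.EXTENDED };
    // A user-level explain renders only fields annotated for Level.USER.
    System.out.println(Level.USER.in(annotated));    // false
    System.out.println(Level.DEFAULT.in(annotated)); // true
  }
}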
