hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From gunt...@apache.org
Subject svn commit: r1510788 [1/10] - in /hive/branches/tez: ./ cli/src/java/org/apache/hadoop/hive/cli/ cli/src/test/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/conf/ conf/ data/files/ eclipse-templates/ hcatalog/storage-handlers/hbase/...
Date Mon, 05 Aug 2013 22:31:32 GMT
Author: gunther
Date: Mon Aug  5 22:31:28 2013
New Revision: 1510788

URL: http://svn.apache.org/r1510788
Log:
Merged latest trunk changes to tez branch

Added:
    hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
      - copied unchanged from r1510787, hive/trunk/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
    hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
      - copied unchanged from r1510787, hive/trunk/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
    hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestRCFileCat.java
      - copied unchanged from r1510787, hive/trunk/cli/src/test/org/apache/hadoop/hive/cli/TestRCFileCat.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java
      - copied unchanged from r1510787, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java
    hive/branches/tez/ql/src/test/queries/clientpositive/correlationoptimizer15.q
      - copied unchanged from r1510787, hive/trunk/ql/src/test/queries/clientpositive/correlationoptimizer15.q
    hive/branches/tez/ql/src/test/queries/clientpositive/multiMapJoin2.q
      - copied unchanged from r1510787, hive/trunk/ql/src/test/queries/clientpositive/multiMapJoin2.q
    hive/branches/tez/ql/src/test/queries/clientpositive/partition_date2.q
      - copied unchanged from r1510787, hive/trunk/ql/src/test/queries/clientpositive/partition_date2.q
    hive/branches/tez/ql/src/test/results/clientpositive/correlationoptimizer15.q.out
      - copied unchanged from r1510787, hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer15.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/multiMapJoin2.q.out
      - copied unchanged from r1510787, hive/trunk/ql/src/test/results/clientpositive/multiMapJoin2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/partition_date2.q.out
      - copied unchanged from r1510787, hive/trunk/ql/src/test/results/clientpositive/partition_date2.q.out
    hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/io/
      - copied from r1510787, hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutorBuilder.java
      - copied unchanged from r1510787, hive/trunk/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutorBuilder.java
Removed:
    hive/branches/tez/ql/src/test/queries/clientpositive/auto_join33.q
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join33.q.out
Modified:
    hive/branches/tez/   (props changed)
    hive/branches/tez/.gitattributes
    hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
    hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java
    hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
    hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/tez/conf/hive-default.xml.template
    hive/branches/tez/data/files/csv.txt
    hive/branches/tez/eclipse-templates/.classpath
    hive/branches/tez/eclipse-templates/.classpath._hbase
    hive/branches/tez/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java
    hive/branches/tez/ivy/libraries.properties
    hive/branches/tez/metastore/ivy.xml
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g
    hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
    hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
    hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.h
    hive/branches/tez/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
    hive/branches/tez/ql/src/gen/thrift/gen-php/Types.php
    hive/branches/tez/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
    hive/branches/tez/ql/src/gen/thrift/gen-rb/queryplan_types.rb
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/Context.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionDescription.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionInfo.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/correlation/QueryPlanTreeTransformation.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFType.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
    hive/branches/tez/ql/src/test/queries/clientpositive/avro_nullable_fields.q
    hive/branches/tez/ql/src/test/queries/clientpositive/ba_table_udfs.q
    hive/branches/tez/ql/src/test/queries/clientpositive/correlationoptimizer1.q
    hive/branches/tez/ql/src/test/queries/clientpositive/correlationoptimizer3.q
    hive/branches/tez/ql/src/test/queries/clientpositive/correlationoptimizer4.q
    hive/branches/tez/ql/src/test/queries/clientpositive/correlationoptimizer5.q
    hive/branches/tez/ql/src/test/queries/clientpositive/correlationoptimizer6.q
    hive/branches/tez/ql/src/test/queries/clientpositive/correlationoptimizer7.q
    hive/branches/tez/ql/src/test/queries/clientpositive/multiMapJoin1.q
    hive/branches/tez/ql/src/test/queries/clientpositive/nonblock_op_deduplicate.q
    hive/branches/tez/ql/src/test/queries/clientpositive/union34.q
    hive/branches/tez/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join0.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join10.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join11.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join12.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join13.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join15.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join16.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join20.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join21.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join22.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join23.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join24.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join26.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join28.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join29.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_join32.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_10.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_9.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/avro_nullable_fields.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/avro_schema_literal.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/ba_table_udfs.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/combine2_win.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/correlationoptimizer1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/correlationoptimizer3.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/correlationoptimizer4.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/correlationoptimizer6.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/correlationoptimizer7.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join28.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join32.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join33.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join_star.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/mapjoin_filter_on_outerjoin.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/mapjoin_subquery.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/mapjoin_subquery2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/mapjoin_test_outer.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/multiMapJoin1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/multi_join_union.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/nonblock_op_deduplicate.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/serde_user_properties.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/union34.q.out
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
    hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
    hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java
    hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroObjectInspectorGenerator.java
    hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java
    hive/branches/tez/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/branches/tez/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
    hive/branches/tez/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
    hive/branches/tez/shims/src/common-secure/test/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
    hive/branches/tez/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/client/PTestClient.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/request/TestStartRequest.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/ExecutionController.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/api/server/TestExecutor.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/CleanupPhase.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/Constants.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/Drone.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ExecutionPhase.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/HostExecutor.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JIRAService.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/JUnitReportParser.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/LogDirectoryCleaner.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/Phase.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PrepPhase.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ReportingPhase.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/ExecutionContextConfiguration.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/QFileTestBatch.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestParser.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudComputeService.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/AbstractSSHCommand.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/RSyncCommandExecutor.java
    hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/ssh/SSHCommandExecutor.java
    hive/branches/tez/testutils/ptest2/src/main/resources/batch-exec.vm
    hive/branches/tez/testutils/ptest2/src/main/resources/log4j.properties
    hive/branches/tez/testutils/ptest2/src/main/resources/source-prep.vm
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/api/server/TestTestExecutor.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/AbstractTestPhase.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockLocalCommandFactory.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockRSyncCommandExecutor.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/MockSSHCommandExecutor.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestCleanupPhase.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestCleanupPhase.testExecute.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.testFailingQFile.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.testFailingUnitTest.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.testPassingQFileTest.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestExecutionPhase.testPassingUnitTest.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestHostExecutor.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestPhase.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestPhase.testRsyncFromLocalToRemoteInstancesWithFailureOne.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestPhase.testRsyncFromLocalToRemoteInstancesWithFailureUnknown.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestPrepPhase.testExecute.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestReportParser.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.java
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepGit.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepNone.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/java/org/apache/hive/ptest/execution/TestScripts.testPrepSvn.approved.txt
    hive/branches/tez/testutils/ptest2/src/test/resources/SomeTest-success.xml
    hive/branches/tez/testutils/ptest2/src/test/resources/test-outputs/SomeTest-truncated.xml
    hive/branches/tez/testutils/ptest2/src/test/resources/test-outputs/skewjoin_union_remove_1.q-TEST-org.apache.hadoop.hive.cli.TestCliDriver.xml

Propchange: hive/branches/tez/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1508199-1510787

Modified: hive/branches/tez/.gitattributes
URL: http://svn.apache.org/viewvc/hive/branches/tez/.gitattributes?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/.gitattributes (original)
+++ hive/branches/tez/.gitattributes Mon Aug  5 22:31:28 2013
@@ -14,6 +14,9 @@
 
 *.sh     text eol=lf
 
+#test files, use lf so that size is same on windows as well
+data/files/*.dat    text eol=lf
+
 *.bat    text eol=crlf
 *.csproj text merge=union eol=crlf
 *.sln    text merge=union eol=crlf

Modified: hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Mon Aug  5 22:31:28 2013
@@ -611,11 +611,11 @@ public class CliDriver {
   }
 
   public static void main(String[] args) throws Exception {
-    int ret = run(args);
+    int ret = new CliDriver().run(args);
     System.exit(ret);
   }
 
-  public static int run(String[] args) throws Exception {
+  public  int run(String[] args) throws Exception {
 
     OptionsProcessor oproc = new OptionsProcessor();
     if (!oproc.process_stage1(args)) {
@@ -690,7 +690,7 @@ public class CliDriver {
    * @return status of the CLI command execution
    * @throws Exception
    */
-  private static int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor oproc)
+  private  int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor oproc)
       throws Exception {
 
     // connect to Hive Server
@@ -734,15 +734,14 @@ public class CliDriver {
 
     try {
       if (ss.fileName != null) {
-        int fileProcessStatus = cli.processFile(ss.fileName);
-        return fileProcessStatus;
+        return cli.processFile(ss.fileName);
       }
     } catch (FileNotFoundException e) {
       System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
       return 3;
     }
 
-    ConsoleReader reader = new ConsoleReader();
+    ConsoleReader reader =  getConsoleReader();
     reader.setBellEnabled(false);
     // reader.setDebug(new PrintWriter(new FileWriter("writer.debug", true)));
     for (Completor completor : getCommandCompletor()) {
@@ -793,6 +792,9 @@ public class CliDriver {
     return ret;
   }
 
+  protected ConsoleReader getConsoleReader() throws IOException{
+    return new ConsoleReader();
+  }
   /**
    * Retrieve the current database name string to display, based on the
    * configuration value.

Modified: hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java (original)
+++ hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/RCFileCat.java Mon Aug  5 22:31:28 2013
@@ -22,6 +22,7 @@ import java.io.BufferedOutputStream;
 import java.io.FileDescriptor;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.PrintStream;
 import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
@@ -53,6 +54,8 @@ public class RCFileCat implements Tool{
   // In verbose mode, print an update per RECORD_PRINT_INTERVAL records
   private static final int RECORD_PRINT_INTERVAL = (1024*1024);
 
+  protected static boolean test=false;
+
   public RCFileCat() {
     super();
     decoder = Charset.forName("UTF-8").newDecoder().
@@ -81,6 +84,7 @@ public class RCFileCat implements Tool{
     //get options from arguments
     if (args.length < 1 || args.length > 3) {
       printUsage(null);
+      return -1;
     }
     Path fileName = null;
     for (int i = 0; i < args.length; i++) {
@@ -102,6 +106,7 @@ public class RCFileCat implements Tool{
         fileName = new Path(arg);
       } else {
         printUsage(null);
+        return -1;
       }
     }
 
@@ -253,14 +258,19 @@ public class RCFileCat implements Tool{
       e.printStackTrace();
       System.err.println("\n\n\n");
       printUsage(e.getMessage());
+      System.exit(1);
     }
   }
 
   private static void setupBufferedOutput() {
-    FileOutputStream fdout =
-        new FileOutputStream(FileDescriptor.out);
+    OutputStream pdataOut;
+    if (test) {
+      pdataOut = System.out;
+    } else {
+      pdataOut = new FileOutputStream(FileDescriptor.out);
+    }
     BufferedOutputStream bos =
-        new BufferedOutputStream(fdout, STDOUT_BUFFER_SIZE);
+        new BufferedOutputStream(pdataOut, STDOUT_BUFFER_SIZE);
     PrintStream ps =
         new PrintStream(bos, false);
     System.setOut(ps);
@@ -270,7 +280,6 @@ public class RCFileCat implements Tool{
     if(errorMsg != null) {
       System.err.println(errorMsg);
     }
-    System.exit(1);
   }
 
 }

Modified: hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java (original)
+++ hive/branches/tez/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java Mon Aug  5 22:31:28 2013
@@ -17,28 +17,50 @@
  */
 package org.apache.hadoop.hive.cli;
 
-import junit.framework.TestCase;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Schema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.List;
 
-import static org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyInt;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.lang.reflect.Field;
+import java.security.Permission;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import jline.ArgumentCompletor;
+import jline.Completor;
+import jline.ConsoleReader;
+import junit.framework.TestCase;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.service.HiveClient;
+import org.apache.hadoop.hive.service.HiveServerException;
+import org.apache.thrift.TException;
+
+
 // Cannot call class TestCliDriver since that's the name of the generated
 // code for the script-based testing
 public class TestCliDriverMethods extends TestCase {
@@ -58,7 +80,8 @@ public class TestCliDriverMethods extend
   }
 
   // If the command has no schema, make sure nothing is printed
-  public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema() throws CommandNeedRetryException {
+  public void testThatCliDriverPrintsNoHeaderForCommandsWithNoSchema()
+      throws CommandNeedRetryException {
     Schema mockSchema = mock(Schema.class);
     when(mockSchema.getFieldSchemas()).thenReturn(null);
 
@@ -69,16 +92,21 @@ public class TestCliDriverMethods extend
 
   /**
    * Do the actual testing against a mocked CliDriver based on what type of schema
-   * @param mockSchema Schema to throw against test
+   *
+   * @param mockSchema
+   *          Schema to throw against test
    * @return Output that would have been sent to the user
-   * @throws CommandNeedRetryException won't actually be thrown
+   * @throws CommandNeedRetryException
+   *           won't actually be thrown
    */
   private PrintStream headerPrintingTestDriver(Schema mockSchema) throws CommandNeedRetryException {
     CliDriver cliDriver = new CliDriver();
 
     // We want the driver to try to print the header...
+
     Configuration conf = mock(Configuration.class);
-    when(conf.getBoolean(eq(ConfVars.HIVE_CLI_PRINT_HEADER.varname), anyBoolean())).thenReturn(true);
+    when(conf.getBoolean(eq(ConfVars.HIVE_CLI_PRINT_HEADER.varname), anyBoolean()))
+        .thenReturn(true);
     cliDriver.setConf(conf);
 
     Driver proc = mock(Driver.class);
@@ -99,4 +127,429 @@ public class TestCliDriverMethods extend
     return mockOut;
   }
 
+
+  public void testGetCommandCompletor() {
+    Completor[] completors = CliDriver.getCommandCompletor();
+    assertEquals(2, completors.length);
+    assertTrue(completors[0] instanceof ArgumentCompletor);
+    assertTrue(completors[1] instanceof Completor);
+
+    // completor adds a space after the last delimiter
+   List<String>testList=new ArrayList<String>(Arrays.asList(new String[]{")"}));
+    completors[1].complete("fdsdfsdf", 0, testList);
+    assertEquals(") ", testList.get(0));
+    testList=new ArrayList<String>();
+    completors[1].complete("len", 0, testList);
+    assertTrue(testList.get(0).endsWith("length("));
+
+    testList=new ArrayList<String>();
+    completors[0].complete("set f", 0, testList);
+    assertEquals("set", testList.get(0));
+
+  }
+
+  public void testRun() throws Exception {
+    // clean history
+    String historyDirectory = System.getProperty("user.home");
+    if ((new File(historyDirectory)).exists()) {
+      File historyFile = new File(historyDirectory + File.separator + ".hivehistory");
+      historyFile.delete();
+    }
+    HiveConf configuration = new HiveConf();
+    CliSessionState ss = new CliSessionState(configuration);
+    CliSessionState.start(ss);
+    String[] args = {};
+    PrintStream oldOut = System.out;
+    ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(dataOut));
+
+    PrintStream oldErr = System.err;
+    ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
+    System.setErr(new PrintStream(dataErr));
+
+
+    try {
+      new FakeCliDriver().run(args);
+      assertTrue(dataOut.toString().contains("test message"));
+      assertTrue(dataErr.toString().contains("Hive history file="));
+      assertTrue(dataErr.toString().contains("File: fakeFile is not a file."));
+      dataOut.reset();
+      dataErr.reset();
+
+    } finally {
+      System.setOut(oldOut);
+      System.setErr(oldErr);
+
+    }
+
+  }
+
+  /**
+   * Test commands exit and quit
+   */
+  public void testQuit() throws Exception {
+
+    CliSessionState ss = new CliSessionState(new HiveConf());
+    ss.err = System.err;
+    ss.out = System.out;
+
+    NoExitSecurityManager newSecurityManager = new NoExitSecurityManager();
+    try {
+      CliSessionState.start(ss);
+      CliDriver cliDriver = new CliDriver();
+      cliDriver.processCmd("quit");
+      fail("should be exit");
+    } catch (ExitException e) {
+      assertEquals(0, e.getStatus());
+
+    } catch (Exception e) {
+      newSecurityManager.resetSecurityManager();
+      throw e;
+    }
+
+    try {
+      CliSessionState.start(ss);
+      CliDriver cliDriver = new CliDriver();
+      cliDriver.processCmd("exit");
+      fail("should be exit");
+    } catch (ExitException e) {
+      assertEquals(0, e.getStatus());
+
+    } finally {
+      newSecurityManager.resetSecurityManager();
+    }
+
+  }
+
+  /**
+   * test remote execCommand
+   */
+  public void testRemoteCall() throws Exception {
+    MyCliSessionState ss = new MyCliSessionState(new HiveConf(),
+        org.apache.hadoop.hive.cli.TestCliDriverMethods.MyCliSessionState.ClientResult.RETURN_OK);
+    ss.err = System.err;
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    ss.out = new PrintStream(data);
+    MyCliSessionState.start(ss);
+
+    CliDriver cliDriver = new CliDriver();
+    cliDriver.processCmd("remote command");
+    assertTrue(data.toString().contains("test result"));
+
+  }
+
+  /**
+   * test remote Exception
+   */
+  public void testServerException() throws Exception {
+    MyCliSessionState ss = new MyCliSessionState(
+        new HiveConf(),
+        org.apache.hadoop.hive.cli.TestCliDriverMethods.MyCliSessionState.ClientResult.RETURN_SERVER_EXCEPTION);
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    ss.err = new PrintStream(data);
+    ss.out = System.out;
+    MyCliSessionState.start(ss);
+
+    CliDriver cliDriver = new CliDriver();
+    cliDriver.processCmd("remote command");
+    assertTrue(data.toString().contains("[Hive Error]: test HiveServerException"));
+    data.reset();
+
+
+  }
+
+  /**
+   * test remote thrift TException handling
+   */
+  public void testServerTException() throws Exception {
+    MyCliSessionState ss = new MyCliSessionState(
+        new HiveConf(),
+        org.apache.hadoop.hive.cli.TestCliDriverMethods.MyCliSessionState.ClientResult.RETURN_T_EXCEPTION);
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    ss.err = new PrintStream(data);
+    ss.out = System.out;
+    MyCliSessionState.start(ss);
+
+    CliDriver cliDriver = new CliDriver();
+    cliDriver.processCmd("remote command");
+    assertTrue(data.toString().contains("[Thrift Error]: test TException"));
+    assertTrue(data.toString().contains(
+        "[Thrift Error]: Hive server is not cleaned due to thrift exception: test TException"));
+
+  }
+
+  /**
+   * test processSelectDatabase with an invalid database name
+   */
+  public void testProcessSelectDatabase() throws Exception {
+    CliSessionState sessinState = new CliSessionState(new HiveConf());
+    CliSessionState.start(sessinState);
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    sessinState.err = new PrintStream(data);
+    sessinState.database = "database";
+    CliDriver driver = new CliDriver();
+    NoExitSecurityManager securityManager = new NoExitSecurityManager();
+    try {
+      driver.processSelectDatabase(sessinState);
+      fail("shuld be exit");
+    } catch (ExitException e) {
+      e.printStackTrace();
+      assertEquals(40000, e.getStatus());
+    } finally {
+      securityManager.resetSecurityManager();
+    }
+    assertTrue(data.toString().contains(
+        "FAILED: ParseException line 1:4 cannot recognize input near 'database'"));
+  }
+
+  public void testprocessInitFiles() throws Exception {
+    String oldHiveHome = System.getenv("HIVE_HOME");
+    String oldHiveConfDir = System.getenv("HIVE_CONF_DIR");
+
+    File homeFile = File.createTempFile("test", "hive");
+    String tmpDir = homeFile.getParentFile().getAbsoluteFile() + File.separator
+        + "TestCliDriverMethods";
+    homeFile.delete();
+    FileUtils.deleteDirectory(new File(tmpDir));
+    homeFile = new File(tmpDir + File.separator + "bin" + File.separator + CliDriver.HIVERCFILE);
+    homeFile.getParentFile().mkdirs();
+    homeFile.createNewFile();
+    FileUtils.write(homeFile, "-- init hive file for test ");
+    setEnv("HIVE_HOME", homeFile.getParentFile().getParentFile().getAbsolutePath());
+    setEnv("HIVE_CONF_DIR", homeFile.getParentFile().getAbsolutePath());
+    CliSessionState sessionState = new CliSessionState(new HiveConf());
+
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    NoExitSecurityManager securityManager = new NoExitSecurityManager();
+
+    sessionState.err = new PrintStream(data);
+    sessionState.out = System.out;
+    try {
+      CliSessionState.start(sessionState);
+      CliDriver cliDriver = new CliDriver();
+      cliDriver.processInitFiles(sessionState);
+      assertTrue(data.toString().contains(
+          "Putting the global hiverc in $HIVE_HOME/bin/.hiverc is deprecated. " +
+              "Please use $HIVE_CONF_DIR/.hiverc instead."));
+      FileUtils.write(homeFile, "bla bla bla");
+      // if init file contains incorrect row
+      try {
+        cliDriver.processInitFiles(sessionState);
+        fail("should be exit");
+      } catch (ExitException e) {
+        assertEquals(40000, e.getStatus());
+      }
+      setEnv("HIVE_HOME", null);
+      try {
+        cliDriver.processInitFiles(sessionState);
+        fail("should be exit");
+      } catch (ExitException e) {
+        assertEquals(40000, e.getStatus());
+      }
+
+    } finally {
+      // restore data
+      setEnv("HIVE_HOME", oldHiveHome);
+      setEnv("HIVE_CONF_DIR", oldHiveConfDir);
+      FileUtils.deleteDirectory(new File(tmpDir));
+    }
+
+    File f = File.createTempFile("hive", "test");
+    FileUtils.write(f, "bla bla bla");
+    try {
+      sessionState.initFiles = Arrays.asList(new String[] {f.getAbsolutePath()});
+      CliDriver cliDriver = new CliDriver();
+      cliDriver.processInitFiles(sessionState);
+      fail("should be exit");
+    } catch (ExitException e) {
+      assertEquals(40000, e.getStatus());
+      assertTrue(data.toString().contains("cannot recognize input near 'bla' 'bla' 'bla'"));
+
+    } finally {
+      securityManager.resetSecurityManager();
+    }
+  }
+
+
+  private static void setEnv(String key, String value) throws Exception {
+    Class[] classes = Collections.class.getDeclaredClasses();
+    Map<String, String> env = (Map<String, String>) System.getenv();
+    for (Class cl : classes) {
+      if ("java.util.Collections$UnmodifiableMap".equals(cl.getName())) {
+        Field field = cl.getDeclaredField("m");
+        field.setAccessible(true);
+        Object obj = field.get(env);
+        Map<String, String> map = (Map<String, String>) obj;
+        if (value == null) {
+          map.remove(key);
+        } else {
+          map.put(key, value);
+        }
+      }
+    }
+  }
+
+
+  private static class FakeCliDriver extends CliDriver {
+
+    @Override
+    protected ConsoleReader getConsoleReader() throws IOException {
+      ConsoleReader reslt = new FakeConsoleReader();
+      return reslt;
+    }
+
+  }
+
+  private static class FakeConsoleReader extends ConsoleReader {
+    private int counter = 0;
+    File temp = null;
+
+    public FakeConsoleReader() throws IOException {
+      super();
+
+    }
+
+    @Override
+    public String readLine(String prompt) throws IOException {
+      FileWriter writer;
+      switch (counter++) {
+      case 0:
+        return "!echo test message;";
+      case 1:
+        temp = File.createTempFile("hive", "test");
+        temp.deleteOnExit();
+        return "source  " + temp.getAbsolutePath() + ";";
+      case 2:
+        temp = File.createTempFile("hive", "test");
+        temp.deleteOnExit();
+        writer = new FileWriter(temp);
+        writer.write("bla bla bla");
+        writer.close();
+        return "list file file://" + temp.getAbsolutePath() + ";";
+      case 3:
+        return "!echo ";
+      case 4:
+        return "test message;";
+      case 5:
+        return "source  fakeFile;";
+      case 6:
+        temp = File.createTempFile("hive", "test");
+        temp.deleteOnExit();
+        writer = new FileWriter(temp);
+        writer.write("source  fakeFile;");
+        writer.close();
+        return "list file file://" + temp.getAbsolutePath() + ";";
+
+
+        // drop table over10k;
+      default:
+        return null;
+      }
+    }
+  }
+
+  private static class NoExitSecurityManager extends SecurityManager {
+
+    public SecurityManager parentSecurityManager;
+
+    public NoExitSecurityManager() {
+      super();
+      parentSecurityManager = System.getSecurityManager();
+      System.setSecurityManager(this);
+    }
+
+    @Override
+    public void checkPermission(Permission perm, Object context) {
+      if (parentSecurityManager != null) {
+        parentSecurityManager.checkPermission(perm, context);
+      }
+    }
+
+    @Override
+    public void checkPermission(Permission perm) {
+      if (parentSecurityManager != null) {
+        parentSecurityManager.checkPermission(perm);
+      }
+    }
+
+    @Override
+    public void checkExit(int status) {
+      throw new ExitException(status);
+    }
+
+    public void resetSecurityManager() {
+      System.setSecurityManager(parentSecurityManager);
+    }
+  }
+
+  private static class ExitException extends RuntimeException {
+    int status;
+
+    public ExitException(int status) {
+      this.status = status;
+    }
+
+    public int getStatus() {
+      return status;
+    }
+  }
+
+  private static class MyCliSessionState extends CliSessionState {
+
+    public enum ClientResult {
+      RETURN_OK, RETURN_SERVER_EXCEPTION, RETURN_T_EXCEPTION
+    };
+
+    private final ClientResult result;
+
+    public MyCliSessionState(HiveConf conf, ClientResult result) {
+      super(conf);
+      this.result = result;
+    }
+
+    @Override
+    public boolean isRemoteMode() {
+      return true;
+    }
+
+    @Override
+    public HiveClient getClient() {
+
+      HiveClient result = mock(HiveClient.class);
+      if (ClientResult.RETURN_OK.equals(this.result)) {
+        List<String> fetchResult = new ArrayList<String>(1);
+        fetchResult.add("test result");
+        try {
+          when(result.fetchN(anyInt())).thenReturn(fetchResult);
+        } catch (HiveServerException e) {
+        } catch (Exception e) {
+        }
+      } else if (ClientResult.RETURN_SERVER_EXCEPTION.equals(this.result)) {
+        HiveServerException exception = new HiveServerException("test HiveServerException", 10,
+            "sql state");
+        try {
+          when(result.fetchN(anyInt())).thenThrow(exception);
+
+          when(result.fetchN(anyInt())).thenThrow(exception);
+        } catch (TException e) {
+          ;
+        }
+        return result;
+      } else if (ClientResult.RETURN_T_EXCEPTION.equals(this.result)) {
+        TException exception = new TException("test TException");
+        try {
+          // org.mockito.Mockito.
+          doThrow(exception).when(result).clean();
+          when(result.fetchN(anyInt())).thenThrow(exception);
+        } catch (TException e) {
+          e.printStackTrace();
+        }
+        return result;
+      }
+      return result;
+    }
+
+  }
+
+
 }

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Mon Aug  5 22:31:28 2013
@@ -507,7 +507,6 @@ public class HiveConf extends Configurat
     HIVECONVERTJOINNOCONDITIONALTASK("hive.auto.convert.join.noconditionaltask", true),
     HIVECONVERTJOINNOCONDITIONALTASKTHRESHOLD("hive.auto.convert.join.noconditionaltask.size",
         10000000L),
-    HIVEOPTIMIZEMAPJOINFOLLOWEDBYMR("hive.optimize.mapjoin.mapreduce", false),
     HIVESKEWJOINKEY("hive.skewjoin.key", 100000),
     HIVESKEWJOINMAPJOINNUMMAPTASK("hive.skewjoin.mapjoin.map.tasks", 10000),
     HIVESKEWJOINMAPJOINMINSPLIT("hive.skewjoin.mapjoin.min.split", 33554432L), //32M

Modified: hive/branches/tez/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/branches/tez/conf/hive-default.xml.template?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/conf/hive-default.xml.template (original)
+++ hive/branches/tez/conf/hive-default.xml.template Mon Aug  5 22:31:28 2013
@@ -860,16 +860,6 @@
 </property>
 
 <property>
-  <name>hive.optimize.mapjoin.mapreduce</name>
-  <value>false</value>
-  <description>If hive.auto.convert.join is off, this parameter does not take
-    affect. If it is on, and if there are map-join jobs followed by a map-reduce
-    job (for e.g a group by), each map-only job is merged with the following
-    map-reduce job.
-  </description>
-</property>
-
-<property>
   <name>hive.script.auto.progress</name>
   <value>false</value>
   <description>Whether Hive Tranform/Map/Reduce Clause should automatically send progress information to TaskTracker to avoid the task getting killed because of inactivity.  Hive sends progress information when the script is outputting to stderr.  This option removes the need of periodically producing stderr messages, but users should be cautious because this may prevent infinite loops in the scripts to be killed by TaskTracker.  </description>

Modified: hive/branches/tez/data/files/csv.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/data/files/csv.txt?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/data/files/csv.txt (original)
+++ hive/branches/tez/data/files/csv.txt Mon Aug  5 22:31:28 2013
@@ -1,18 +1,18 @@
-why hello there,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-another record,98,4,101,9999999,false,99.89,0.00000009,beta,Earth#101,1134:false:wazzup,RED,\N,6:7:8:9:10,54:55:56
-third record,45,5,102,999999999,true,89.99,0.00000000000009,alpha:gamma,Earth#237:Bob#723,102:false:BNL,GREEN,\N,11:12:13,57:58:59
-\N,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,\N,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,\N,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,3,\N,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,3,100,\N,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,3,100,1412341,\N,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,3,100,1412341,true,\N,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,3,100,1412341,true,42.43,\N,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,3,100,1412341,true,42.43,85.23423424,\N,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,\N,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,\N,BLUE,72,0:1:2:3:4:5,50:51:53
-string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,\N,72,0:1:2:3:4:5,50:51:53
-string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,\N,0:1:2:3:4:5,50:51:53
-string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,\N,50:51:53
-string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,0:1:2:3:4:5,\N
+why hello there,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+another record,98,4,101,9999999,false,99.89,0.00000009,beta,Earth#101,1134:false:wazzup,RED,\N,,ef
+third record,45,5,102,999999999,true,89.99,0.00000000000009,alpha:gamma,Earth#237:Bob#723,102:false:BNL,GREEN,\N,,hi
+\N,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+string,\N,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+string,42,\N,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+string,42,3,\N,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+string,42,3,100,\N,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+string,42,3,100,1412341,\N,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+string,42,3,100,1412341,true,\N,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+string,42,3,100,1412341,true,42.43,\N,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+string,42,3,100,1412341,true,42.43,85.23423424,\N,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,bc
+string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,\N,17:true:Abe Linkedin,BLUE,72,,bc
+string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,\N,BLUE,72,,bc
+string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,\N,72,,bc
+string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,\N,,bc
+string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,\N,bc
+string,42,3,100,1412341,true,42.43,85.23423424,alpha:beta:gamma,Earth#42:Control#86:Bob#31,17:true:Abe Linkedin,BLUE,72,,\N

Modified: hive/branches/tez/eclipse-templates/.classpath
URL: http://svn.apache.org/viewvc/hive/branches/tez/eclipse-templates/.classpath?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/eclipse-templates/.classpath (original)
+++ hive/branches/tez/eclipse-templates/.classpath Mon Aug  5 22:31:28 2013
@@ -49,7 +49,6 @@
   <classpathentry kind="lib" path="build/ivy/lib/default/avro-mapred-@avro.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/jline-@jline.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/json-@json.version@.jar"/>
-  <classpathentry kind="lib" path="build/ivy/lib/default/asm-@asm.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/commons-compress-@commons-compress.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/commons-lang-@commons-lang.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/commons-logging-@commons-logging.version@.jar"/>

Modified: hive/branches/tez/eclipse-templates/.classpath._hbase
URL: http://svn.apache.org/viewvc/hive/branches/tez/eclipse-templates/.classpath._hbase?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/eclipse-templates/.classpath._hbase (original)
+++ hive/branches/tez/eclipse-templates/.classpath._hbase Mon Aug  5 22:31:28 2013
@@ -25,7 +25,6 @@
 	<classpathentry exported="true" kind="lib" path="build/hadoopcore/hadoop-@HADOOPVER@/lib/@JETTYUTILJAR@"/>
 	<classpathentry exported="true" kind="lib" path="cli/lib/jline-@jline.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/json.jar"/>
-	<classpathentry exported="true" kind="lib" path="lib/asm-@asm.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/commons-cli-@commons-cli.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/commons-codec-@commons-code.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/commons-lang-@commons-lang.version@.jar"/>

Modified: hive/branches/tez/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java (original)
+++ hive/branches/tez/hcatalog/storage-handlers/hbase/src/java/org/apache/hcatalog/hbase/snapshot/RevisionManagerFactory.java Mon Aug  5 22:31:28 2013
@@ -75,7 +75,7 @@ public class RevisionManagerFactory {
      * Internally used by endpoint implementation to instantiate from different configuration setting.
      * @param className
      * @param conf
-     * @return
+     * @return the opened revision manager
      * @throws IOException
      */
     static RevisionManager getOpenedRevisionManager(String className, Configuration conf) throws IOException {

Modified: hive/branches/tez/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/ivy/libraries.properties?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ivy/libraries.properties (original)
+++ hive/branches/tez/ivy/libraries.properties Mon Aug  5 22:31:28 2013
@@ -22,7 +22,6 @@ ant-contrib.version=1.0b3
 ant-task.version=2.0.10
 antlr.version=3.4
 antlr-runtime.version=3.4
-asm.version=3.1
 avro.version=1.7.1
 datanucleus-api-jdo.version=3.2.1
 datanucleus-core.version=3.2.2

Modified: hive/branches/tez/metastore/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/ivy.xml?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/metastore/ivy.xml (original)
+++ hive/branches/tez/metastore/ivy.xml Mon Aug  5 22:31:28 2013
@@ -45,7 +45,6 @@
     <dependency org="javax.jdo" name="jdo-api" rev="${jdo-api.version}"
                 transitive="false"/>
     <dependency org="org.apache.derby" name="derby" rev="${derby.version}"/>
-    <dependency org="asm" name="asm" rev="${asm.version}"/>
 
   </dependencies>
 </ivy-module>

Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java Mon Aug  5 22:31:28 2013
@@ -17,9 +17,7 @@
  */
 package org.apache.hadoop.hive.metastore.parser;
 
-import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
@@ -28,9 +26,9 @@ import org.antlr.runtime.ANTLRStringStre
 import org.antlr.runtime.CharStream;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 
 import com.google.common.collect.Sets;
 
@@ -158,6 +156,7 @@ public class ExpressionTree {
   public static class LeafNode extends TreeNode {
     public String keyName;
     public Operator operator;
+    /** Constant expression side of the operator. Can currently be a String or a Long. */
     public Object value;
     public boolean isReverseOrder = false;
     private static final String PARAM_PREFIX = "hive_filter_param_";
@@ -196,7 +195,7 @@ public class ExpressionTree {
         String paramKeyName = keyName.substring(hive_metastoreConstants.HIVE_FILTER_FIELD_PARAMS.length());
         keyName = "this.parameters.get(\"" + paramKeyName + "\")";
         //value is persisted as a string in the db, so make sure it's a string here
-        // in case we get an integer.
+        // in case we get a long.
         value = value.toString();
       } else {
         throw new MetaException("Invalid key name in filter.  " +
@@ -210,8 +209,8 @@ public class ExpressionTree {
      * generates a statement of the form:
      * key1 operator value2 (&& | || ) key2 operator value2 ...
      *
-     * Currently supported types for value are String and Integer.
-     * The LIKE operator for Integers is unsupported.
+     * Currently supported types for value are String and Long.
+     * The LIKE operator for Longs is unsupported.
      */
     private String generateJDOFilterGeneral(Map<String, Object> params)
         throws MetaException {
@@ -257,23 +256,37 @@ public class ExpressionTree {
             "> is not a partitioning key for the table");
       }
 
-      //Can only support partitions whose types are string
-      if( ! table.getPartitionKeys().get(partitionColumnIndex).
-          getType().equals(org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME) ) {
-        throw new MetaException
-        ("Filtering is supported only on partition keys of type string");
+      String keyType = table.getPartitionKeys().get(partitionColumnIndex).getType();
+      boolean isIntegralSupported = doesOperatorSupportIntegral(operator);
+
+      // Can only support partitions whose types are string, or maybe integers
+      if (!keyType.equals(org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME)
+          && (!isIntegralSupported || !isIntegralType(keyType))) {
+        throw new MetaException("Filtering is supported only on partition keys of type " +
+            "string" + (isIntegralSupported ? ", or integral types" : ""));
+      }
+
+      boolean isStringValue = value instanceof String;
+      if (!isStringValue && (!isIntegralSupported || !(value instanceof Long))) {
+        throw new MetaException("Filtering is supported only on partition keys of type " +
+            "string" + (isIntegralSupported ? ", or integral types" : ""));
       }
 
-      String valueParam = null;
+      String valueAsString = null;
       try {
-        valueParam = (String) value;
+        valueAsString = isStringValue ? (String) value : Long.toString((Long) value);
       } catch (ClassCastException e) {
-        throw new MetaException("Filtering is supported only on partition keys of type string");
+        throw new MetaException("Unable to cast the constexpr to "
+            + (isStringValue ? "string" : "long"));
       }
 
       String paramName = PARAM_PREFIX + params.size();
-      params.put(paramName, valueParam);
-      String filter;
+      params.put(paramName, valueAsString);
+      boolean isOpEquals = operator == Operator.EQUALS;
+      if (isOpEquals || operator == Operator.NOTEQUALS || operator == Operator.NOTEQUALS2) {
+        return makeFilterForEquals(keyName, valueAsString, paramName, params,
+            partitionColumnIndex, partitionColumnCount, isOpEquals);
+      }
 
       String keyEqual = FileUtils.escapePathName(keyName) + "=";
       int keyEqualLength = keyEqual.length();
@@ -286,43 +299,52 @@ public class ExpressionTree {
         valString = "partitionName.substring(partitionName.indexOf(\"" + keyEqual + "\")+" + keyEqualLength + ").substring(0, partitionName.substring(partitionName.indexOf(\"" + keyEqual + "\")+" + keyEqualLength + ").indexOf(\"/\"))";
       }
 
-      //Handle "a > 10" and "10 > a" appropriately
-      if (isReverseOrder){
-        //For LIKE, the value should be on the RHS
-        if( operator == Operator.LIKE ) {
+      if (operator == Operator.LIKE) {
+        if (isReverseOrder) {
+          //For LIKE, the value should be on the RHS
           throw new MetaException(
-              "Value should be on the RHS for LIKE operator : " +
-              "Key <" + keyName + ">");
-        } else if (operator == Operator.EQUALS) {
-          filter = makeFilterForEquals(keyName, valueParam, paramName, params,
-              partitionColumnIndex, partitionColumnCount);
-        } else {
-          filter = paramName +
-          " " + operator.getJdoOp() + " " + valString;
-        }
-      } else {
-        if (operator == Operator.LIKE ) {
-          //generate this.values.get(i).matches("abc%")
-          filter = " " + valString + "."
-              + operator.getJdoOp() + "(" + paramName + ") ";
-        } else if (operator == Operator.EQUALS) {
-          filter = makeFilterForEquals(keyName, valueParam, paramName, params,
-              partitionColumnIndex, partitionColumnCount);
-        } else {
-          filter = " " + valString + " "
-              + operator.getJdoOp() + " " + paramName;
+              "Value should be on the RHS for LIKE operator : Key <" + keyName + ">");
         }
+        //generate this.values.get(i).matches("abc%")
+        return " " + valString + "." + operator.getJdoOp() + "(" + paramName + ") ";
       }
-      return filter;
+
+      // TODO: support for other ops for numbers to be handled in HIVE-4888.
+      return isReverseOrder
+          ? paramName + " " + operator.getJdoOp() + " " + valString
+          : " " + valString + " " + operator.getJdoOp() + " " + paramName;
+    }
+
+    /**
+     * @param operator operator
+     * @return true iff filter pushdown for this operator can be done for integral types.
+     */
+    private static boolean doesOperatorSupportIntegral(Operator operator) {
+      return (operator == Operator.EQUALS)
+          || (operator == Operator.NOTEQUALS)
+          || (operator == Operator.NOTEQUALS2);
+    }
+
+    /**
+     * @param type type
+     * @return true iff type is an integral type.
+     */
+    private static boolean isIntegralType(String type) {
+      return type.equals(org.apache.hadoop.hive.serde.serdeConstants.TINYINT_TYPE_NAME)
+          || type.equals(org.apache.hadoop.hive.serde.serdeConstants.SMALLINT_TYPE_NAME)
+          || type.equals(org.apache.hadoop.hive.serde.serdeConstants.INT_TYPE_NAME)
+          || type.equals(org.apache.hadoop.hive.serde.serdeConstants.BIGINT_TYPE_NAME);
     }
   }
 
   /**
-   * For equals, we can make the JDO query much faster by filtering based on the
-   * partition name. For a condition like ds="2010-10-01", we can see if there
-   * are any partitions with a name that contains the substring "ds=2010-10-01/"
+   * For equals and not-equals, we can make the JDO query much faster by filtering
+   * based on the partition name. For a condition like ds="2010-10-01", we can see
+   * if there are any partitions with a name that contains the substring "ds=2010-10-01/"
    * False matches aren't possible since "=" is escaped for partition names
    * and the trailing '/' ensures that we won't get a match with ds=2010-10-011
+   * Note that filters on integral-type equality also work correctly by virtue of
+   * being compared as part of the ds=1234 partition-name string.
    *
    * Two cases to keep in mind: Case with only one partition column (no '/'s)
    * Case where the partition key column is at the end of the name. (no
@@ -332,11 +354,12 @@ public class ExpressionTree {
    * @param value
    * @param paramName name of the parameter to use for JDOQL
    * @param params a map from the parameter name to their values
+   * @param isEq whether the operator is equals, or not-equals.
    * @return
    * @throws MetaException
    */
-  private static String makeFilterForEquals(String keyName, String value,
-      String paramName, Map<String, Object> params, int keyPos, int keyCount)
+  private static String makeFilterForEquals(String keyName, String value, String paramName,
+      Map<String, Object> params, int keyPos, int keyCount, boolean isEq)
       throws MetaException {
     Map<String, String> partKeyToVal = new HashMap<String, String>();
     partKeyToVal.put(keyName, value);
@@ -348,22 +371,25 @@ public class ExpressionTree {
     if (keyCount == 1) {
       // Case where there are no other partition columns
       params.put(paramName, escapedNameFragment);
-      fltr.append("partitionName == ").append(paramName);
+      fltr.append("partitionName ").append(isEq ? "== " : "!= ").append(paramName);
     } else if (keyPos + 1 == keyCount) {
       // Case where the partition column is at the end of the name. There will
       // be a leading '/' but no trailing '/'
       params.put(paramName, "/" + escapedNameFragment);
-      fltr.append("partitionName.endsWith(").append(paramName).append(')');
+      fltr.append(isEq ? "" : "!").append("partitionName.endsWith(")
+        .append(paramName).append(')');
     } else if (keyPos == 0) {
+      // Case where the partition column is at the beginning of the name. There will
       // be a trailing '/' but no leading '/'
       params.put(paramName, escapedNameFragment + "/");
-      fltr.append("partitionName.startsWith(").append(paramName).append(')');
+      fltr.append(isEq ? "" : "!").append("partitionName.startsWith(")
+        .append(paramName).append(')');
     } else {
       // Case where the partition column is in the middle of the name. There will
       // be a leading '/' and a trailing '/'
       params.put(paramName, "/" + escapedNameFragment + "/");
-      fltr.append("partitionName.indexOf(").append(paramName).append(") >= 0");
+      fltr.append("partitionName.indexOf(").append(paramName).append(")")
+        .append(isEq ? ">= 0" : "< 0");
     }
     return fltr.toString();
   }

Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/parser/Filter.g Mon Aug  5 22:31:28 2013
@@ -100,10 +100,10 @@ operatorExpression 
        ) { val = TrimQuotes(value.getText()); }
        |
        (
-	       (key = Identifier op = operator value = IntLiteral)
+	       (key = Identifier op = operator value = IntegralLiteral)
 	       |
-	       (value = IntLiteral op = operator key = Identifier) { isReverseOrder = true; }
-       ) { val = Integer.parseInt(value.getText()); }
+	       (value = IntegralLiteral op = operator key = Identifier) { isReverseOrder = true; }
+       ) { val = Long.parseLong(value.getText()); }
     )
     {
         LeafNode node = new LeafNode();
@@ -157,9 +157,9 @@ StringLiteral
     ;
 
 
-IntLiteral
+IntegralLiteral
     :
-    (Digit)+
+    ('-')? (Digit)+
     ;
 
 Identifier

Modified: hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (original)
+++ hive/branches/tez/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java Mon Aug  5 22:31:28 2013
@@ -64,6 +64,8 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.util.StringUtils;
 import org.apache.thrift.TException;
 
+import com.google.common.collect.Lists;
+
 public abstract class TestHiveMetaStore extends TestCase {
   protected static HiveMetaStoreClient client;
   protected static HiveConf hiveConf;
@@ -1919,31 +1921,6 @@ public abstract class TestHiveMetaStore 
     String dbName = "filterdb";
     String tblName = "filtertbl";
 
-    List<String> vals = new ArrayList<String>(3);
-    vals.add("p11");
-    vals.add("p21");
-    vals.add("p31");
-    List <String> vals2 = new ArrayList<String>(3);
-    vals2.add("p11");
-    vals2.add("p22");
-    vals2.add("p31");
-    List <String> vals3 = new ArrayList<String>(3);
-    vals3.add("p12");
-    vals3.add("p21");
-    vals3.add("p31");
-    List <String> vals4 = new ArrayList<String>(3);
-    vals4.add("p12");
-    vals4.add("p23");
-    vals4.add("p31");
-    List <String> vals5 = new ArrayList<String>(3);
-    vals5.add("p13");
-    vals5.add("p24");
-    vals5.add("p31");
-    List <String> vals6 = new ArrayList<String>(3);
-    vals6.add("p13");
-    vals6.add("p25");
-    vals6.add("p31");
-
     silentDropDatabase(dbName);
 
     Database db = new Database();
@@ -1981,21 +1958,49 @@ public abstract class TestHiveMetaStore 
 
     tbl = client.getTable(dbName, tblName);
 
-    add_partition(client, tbl, vals, "part1");
-    add_partition(client, tbl, vals2, "part2");
-    add_partition(client, tbl, vals3, "part3");
-    add_partition(client, tbl, vals4, "part4");
-    add_partition(client, tbl, vals5, "part5");
-    add_partition(client, tbl, vals6, "part6");
+    add_partition(client, tbl, Lists.newArrayList("p11", "p21", "31"), "part1");
+    add_partition(client, tbl, Lists.newArrayList("p11", "p22", "32"), "part2");
+    add_partition(client, tbl, Lists.newArrayList("p12", "p21", "31"), "part3");
+    add_partition(client, tbl, Lists.newArrayList("p12", "p23", "32"), "part4");
+    add_partition(client, tbl, Lists.newArrayList("p13", "p24", "31"), "part5");
+    add_partition(client, tbl, Lists.newArrayList("p13", "p25", "-33"), "part6");
 
+    // Test equals operator for strings and integers.
     checkFilter(client, dbName, tblName, "p1 = \"p11\"", 2);
     checkFilter(client, dbName, tblName, "p1 = \"p12\"", 2);
     checkFilter(client, dbName, tblName, "p2 = \"p21\"", 2);
     checkFilter(client, dbName, tblName, "p2 = \"p23\"", 1);
+    checkFilter(client, dbName, tblName, "p3 = 31", 3);
+    checkFilter(client, dbName, tblName, "p3 = 33", 0);
+    checkFilter(client, dbName, tblName, "p3 = -33", 1);
     checkFilter(client, dbName, tblName, "p1 = \"p11\" and p2=\"p22\"", 1);
     checkFilter(client, dbName, tblName, "p1 = \"p11\" or p2=\"p23\"", 3);
     checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
+    checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
+    checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
+    checkFilter(client, dbName, tblName, "p1 = \"p11\" and p3 = 31", 1);
+    checkFilter(client, dbName, tblName, "p3 = -33 or p1 = \"p12\"", 3);
 
+    // Test not-equals operator for strings and integers.
+    checkFilter(client, dbName, tblName, "p1 != \"p11\"", 4);
+    checkFilter(client, dbName, tblName, "p2 != \"p23\"", 5);
+    checkFilter(client, dbName, tblName, "p2 != \"p33\"", 6);
+    checkFilter(client, dbName, tblName, "p3 != 32", 4);
+    checkFilter(client, dbName, tblName, "p3 != 8589934592", 6);
+    checkFilter(client, dbName, tblName, "p1 != \"p11\" and p1 != \"p12\"", 2);
+    checkFilter(client, dbName, tblName, "p1 != \"p11\" and p2 != \"p22\"", 4);
+    checkFilter(client, dbName, tblName, "p1 != \"p11\" or p2 != \"p22\"", 5);
+    checkFilter(client, dbName, tblName, "p1 != \"p12\" and p2 != \"p25\"", 3);
+    checkFilter(client, dbName, tblName, "p1 != \"p12\" or p2 != \"p25\"", 6);
+    checkFilter(client, dbName, tblName, "p3 != -33 or p1 != \"p13\"", 5);
+    checkFilter(client, dbName, tblName, "p1 != \"p11\" and p3 = 31", 2);
+    checkFilter(client, dbName, tblName, "p3 != 31 and p1 = \"p12\"", 1);
+
+    // Test reverse order.
+    checkFilter(client, dbName, tblName, "31 != p3 and p1 = \"p12\"", 1);
+    checkFilter(client, dbName, tblName, "\"p23\" = p2", 1);
+
+    // Test and/or more...
     checkFilter(client, dbName, tblName,
         "p1 = \"p11\" or (p1=\"p12\" and p2=\"p21\")", 3);
     checkFilter(client, dbName, tblName,
@@ -2007,11 +2012,11 @@ public abstract class TestHiveMetaStore 
     checkFilter(client, dbName, tblName,
        "p1=\"p12\" and p2=\"p27\" Or p2=\"p21\"", 2);
 
+    // Test gt/lt/lte/gte/like for strings.
     checkFilter(client, dbName, tblName, "p1 > \"p12\"", 2);
     checkFilter(client, dbName, tblName, "p1 >= \"p12\"", 4);
     checkFilter(client, dbName, tblName, "p1 < \"p12\"", 2);
     checkFilter(client, dbName, tblName, "p1 <= \"p12\"", 4);
-    checkFilter(client, dbName, tblName, "p1 <> \"p12\"", 4);
     checkFilter(client, dbName, tblName, "p1 like \"p1.*\"", 6);
     checkFilter(client, dbName, tblName, "p2 like \"p.*3\"", 1);
 
@@ -2033,6 +2038,17 @@ public abstract class TestHiveMetaStore 
     assertTrue("Filter on int partition key", me.getMessage().contains(
           "Filtering is supported only on partition keys of type string"));
 
+    try {
+      client.listPartitionsByFilter(dbName,
+          tblName, "p3 >= 31", (short) -1);
+    } catch(MetaException e) {
+      me = e;
+    }
+    assertNotNull(me);
+    assertTrue("Filter on int partition key", me.getMessage().contains(
+          "Filtering is supported only on partition keys of type string"));
+
+
     me = null;
     try {
       client.listPartitionsByFilter(dbName,

Modified: hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp (original)
+++ hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp Mon Aug  5 22:31:28 2013
@@ -49,7 +49,9 @@ int _kOperatorTypeValues[] = {
   OperatorType::LATERALVIEWFORWARD,
   OperatorType::HASHTABLESINK,
   OperatorType::HASHTABLEDUMMY,
-  OperatorType::PTF
+  OperatorType::PTF,
+  OperatorType::MUX,
+  OperatorType::DEMUX
 };
 const char* _kOperatorTypeNames[] = {
   "JOIN",
@@ -70,9 +72,11 @@ const char* _kOperatorTypeNames[] = {
   "LATERALVIEWFORWARD",
   "HASHTABLESINK",
   "HASHTABLEDUMMY",
-  "PTF"
+  "PTF",
+  "MUX",
+  "DEMUX"
 };
-const std::map<int, const char*> _OperatorType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(19, _kOperatorTypeValues, _kOperatorTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+const std::map<int, const char*> _OperatorType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(21, _kOperatorTypeValues, _kOperatorTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
 int _kTaskTypeValues[] = {
   TaskType::MAP,

Modified: hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.h
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.h?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.h (original)
+++ hive/branches/tez/ql/src/gen/thrift/gen-cpp/queryplan_types.h Mon Aug  5 22:31:28 2013
@@ -54,7 +54,9 @@ struct OperatorType {
     LATERALVIEWFORWARD = 15,
     HASHTABLESINK = 16,
     HASHTABLEDUMMY = 17,
-    PTF = 18
+    PTF = 18,
+    MUX = 19,
+    DEMUX = 20
   };
 };
 

Modified: hive/branches/tez/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java (original)
+++ hive/branches/tez/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java Mon Aug  5 22:31:28 2013
@@ -51,7 +51,7 @@ public enum OperatorType implements org.
    * Find a the enum type by its integer value, as defined in the Thrift IDL.
    * @return null if the value is not found.
    */
-  public static OperatorType findByValue(int value) {
+  public static OperatorType findByValue(int value) { 
     switch (value) {
       case 0:
         return JOIN;

Modified: hive/branches/tez/ql/src/gen/thrift/gen-php/Types.php
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/thrift/gen-php/Types.php?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/thrift/gen-php/Types.php (original)
+++ hive/branches/tez/ql/src/gen/thrift/gen-php/Types.php Mon Aug  5 22:31:28 2013
@@ -54,6 +54,8 @@ final class OperatorType {
   const HASHTABLESINK = 16;
   const HASHTABLEDUMMY = 17;
   const PTF = 18;
+  const MUX = 19;
+  const DEMUX = 20;
   static public $__names = array(
     0 => 'JOIN',
     1 => 'MAPJOIN',
@@ -74,6 +76,8 @@ final class OperatorType {
     16 => 'HASHTABLESINK',
     17 => 'HASHTABLEDUMMY',
     18 => 'PTF',
+    19 => 'MUX',
+    20 => 'DEMUX',
   );
 }
 

Modified: hive/branches/tez/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/thrift/gen-py/queryplan/ttypes.py?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/thrift/gen-py/queryplan/ttypes.py (original)
+++ hive/branches/tez/ql/src/gen/thrift/gen-py/queryplan/ttypes.py Mon Aug  5 22:31:28 2013
@@ -64,6 +64,8 @@ class OperatorType:
   HASHTABLESINK = 16
   HASHTABLEDUMMY = 17
   PTF = 18
+  MUX = 19
+  DEMUX = 20
 
   _VALUES_TO_NAMES = {
     0: "JOIN",
@@ -85,6 +87,8 @@ class OperatorType:
     16: "HASHTABLESINK",
     17: "HASHTABLEDUMMY",
     18: "PTF",
+    19: "MUX",
+    20: "DEMUX",
   }
 
   _NAMES_TO_VALUES = {
@@ -107,6 +111,8 @@ class OperatorType:
     "HASHTABLESINK": 16,
     "HASHTABLEDUMMY": 17,
     "PTF": 18,
+    "MUX": 19,
+    "DEMUX": 20,
   }
 
 class TaskType:

Modified: hive/branches/tez/ql/src/gen/thrift/gen-rb/queryplan_types.rb
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/gen/thrift/gen-rb/queryplan_types.rb?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ql/src/gen/thrift/gen-rb/queryplan_types.rb (original)
+++ hive/branches/tez/ql/src/gen/thrift/gen-rb/queryplan_types.rb Mon Aug  5 22:31:28 2013
@@ -40,8 +40,10 @@ module OperatorType
   HASHTABLESINK = 16
   HASHTABLEDUMMY = 17
   PTF = 18
-  VALUE_MAP = {0 => "JOIN", 1 => "MAPJOIN", 2 => "EXTRACT", 3 => "FILTER", 4 => "FORWARD", 5 => "GROUPBY", 6 => "LIMIT", 7 => "SCRIPT", 8 => "SELECT", 9 => "TABLESCAN", 10 => "FILESINK", 11 => "REDUCESINK", 12 => "UNION", 13 => "UDTF", 14 => "LATERALVIEWJOIN", 15 => "LATERALVIEWFORWARD", 16 => "HASHTABLESINK", 17 => "HASHTABLEDUMMY", 18 => "PTF"}
-  VALID_VALUES = Set.new([JOIN, MAPJOIN, EXTRACT, FILTER, FORWARD, GROUPBY, LIMIT, SCRIPT, SELECT, TABLESCAN, FILESINK, REDUCESINK, UNION, UDTF, LATERALVIEWJOIN, LATERALVIEWFORWARD, HASHTABLESINK, HASHTABLEDUMMY, PTF]).freeze
+  MUX = 19
+  DEMUX = 20
+  VALUE_MAP = {0 => "JOIN", 1 => "MAPJOIN", 2 => "EXTRACT", 3 => "FILTER", 4 => "FORWARD", 5 => "GROUPBY", 6 => "LIMIT", 7 => "SCRIPT", 8 => "SELECT", 9 => "TABLESCAN", 10 => "FILESINK", 11 => "REDUCESINK", 12 => "UNION", 13 => "UDTF", 14 => "LATERALVIEWJOIN", 15 => "LATERALVIEWFORWARD", 16 => "HASHTABLESINK", 17 => "HASHTABLEDUMMY", 18 => "PTF", 19 => "MUX", 20 => "DEMUX"}
+  VALID_VALUES = Set.new([JOIN, MAPJOIN, EXTRACT, FILTER, FORWARD, GROUPBY, LIMIT, SCRIPT, SELECT, TABLESCAN, FILESINK, REDUCESINK, UNION, UDTF, LATERALVIEWJOIN, LATERALVIEWFORWARD, HASHTABLESINK, HASHTABLEDUMMY, PTF, MUX, DEMUX]).freeze
 end
 
 module TaskType

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/Context.java Mon Aug  5 22:31:28 2013
@@ -563,6 +563,10 @@ public class Context {
     pathToCS.put(path, cs);
   }
 
+  public ContentSummary getCS(Path path) {
+    return getCS(path.toString());
+  }
+
   public ContentSummary getCS(String path) {
     return pathToCS.get(path);
   }
@@ -575,7 +579,6 @@ public class Context {
     return conf;
   }
 
-
   /**
    * Given a mapping from paths to objects, localize any MR tmp paths
    * @param map mapping from paths to objects

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java Mon Aug  5 22:31:28 2013
@@ -112,8 +112,7 @@ public abstract class CommonJoinOperator
   // to RowContainer
   int joinEmitInterval = -1;
   int joinCacheSize = 0;
-  int nextSz = 0;
-  transient Byte lastAlias = null;
+  long nextSz = 0;
 
   transient boolean handleSkewJoin = false;
 
@@ -251,6 +250,7 @@ public abstract class CommonJoinOperator
 
     joinEmitInterval = HiveConf.getIntVar(hconf,
         HiveConf.ConfVars.HIVEJOINEMITINTERVAL);
+    nextSz = joinEmitInterval;
     joinCacheSize = HiveConf.getIntVar(hconf,
         HiveConf.ConfVars.HIVEJOINCACHESIZE);
 
@@ -334,7 +334,7 @@ public abstract class CommonJoinOperator
     super.startGroup();
   }
 
-  protected int getNextSize(int sz) {
+  protected long getNextSize(long sz) {
     // A very simple counter to keep track of join entries for a key
     if (sz >= 100000) {
       return sz + 100000;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java?rev=1510788&r1=1510787&r2=1510788&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java Mon Aug  5 22:31:28 2013
@@ -88,6 +88,9 @@ public class DemuxOperator extends Opera
 
   private int childrenDone;
 
+  // The index of the child which the last row was forwarded to in a key group.
+  private int lastChildIndex;
+
   // Since DemuxOperator may appear multiple times in MuxOperator's parents list.
   // We use newChildIndexTag instead of childOperatorsTag.
   // Example:
@@ -227,18 +230,26 @@ public class DemuxOperator extends Opera
 
   @Override
   public void processOp(Object row, int tag) throws HiveException {
-    int childIndex = newTagToChildIndex.get(tag);
+    int currentChildIndex = newTagToChildIndex.get(tag);
+
+    // Check if we start to forward rows to a new child.
+    // If so, in the current key group, rows will not be forwarded
+    // to those children which have an index less than the currentChildIndex.
+    // We can flush the buffers of the children from lastChildIndex (inclusive)
+    // to currentChildIndex (exclusive) and propagate processGroup to those children.
+    endGroupIfNecessary(currentChildIndex);
+
     int oldTag = newTagToOldTag.get(tag);
     if (isLogInfoEnabled) {
       cntrs[tag]++;
       if (cntrs[tag] == nextCntrs[tag]) {
-        LOG.info(id + " (newTag, childIndex, oldTag)=(" + tag + ", " + childIndex + ", "
+        LOG.info(id + " (newTag, childIndex, oldTag)=(" + tag + ", " + currentChildIndex + ", "
             + oldTag + "), forwarding " + cntrs[tag] + " rows");
         nextCntrs[tag] = getNextCntr(cntrs[tag]);
       }
     }
 
-    Operator<? extends OperatorDesc> child = childOperatorsArray[childIndex];
+    Operator<? extends OperatorDesc> child = childOperatorsArray[currentChildIndex];
     if (child.getDone()) {
       childrenDone++;
     } else {
@@ -270,6 +281,36 @@ public class DemuxOperator extends Opera
     }
   }
 
+  /**
+   * We assume that the input rows associated with the same key are ordered by
+   * the tag. Because a tag maps to a childIndex, when we see a new childIndex,
+   * we will not see the last childIndex (lastChildIndex) again before we start
+   * a new key group. So, we can flush the buffers of the children
+   * from lastChildIndex (inclusive) to currentChildIndex (exclusive) and
+   * propagate processGroup to those children.
+   * @param currentChildIndex the childIndex we have right now.
+   * @throws HiveException
+   */
+  private void endGroupIfNecessary(int currentChildIndex) throws HiveException {
+    if (lastChildIndex != currentChildIndex) {
+      for (int i = lastChildIndex; i < currentChildIndex; i++) {
+        Operator<? extends OperatorDesc> child = childOperatorsArray[i];
+        child.flush();
+        child.endGroup();
+        for (Integer childTag: newChildOperatorsTag.get(i)) {
+          child.processGroup(childTag);
+        }
+      }
+      lastChildIndex = currentChildIndex;
+    }
+  }
+
+  @Override
+  public void startGroup() throws HiveException {
+    lastChildIndex = 0;
+    super.startGroup();
+  }
+
   @Override
   public void endGroup() throws HiveException {
     if (childOperators == null) {
@@ -280,7 +321,10 @@ public class DemuxOperator extends Opera
       return;
     }
 
-    for (int i = 0; i < childOperatorsArray.length; i++) {
+    // We will start a new key group. We can flush the buffers
+    // of the children from lastChildIndex (inclusive) to the last child and
+    // propagate processGroup to those children.
+    for (int i = lastChildIndex; i < childOperatorsArray.length; i++) {
       Operator<? extends OperatorDesc> child = childOperatorsArray[i];
       child.flush();
       child.endGroup();



Mime
View raw message