pig-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From z..@apache.org
Subject svn commit: r1784237 [1/22] - in /pig/branches/spark: ./ bin/ conf/ contrib/piggybank/java/ contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/ contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/util/apachelo...
Date Fri, 24 Feb 2017 08:19:46 GMT
Author: zly
Date: Fri Feb 24 08:19:42 2017
New Revision: 1784237

URL: http://svn.apache.org/viewvc?rev=1784237&view=rev
Log:
PIG-5132: Merge from trunk (5) [Spark Branch]-2 (Adam via Liyun)

Added:
    pig/branches/spark/BUILDING.md
    pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/evaluation/TestMaxTupleBy1stField.java
    pig/branches/spark/dev-support/
    pig/branches/spark/dev-support/docker/
    pig/branches/spark/dev-support/docker/Dockerfile
    pig/branches/spark/dev-support/docker/build_env_checks.sh
    pig/branches/spark/dev-support/docker/configure-for-user.sh
    pig/branches/spark/ivy/ant-contrib-1.0b3.jar   (with props)
    pig/branches/spark/shims/src/hadoop2/
    pig/branches/spark/shims/src/hadoop2/org/
    pig/branches/spark/shims/src/hadoop2/org/apache/
    pig/branches/spark/shims/src/hadoop2/org/apache/pig/
    pig/branches/spark/shims/src/hadoop2/org/apache/pig/backend/
    pig/branches/spark/shims/src/hadoop2/org/apache/pig/backend/hadoop/
    pig/branches/spark/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/
    pig/branches/spark/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/
    pig/branches/spark/shims/src/hadoop2/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
    pig/branches/spark/shims/test/hadoop2/
    pig/branches/spark/shims/test/hadoop2/.gitignore
    pig/branches/spark/src/docs/jdiff/pig_0.16.0.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/v_editors.xml
    pig/branches/spark/src/docs/src/documentation/resources/images/pig_zeppelin.png   (with props)
    pig/branches/spark/src/org/apache/pig/NonFSLoadFunc.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/PigATSClient.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/PigJobControl.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/BloomPackager.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POBloomFilterRearrangeTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POBuildBloomRearrangeTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/runtime/HashValuePartitioner.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/visitor/ForEachUserSchemaVisitor.java
    pig/branches/spark/src/org/apache/pig/tools/grunt/ConsoleReaderInputStream.java
    pig/branches/spark/start-build-env.sh
    pig/branches/spark/test/e2e/pig/tests/join.conf
    pig/branches/spark/test/org/apache/pig/builtin/avro/code/pig/group_test.pig
    pig/branches/spark/test/org/apache/pig/impl/builtin/TestHiveUDTF.java
    pig/branches/spark/test/org/apache/pig/test/MiniCluster.java
    pig/branches/spark/test/org/apache/pig/test/SparkMiniCluster.java
    pig/branches/spark/test/org/apache/pig/test/TestConfigurationUtil.java
    pig/branches/spark/test/org/apache/pig/test/TestNewPredicatePushDown.java
    pig/branches/spark/test/org/apache/pig/test/TezMiniCluster.java
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-1-KeyToReducer.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-2-KeyToReducer.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-3-KeyToReducer.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-3.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-4-KeyToReducer.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-4.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-5-KeyToReducer.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-5.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-6-KeyToReducer.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-6.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-7-KeyToReducer.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-BloomJoin-7.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Limit-4.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-LoadStore-3.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-LoadStore-4.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-LoadStore-5.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-LoadStore-6.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-10-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-10.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-9-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-9.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-1-DummyStore2-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-19-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-19.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-20-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-20.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-21-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-21.gld
    pig/branches/spark/test/org/apache/pig/tez/TestTezJobExecution.java
Removed:
    pig/branches/spark/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/HadoopJobHistoryLoader.java
    pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestHadoopJobHistoryLoader.java
    pig/branches/spark/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
    pig/branches/spark/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    pig/branches/spark/shims/src/hadoop20/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
    pig/branches/spark/shims/src/hadoop20/org/apache/pig/backend/hadoop20/PigJobControl.java
    pig/branches/spark/shims/src/hadoop23/org/apache/hadoop/mapred/DowngradeHelper.java
    pig/branches/spark/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapBase.java
    pig/branches/spark/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
    pig/branches/spark/shims/src/hadoop23/org/apache/pig/backend/hadoop/executionengine/shims/HadoopShims.java
    pig/branches/spark/shims/src/hadoop23/org/apache/pig/backend/hadoop23/PigJobControl.java
    pig/branches/spark/shims/test/hadoop20/org/apache/pig/test/MiniCluster.java
    pig/branches/spark/shims/test/hadoop20/org/apache/pig/test/SparkMiniCluster.java
    pig/branches/spark/shims/test/hadoop20/org/apache/pig/test/TezMiniCluster.java
    pig/branches/spark/shims/test/hadoop23/org/apache/pig/test/MiniCluster.java
    pig/branches/spark/shims/test/hadoop23/org/apache/pig/test/SparkMiniCluster.java
    pig/branches/spark/shims/test/hadoop23/org/apache/pig/test/TezMiniCluster.java
    pig/branches/spark/src/META-INF/services/org.apache.hadoop.mapreduce.protocol.ClientProtocolProvider
    pig/branches/spark/src/docs/jdiff/pig_0.15.0.xml
    pig/branches/spark/src/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBooleanObjectInspector.java
    pig/branches/spark/src/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDoubleObjectInspector.java
    pig/branches/spark/src/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantFloatObjectInspector.java
    pig/branches/spark/src/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantIntObjectInspector.java
    pig/branches/spark/src/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantLongObjectInspector.java
    pig/branches/spark/src/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantStringObjectInspector.java
    pig/branches/spark/test/e2e/pig/lib/hadoop-0.23.0-streaming.jar
    pig/branches/spark/test/excluded-tests-20
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-LoadStore-2-JDK7.gld
Modified:
    pig/branches/spark/.gitignore
    pig/branches/spark/CHANGES.txt
    pig/branches/spark/bin/pig
    pig/branches/spark/bin/pig.py
    pig/branches/spark/build.xml
    pig/branches/spark/conf/pig.properties
    pig/branches/spark/contrib/piggybank/java/build.xml
    pig/branches/spark/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/MaxTupleBy1stField.java
    pig/branches/spark/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/Over.java
    pig/branches/spark/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/util/apachelogparser/SearchEngineExtractor.java
    pig/branches/spark/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/evaluation/xml/XPath.java
    pig/branches/spark/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/CSVExcelStorage.java
    pig/branches/spark/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/DBStorage.java
    pig/branches/spark/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/IndexedStorage.java
    pig/branches/spark/contrib/piggybank/java/src/main/java/org/apache/pig/piggybank/storage/MultiStorage.java
    pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/evaluation/TestOver.java
    pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/evaluation/xml/XPathTest.java
    pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestCSVExcelStorage.java
    pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestLogFormatLoader.java
    pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorage.java
    pig/branches/spark/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestMultiStorageCompression.java
    pig/branches/spark/ivy.xml
    pig/branches/spark/ivy/libraries.properties
    pig/branches/spark/ivy/pig-template.xml
    pig/branches/spark/ivy/piggybank-template.xml
    pig/branches/spark/ivy/pigsmoke-template.xml
    pig/branches/spark/ivy/pigunit-template.xml
    pig/branches/spark/src/META-INF/services/org.apache.pig.ExecType
    pig/branches/spark/src/docs/src/documentation/content/xdocs/basic.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/cont.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/func.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/perf.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/pig-index.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/site.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/start.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/tabs.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/test.xml
    pig/branches/spark/src/docs/src/documentation/content/xdocs/udf.xml
    pig/branches/spark/src/org/apache/pig/CounterBasedErrorHandler.java
    pig/branches/spark/src/org/apache/pig/EvalFunc.java
    pig/branches/spark/src/org/apache/pig/JVMReuseImpl.java
    pig/branches/spark/src/org/apache/pig/LoadFunc.java
    pig/branches/spark/src/org/apache/pig/Main.java
    pig/branches/spark/src/org/apache/pig/PigConfiguration.java
    pig/branches/spark/src/org/apache/pig/PigServer.java
    pig/branches/spark/src/org/apache/pig/StoreFunc.java
    pig/branches/spark/src/org/apache/pig/StreamToPig.java
    pig/branches/spark/src/org/apache/pig/backend/executionengine/ExecutionEngine.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/accumulo/AbstractAccumuloStorage.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/accumulo/Utils.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/datastorage/ConfigurationUtil.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/datastorage/HDataStorage.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/HJob.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/Launcher.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchLauncher.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/fetch/FetchPOStoreImpl.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/DistinctCombiner.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/FileBasedOutputSizeReader.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/InputSizeReducerEstimator.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/JobControlCompiler.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MRCompiler.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceOper.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MultiQueryOptimizer.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigCombiner.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapBase.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigGenericMapReduce.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigHadoopLogger.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigOutputCommitter.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigSplit.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/partitioners/DiscreteProbabilitySampleGenerator.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/partitioners/WeightedRangePartitioner.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/plans/EndOfAllInputSetter.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/plans/MRPrinter.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/PhysicalOperator.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/Divide.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/POCast.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/expressionOperators/POProject.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/CombinerPackager.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/LitePackager.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POCross.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POFRJoin.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POFRJoinSpark.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POForEach.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POMergeJoin.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POPoissonSample.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POReservoirSample.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/Packager.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/StoreFuncDecorator.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/TezDagBuilder.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/TezJob.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/TezJobCompiler.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/TezLauncher.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/TezResourceManager.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/TezSessionManager.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/TezCompiler.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/TezEdgeDescriptor.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/TezOperPlan.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/TezOperator.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/TezPOPackageAnnotator.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/TezPlanContainer.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/TezPrinter.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POCounterStatsTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POFRJoinTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POIdentityInOutTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POLocalRearrangeTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/PORankTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POShuffleTezLoad.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POShuffledValueInputTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POSimpleTezLoad.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POStoreTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/operator/POValueInputTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/optimizer/CombinerOptimizer.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/optimizer/LoaderProcessor.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/optimizer/MultiQueryOptimizerTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/optimizer/ParallelismSetter.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/optimizer/SecondaryKeyOptimizerTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/optimizer/TezEstimatedParallelismClearer.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/optimizer/TezOperDependencyParallelismEstimator.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/plan/optimizer/UnionOptimizer.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/runtime/PartitionerDefinedVertexManager.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/runtime/PigGraceShuffleVertexManager.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/runtime/PigProcessor.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/runtime/TezInput.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/runtime/WeightedRangePartitionerTez.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/util/MRToTezHelper.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/tez/util/TezCompilerUtil.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/executionengine/util/MapRedUtil.java
    pig/branches/spark/src/org/apache/pig/backend/hadoop/hbase/HBaseStorage.java
    pig/branches/spark/src/org/apache/pig/builtin/Bloom.java
    pig/branches/spark/src/org/apache/pig/builtin/BuildBloomBase.java
    pig/branches/spark/src/org/apache/pig/builtin/HiveUDFBase.java
    pig/branches/spark/src/org/apache/pig/builtin/OrcStorage.java
    pig/branches/spark/src/org/apache/pig/builtin/PigStorage.java
    pig/branches/spark/src/org/apache/pig/builtin/RoundRobinPartitioner.java
    pig/branches/spark/src/org/apache/pig/builtin/TextLoader.java
    pig/branches/spark/src/org/apache/pig/data/DefaultAbstractBag.java
    pig/branches/spark/src/org/apache/pig/data/DefaultDataBag.java
    pig/branches/spark/src/org/apache/pig/data/DistinctDataBag.java
    pig/branches/spark/src/org/apache/pig/data/ReadOnceBag.java
    pig/branches/spark/src/org/apache/pig/data/SchemaTupleBackend.java
    pig/branches/spark/src/org/apache/pig/data/SortedDataBag.java
    pig/branches/spark/src/org/apache/pig/data/SortedSpillBag.java
    pig/branches/spark/src/org/apache/pig/data/UnlimitedNullTuple.java
    pig/branches/spark/src/org/apache/pig/data/utils/SedesHelper.java
    pig/branches/spark/src/org/apache/pig/impl/PigImplConstants.java
    pig/branches/spark/src/org/apache/pig/impl/builtin/DefaultIndexableLoader.java
    pig/branches/spark/src/org/apache/pig/impl/builtin/GFCross.java
    pig/branches/spark/src/org/apache/pig/impl/builtin/PoissonSampleLoader.java
    pig/branches/spark/src/org/apache/pig/impl/io/NullableTuple.java
    pig/branches/spark/src/org/apache/pig/impl/io/PigFile.java
    pig/branches/spark/src/org/apache/pig/impl/io/ReadToEndLoader.java
    pig/branches/spark/src/org/apache/pig/impl/plan/NodeIdGenerator.java
    pig/branches/spark/src/org/apache/pig/impl/streaming/ExecutableManager.java
    pig/branches/spark/src/org/apache/pig/impl/streaming/OutputHandler.java
    pig/branches/spark/src/org/apache/pig/impl/util/JarManager.java
    pig/branches/spark/src/org/apache/pig/impl/util/SpillableMemoryManager.java
    pig/branches/spark/src/org/apache/pig/impl/util/Utils.java
    pig/branches/spark/src/org/apache/pig/impl/util/avro/AvroStorageDataConversionUtilities.java
    pig/branches/spark/src/org/apache/pig/impl/util/avro/AvroTupleWrapper.java
    pig/branches/spark/src/org/apache/pig/newplan/FilterExtractor.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/expression/ExpToPhyTranslationVisitor.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/expression/MapLookupExpression.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOGenerate.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LOJoin.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogToPhyTranslationVisitor.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalPlan.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/relational/LogicalSchema.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/rules/AddForEach.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/visitor/CastLineageSetter.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/visitor/LineageFindRelVisitor.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/visitor/TypeCheckingExpVisitor.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/visitor/TypeCheckingRelVisitor.java
    pig/branches/spark/src/org/apache/pig/newplan/logical/visitor/UnionOnSchemaSetter.java
    pig/branches/spark/src/org/apache/pig/parser/LogicalPlanBuilder.java
    pig/branches/spark/src/org/apache/pig/parser/PigMacro.java
    pig/branches/spark/src/org/apache/pig/parser/QueryParser.g
    pig/branches/spark/src/org/apache/pig/parser/RegisterResolver.java
    pig/branches/spark/src/org/apache/pig/parser/SourceLocation.java
    pig/branches/spark/src/org/apache/pig/pen/LocalMapReduceSimulator.java
    pig/branches/spark/src/org/apache/pig/scripting/ScriptEngine.java
    pig/branches/spark/src/org/apache/pig/scripting/js/JsFunction.java
    pig/branches/spark/src/org/apache/pig/scripting/jython/JythonFunction.java
    pig/branches/spark/src/org/apache/pig/tools/DownloadResolver.java
    pig/branches/spark/src/org/apache/pig/tools/grunt/Grunt.java
    pig/branches/spark/src/org/apache/pig/tools/grunt/GruntParser.java
    pig/branches/spark/src/org/apache/pig/tools/grunt/PigCompletor.java
    pig/branches/spark/src/org/apache/pig/tools/grunt/PigCompletorAliases.java
    pig/branches/spark/src/org/apache/pig/tools/parameters/PigFileParser.jj
    pig/branches/spark/src/org/apache/pig/tools/parameters/PreprocessorContext.java
    pig/branches/spark/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj
    pig/branches/spark/src/org/apache/pig/tools/pigstats/EmbeddedPigStats.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/EmptyPigStats.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/PigStats.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/PigStatsUtil.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/ScriptState.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/mapreduce/MRJobStats.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/mapreduce/MRPigStatsUtil.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/mapreduce/SimplePigStats.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/spark/SparkPigStats.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/tez/TezDAGStats.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/tez/TezPigScriptStats.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/tez/TezScriptState.java
    pig/branches/spark/src/org/apache/pig/tools/pigstats/tez/TezVertexStats.java
    pig/branches/spark/src/pig-default.properties
    pig/branches/spark/test/e2e/pig/build.xml
    pig/branches/spark/test/e2e/pig/conf/spark.conf
    pig/branches/spark/test/e2e/pig/deployers/ExistingClusterDeployer.pm
    pig/branches/spark/test/e2e/pig/deployers/LocalDeployer.pm
    pig/branches/spark/test/e2e/pig/drivers/TestDriverPig.pm
    pig/branches/spark/test/e2e/pig/streaming/PigStreaming.pl
    pig/branches/spark/test/e2e/pig/tests/grunt.conf
    pig/branches/spark/test/e2e/pig/tests/hcat.conf
    pig/branches/spark/test/e2e/pig/tests/multiquery.conf
    pig/branches/spark/test/e2e/pig/tests/negative.conf
    pig/branches/spark/test/e2e/pig/tests/nightly.conf
    pig/branches/spark/test/e2e/pig/tests/orc.conf
    pig/branches/spark/test/e2e/pig/tests/turing_jython.conf
    pig/branches/spark/test/e2e/pig/tools/generate/generate_data.pl
    pig/branches/spark/test/org/apache/pig/TestLoadStoreFuncLifeCycle.java
    pig/branches/spark/test/org/apache/pig/TestMain.java
    pig/branches/spark/test/org/apache/pig/builtin/TestAvroStorage.java
    pig/branches/spark/test/org/apache/pig/builtin/TestOrcStorage.java
    pig/branches/spark/test/org/apache/pig/data/TestSchemaTuple.java
    pig/branches/spark/test/org/apache/pig/parser/TestQueryParser.java
    pig/branches/spark/test/org/apache/pig/parser/TestQueryParserUtils.java
    pig/branches/spark/test/org/apache/pig/test/TestBZip.java
    pig/branches/spark/test/org/apache/pig/test/TestBuiltin.java
    pig/branches/spark/test/org/apache/pig/test/TestCounters.java
    pig/branches/spark/test/org/apache/pig/test/TestDataBag.java
    pig/branches/spark/test/org/apache/pig/test/TestDivide.java
    pig/branches/spark/test/org/apache/pig/test/TestEmptyInputDir.java
    pig/branches/spark/test/org/apache/pig/test/TestErrorHandlingStoreFunc.java
    pig/branches/spark/test/org/apache/pig/test/TestEvalPipeline.java
    pig/branches/spark/test/org/apache/pig/test/TestFindQuantiles.java
    pig/branches/spark/test/org/apache/pig/test/TestForEachNestedPlanLocal.java
    pig/branches/spark/test/org/apache/pig/test/TestGFCross.java
    pig/branches/spark/test/org/apache/pig/test/TestGrunt.java
    pig/branches/spark/test/org/apache/pig/test/TestHBaseStorage.java
    pig/branches/spark/test/org/apache/pig/test/TestJobControlCompiler.java
    pig/branches/spark/test/org/apache/pig/test/TestLineageFindRelVisitor.java
    pig/branches/spark/test/org/apache/pig/test/TestLoad.java
    pig/branches/spark/test/org/apache/pig/test/TestLoaderStorerShipCacheFiles.java
    pig/branches/spark/test/org/apache/pig/test/TestLocal.java
    pig/branches/spark/test/org/apache/pig/test/TestLogicalPlanBuilder.java
    pig/branches/spark/test/org/apache/pig/test/TestMRJobStats.java
    pig/branches/spark/test/org/apache/pig/test/TestMacroExpansion.java
    pig/branches/spark/test/org/apache/pig/test/TestMultiQuery.java
    pig/branches/spark/test/org/apache/pig/test/TestMultiQueryCompiler.java
    pig/branches/spark/test/org/apache/pig/test/TestNewPartitionFilterPushDown.java
    pig/branches/spark/test/org/apache/pig/test/TestNewPlanColumnPrune.java
    pig/branches/spark/test/org/apache/pig/test/TestPOGenerate.java
    pig/branches/spark/test/org/apache/pig/test/TestParamSubPreproc.java
    pig/branches/spark/test/org/apache/pig/test/TestPigContext.java
    pig/branches/spark/test/org/apache/pig/test/TestPigRunner.java
    pig/branches/spark/test/org/apache/pig/test/TestPigScriptParser.java
    pig/branches/spark/test/org/apache/pig/test/TestPigStatsMR.java
    pig/branches/spark/test/org/apache/pig/test/TestPlanGeneration.java
    pig/branches/spark/test/org/apache/pig/test/TestPruneColumn.java
    pig/branches/spark/test/org/apache/pig/test/TestRegisterParser.java
    pig/branches/spark/test/org/apache/pig/test/TestScriptUDF.java
    pig/branches/spark/test/org/apache/pig/test/TestSkewedJoin.java
    pig/branches/spark/test/org/apache/pig/test/TestStreamingLocal.java
    pig/branches/spark/test/org/apache/pig/test/TestTypeCheckingValidatorNewLP.java
    pig/branches/spark/test/org/apache/pig/test/TestUnionOnSchema.java
    pig/branches/spark/test/org/apache/pig/test/Util.java
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Cogroup-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Cross-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Cross-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Cross-3.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Distinct-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Distinct-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-FRJoin-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-FRJoin-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Filter-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Group-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Join-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Limit-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Limit-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Limit-3.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-LoadStore-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-LoadStore-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-1-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-2-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-3-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-3.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-4-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-4.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-5-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-5.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-6-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-6.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-7-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-7.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-8-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-MQ-8.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Native-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Order-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Order-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Order-3.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Rank-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Rank-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SecKeySort-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SecKeySort-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SelfJoin-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SelfJoin-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SelfJoin-3.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SelfJoin-4.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SelfJoin-5.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SelfJoin-6.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SkewJoin-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-SkewJoin-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Stream-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-1-DummyStore-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-1-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-1-SplitStore.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-1.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-10-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-10.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-11-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-11.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-12-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-12.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-13-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-13.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-14-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-14.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-15-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-15.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-16-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-16.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-17-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-17.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-18-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-18.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-2-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-2.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-3-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-3.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-4-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-4.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-5-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-5.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-6-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-6.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-7-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-7.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-8-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-8.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-9-OPTOFF.gld
    pig/branches/spark/test/org/apache/pig/test/data/GoldenFiles/tez/TEZC-Union-9.gld
    pig/branches/spark/test/org/apache/pig/tez/TestTezAutoParallelism.java
    pig/branches/spark/test/org/apache/pig/tez/TestTezCompiler.java
    pig/branches/spark/test/org/apache/pig/tez/TestTezGraceParallelism.java
    pig/branches/spark/test/org/apache/pig/tez/TestTezJobControlCompiler.java
    pig/branches/spark/test/org/apache/pig/tez/TestTezLauncher.java
    pig/branches/spark/test/perf/pigmix/bin/generate_data.sh
    pig/branches/spark/test/perf/pigmix/build.xml

Modified: pig/branches/spark/.gitignore
URL: http://svn.apache.org/viewvc/pig/branches/spark/.gitignore?rev=1784237&r1=1784236&r2=1784237&view=diff
==============================================================================
--- pig/branches/spark/.gitignore (original)
+++ pig/branches/spark/.gitignore Fri Feb 24 08:19:42 2017
@@ -23,3 +23,4 @@ contrib/piggybank/java/piggybank.jar
 conf/log4j.properties
 lib/jdiff/pig_*SNAPSHOT.xml
 test/resources/*.jar
+!ivy/ant-contrib-1.0b3.jar

Added: pig/branches/spark/BUILDING.md
URL: http://svn.apache.org/viewvc/pig/branches/spark/BUILDING.md?rev=1784237&view=auto
==============================================================================
--- pig/branches/spark/BUILDING.md (added)
+++ pig/branches/spark/BUILDING.md Fri Feb 24 08:19:42 2017
@@ -0,0 +1,71 @@
+# Building Apache Pig
+
+## Requirements:
+
+* Unix System
+* JDK 1.7+
+* Ant 1.8.1+
+* Findbugs 3.x+
+* Forrest 0.9 (for building the documentation)
+* Internet connection for first build (to fetch all dependencies)
+
+**Note**: Further down this document you can read about the _ready to run build environment_.
+
+## Building Pig
+
+To compile with Hadoop 2.x 
+
+    ant clean jar piggybank
+
+Building and running the tests needed before submitting a patch.
+For more details https://cwiki.apache.org/confluence/display/PIG/HowToContribute
+    
+    ANT_OPTS='-Djavac.args="-Xlint -Xmaxwarns 1000"'
+    ant ${ANT_OPTS} clean piggybank jar compile-test test-commit
+    cd contrib/piggybank/java && ant ${ANT_OPTS} test
+
+Generate documentation
+
+    ant docs
+
+# Ready to run build environment
+The easiest way to get an environment with all the appropriate tools is by means
+of the provided Docker config.
+This requires a recent version of docker ( 1.4.1 and higher are known to work ).
+
+## How it works
+By using the mounted volumes feature of Docker this image will wrap itself around the directory from which it is started.
+So the files within the docker environment are actually the same as outside.
+
+A very valid way of working is by having your favourite IDE that has the project 
+open and a commandline into the docker that has the exact right tools to do the full build.
+
+## Using it on Linux:
+Install Docker and run this command:
+
+    $ ./start-build-env.sh
+
+## Using it on Mac:
+First make sure Homebrew has been installed ( http://brew.sh/ )
+    
+    $ brew install docker boot2docker
+    $ boot2docker init -m 4096
+    $ boot2docker start
+    $ $(boot2docker shellinit)
+    $ ./start-build-env.sh
+
+The prompt which is then presented is located at a mounted version of the source tree
+and all required tools for testing and building have been installed and configured.
+
+Note that from within this docker environment you ONLY have access to the source
+tree from where you started. 
+
+## Known issues:
+On Mac with Boot2Docker the performance on the mounted directory is currently extremely slow.
+This is a known problem related to boot2docker on the Mac.
+https://github.com/boot2docker/boot2docker/issues/593
+    This issue has been resolved as a duplicate, and they point to a new feature for utilizing NFS mounts as the proposed solution:
+
+https://github.com/boot2docker/boot2docker/issues/64
+    An alternative solution to this problem is when you install Linux native inside a virtual machine and run your IDE and Docker etc. inside that VM.
+

Modified: pig/branches/spark/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/branches/spark/CHANGES.txt?rev=1784237&r1=1784236&r2=1784237&view=diff
==============================================================================
--- pig/branches/spark/CHANGES.txt (original)
+++ pig/branches/spark/CHANGES.txt Fri Feb 24 08:19:42 2017
@@ -19,11 +19,263 @@
 Pig Change Log
 
 Trunk (unreleased changes)
+ 
+INCOMPATIBLE CHANGES
+
+PIG-4728: Compilation against hbase 1.x fails with hbase-hadoop1-compat not found (szita via rohini)
+
+PIG-4897: Scope of param substitution for run/exec commands (knoguchi)
+
+PIG-4923: Drop Hadoop 1.x support in Pig 0.17 (szita via rohini)
+
+PIG-5109: Remove HadoopJobHistoryLoader (szita via daijy)
+
+PIG-5067: Revisit union on numeric type and chararray to bytearray (knoguchi)
+ 
+IMPROVEMENTS
+
+PIG-5085: Support FLATTEN of maps (szita via rohini)
+
+PIG-5126. Add doc about pig in zeppelin (zjffdu)
+
+PIG-5120: Let tez_local mode run without a jar file (knoguchi)
+
+PIG-3851: Upgrade jline to 2.11 (daijy)
+
+PIG-4963: Add a Bloom join (rohini)
+
+PIG-3938: Add LoadCaster to EvalFunc (knoguchi)
+
+PIG-5105: Tez unit tests failing with "Argument list too long" (rohini)
+
+PIG-4901: To use Multistorage for each Group (szita via daijy)
+
+PIG-5025: Fix flaky test failures in TestLoad.java (szita via rohini)
+
+PIG-4939: QueryParserUtils.setHdfsServers(QueryParserUtils.java:104) should not be called for non-dfs
+  methods (szita via daijy)
+
+PIG-5034: Remove org.apache.hadoop.hive.serde2.objectinspector.primitive package (nkollar via daijy)
+
+PIG-5036: Remove biggish from e2e input dataset (daijy)
+
+PIG-5053: Can't change HDFS user home in e2e tests using Ant (nkollar via daijy)
+
+PIG-5037: Add api getDisplayString to PigStats (zjffdu)
+
+PIG-5020: Give file location for loadcaster related warning and errors (knoguchi)
+
+PIG-5027: Improve SAMPLE Scalar Expression Example (icook via knoguchi)
+
+PIG-5023: Documentation for BagToTuple (icook via knoguchi)
+
+PIG-5022: Error in TOKENIZE Example (icook via knoguchi)
+
+PIG-4931: Document IN operator (dbist13 via daijy)
+
+PIG-4852: Add accumulator implementation for MaxTupleBy1stField (szita via daijy)
+
+PIG-4925: Support for passing the bloom filter to the Bloom UDF (rohini)
+
+PIG-4911: Provide option to disable DAG recovery (rohini)
+
+PIG-4906: Add Bigdecimal functions in Over function (cgalan via daijy)
+
+PIG-2768: Fix org.apache.hadoop.conf.Configuration deprecation warnings for Hadoop 23 (rohini)
+ 
+OPTIMIZATIONS
+ 
+BUG FIXES
+
+PIG-5127: Test fail when running test-core-mrtez (daijy)
+
+PIG-5083: CombinerPackager and LitePackager should not materialize bags (rohini)
+
+PIG-5087: e2e Native3 failing after PIG-4923 (knoguchi)
+
+PIG-5073: Skip e2e Limit_5 test for Tez (knoguchi)
+
+PIG-5072: e2e Union_12 fails on typecast when oldpig=0.11 (knoguchi)
+
+PIG-3891: FileBasedOutputSizeReader does not calculate size of files in sub-directories (nkollar via rohini)
+
+PIG-5070: Allow Grunt e2e tests to run in parallel (knoguchi)
+
+PIG-5061: ant test -Dtestcase=TestBoolean failing (knoguchi)
+
+PIG-5066: e2e Jython_Checkin_2 failing due to floating precision difference (knoguchi)
+
+PIG-5063: e2e IOErrors_1 on mapreduce is unstable (knoguchi)
+
+PIG-5062: Allow Native e2e tests to run in parallel (knoguchi)
+
+PIG-5060: TestPigRunner.testDisablePigCounters2 failing with tez (knoguchi)
+
+PIG-5056: Fix AvroStorage writing enums (szita via daijy)
+
+PIG-5055: Infinite loop with join by fixed index (knoguchi)
+
+PIG-5049: Cleanup e2e tests turing_jython.conf (Daniel Dai)
+
+PIG-5033: MultiQueryOptimizerTez creates bad plan with union, split and FRJoin (rohini,tmwoordruff via rohini)
+
+PIG-4934: SET command does not work well with deprecated settings (szita via daijy)
+
+PIG-4798: big integer literals fail to parse (szita via daijy)
+
+PIG-5045: CSVExcelStorage Load: A Quoted Field with a Single Escaped Quote """" Becomes "" This should become " instead
+  (szita via daijy)
+
+PIG-5026: Remove src/META-INF/services/org.apache.hadoop.mapreduce.protocol.ClientProtocolProvider (nkollar via daijy)
+
+PIG-5041: RoundRobinPartitioner is not deterministic when order of input records change (rohini)
+
+PIG-5040: Order by and CROSS partitioning is not deterministic due to usage of Random (rohini)
+
+PIG-5038: Pig Limit_2 e2e test failed with sort check (Konstantin_Harasov via rohini)
+
+PIG-5039: TestTypeCheckingValidatorNewLP.TestTypeCheckingValidatorNewLP is failing (nkollar via knoguchi)
+
+PIG-3087: Refactor TestLogicalPlanBuilder to be meaningful (szita via daijy)
+
+PIG-4976: streaming job with store clause stuck if the script fail (daijy via knoguchi)
+
+PIG-5035: killJob API does not work in Tez (zjffdu via rohini)
+
+PIG-5032: Output record stats in Tez is wrong when there is split followed by union (rohini)
+
+PIG-5031: Tez failing to compile when replicate join is done with a limit vertex on left (knoguchi)
+
+PIG-5019: Pig generates tons of warnings for udf with enabled warnings aggregation (murshyd via rohini)
+
+PIG-4974: A simple map reference fail to cast (knoguchi)
+
+PIG-4975 Map schema shows "Type: null Uid: null" in explain (knoguchi)
+
+PIG-4973: Bigdecimal division fails (szita via daijy)
+
+PIG-4967: NPE in PigJobControl.run() when job status is null (water via daijy)
+
+PIG-4972: StreamingIO_1 fail on perl 5.22 (daijy)
+
+PIG-4933: TestDataBagAccess.testBagConstantFlatten1/TestLogicalPlanBuilder.testQuery90 broken after PIG-2315 (knoguchi)
+
+PIG-4965: Refactor test/perf/pigmix/bin/runpigmix.pl to delete the output of single test case
+  if we enable cleanup_after_test (kellyzly via daijy)
+
+PIG-4966: Fix Pig compatibility with Hive 2.1.0 (zyork via daijy)
+
+PIG-4935: TEZ_USE_CLUSTER_HADOOP_LIBS is always set to true (rohini)
+
+PIG-4961: CROSS followed by LIMIT inside nested foreach drop data from result (rohini)
+
+PIG-4960: Split followed by order by/skewed join is skewed in Tez (rohini)
+
+PIG-4957: See "Received kill signal" message for a normal run after PIG-4921 (rohini)
+
+PIG-4953: Predicate push-down will not run filters for single unary expressions (rdblue via daijy)
+
+PIG-4940: Predicate push-down filtering unary expressions can be pushed (rdblue via daijy)
+
+PIG-4938: [PiggyBank] XPath returns empty values when using aggregation method (nkollar via daijy)
+
+PIG-4896: Param substitution ignored when redefined (knoguchi)
+
+PIG-2315: Make as clause work in generate (daijy via knoguchi)
+
+PIG-4921: Kill running jobs on InterruptedException (rohini)
+
+PIG-4916: Pig on Tez fail to remove temporary HDFS files in some cases (daijy)
+
+Release 0.16.1 - Unreleased
 
 INCOMPATIBLE CHANGES
 
 IMPROVEMENTS
 
+PIG-4945: Update document for conflicting macro params (knoguchi via daijy)
+
+OPTIMIZATIONS
+
+BUG FIXES
+
+PIG-5119: SkewedJoin_15 is unstable (daijy)
+
+PIG-5118: Script fails with Invalid dag containing 0 vertices (rohini)
+
+PIG-5111: e2e Utf8Test fails in local mode (rohini)
+
+PIG-5112: Cleanup pig-template.xml (daijy)
+
+PIG-5046: Skewed join with auto parallelism hangs when right input also has autoparallelism (rohini)
+
+PIG-5108: AvroStorage on Tez with exception on nested records (daijy)
+
+PIG-4260: SpillableMemoryManager.spill should revert spill on all exception (rohini)
+
+PIG-4918: Pig on Tez cannot switch pig.temp.dir to another fs (daijy)
+
+PIG-5078: Script fails with error - POStoreTez only accepts MROutput (rohini)
+
+PIG-5088: HashValuePartitioner has skew when there is only map fields (rohini)
+
+PIG-5043: Slowstart not applied in Tez with PARALLEL clause (rohini)
+
+PIG-4930: Skewed Join Breaks On Empty Sampled Input When Key is From Map (nkollar via rohini)
+
+PIG-3417: Job fails when skewed join is done on tuple key (nkollar via rohini)
+
+PIG-5074: Build broken when hadoopversion=20 in branch 0.16 (szita via daijy)
+
+PIG-5064: NPE in TestScriptUDF#testPythonBuiltinModuleImport1 when JAVA_HOME is not set (water via daijy)
+
+PIG-5048: HiveUDTF fail if it is the first expression in projection (nkollar via daijy)
+
+PIG-4951: Rename PIG_ATS_ENABLED constant (szita via daijy)
+
+PIG-4947: LOAD with HBaseStorage using a mix of pure wildcards and prefixed wildcards results
+  in empty maps for the pure wildcarded column families (daijy)
+
+PIG-4948: Pig on Tez AM use too much memory on a small cluster (daijy)
+
+PIG-4949: Fix registering jar in S3 which was broken by PIG-4417 in Pig 0.16 (yangyishan0901m via daijy)
+
+PIG-4950: Fix minor issues with running scripts in non-local FileSystems (petersla via daijy)
+
+Release 0.16.0
+
+INCOMPATIBLE CHANGES
+
+IMPROVEMENTS
+
+PIG-4719: Documentation for PIG-4704: Customizable Error Handling for Storers in Pig (daijy)
+
+PIG-4714: Improve logging across multiple components with callerId (daijy)
+
+PIG-4885: Turn off union optimizer if there is PARALLEL clause in union in Tez (rohini)
+
+PIG-4894: Add API for StoreFunc to specify if they are write safe from two different vertices (rohini)
+
+PIG-4884: Tez needs to use DistinctCombiner.Combine (rohini)
+
+PIG-4874: Remove schema tuple reference overhead for replicate join hashmap (rohini)
+
+PIG-4879: Pull latest version of joda-time (rohini)
+
+PIG-4526: Make setting up the build environment easier (nielsbasjes via rohini)
+
+PIG-4641: Print the instance of Object without using toString() (sandyridgeracer via rohini)
+
+PIG-4455: Should use DependencyOrderWalker instead of DepthFirstWalker in MRPrinter (zjffdu via rohini)
+
+PIG-4866: Do not serialize PigContext in configuration to the backend (rohini)
+
+PIG-4547: Update Jython version to 2.7.0 (erwaman via daijy)
+
+PIG-4862: POProject slow by creating StackTrace repeatedly (knoguchi)
+
+PIG-4853: Fetch inputs before starting outputs (rohini)
+
 PIG-4847: POPartialAgg processing and spill improvements (rohini)
 
 PIG-4840: Do not turn off UnionOptimizer for unsupported storefuncs in case of no vertex groups (rohini)
@@ -105,6 +357,62 @@ PIG-4639: Add better parser for Apache H
 
 BUG FIXES
 
+PIG-4821: Pig chararray field with special UTF-8 chars as part of tuple join key produces wrong results in Tez (rohini)
+
+PIG-4734: TOMAP schema inferring breaks some scripts in type checking for bincond (daijy)
+
+PIG-4786: CROSS will not work correctly with Grace Parallelism (daijy)
+
+PIG-3227: SearchEngineExtractor does not work for bing (dannyant via daijy)
+
+PIG-4902: Fix UT failures on 0.16 branch: TestTezGraceParallelism, TestPigScriptParser (daijy)
+
+PIG-4909: PigStorage incompatible with commons-cli-1.3 (knoguchi)
+
+PIG-4908: JythonFunction refers to Oozie launcher script absolute path (rohini)
+
+PIG-4905: Input of empty dir does not produce empty output file in Tez (rohini)
+
+PIG-4576: Nightly test HCat_DDL_2 fails with TDE ON (nmaheshwari via daijy)
+
+PIG-4873: InputSplit.getLocations return null and result a NPE in Pig (daijy)
+
+PIG-4895: User UDFs relying on mapreduce.job.maps broken in Tez (rohini)
+
+PIG-4883: MapKeyType of splitter was set wrongly in specific multiquery case (kellyzly via rohini)
+
+PIG-4887: Parameter substitution skipped with glob on register (knoguchi)
+
+PIG-4889: Replacing backslash fails as lexical error (knoguchi)
+
+PIG-4880: Overlapping of parameter substitution names inside&outside a macro fails with NPE (knoguchi)
+
+PIG-4881: TestBuiltin.testUniqueID failing on hadoop-1.x (knoguchi)
+
+PIG-4888: Line number off when reporting syntax error inside a macro (knoguchi)
+
+PIG-3772: Syntax error when casting an inner schema of a bag and line break involved (ssvinarchukhorton via knoguchi)
+
+PIG-4892: removing /tmp/output before UT (daijy)
+
+PIG-4882: Remove hardcoded groovy.grape.report.downloads=true from DownloadResolver (erwaman via daijy)
+
+PIG-4581: thread safe issue in NodeIdGenerator (rcatherinot via rohini)
+
+PIG-4878: Fix issues from PIG-4847 (rohini)
+
+PIG-4877: LogFormat parser fails test (nielsbasjes via daijy)
+
+PIG-4860: Loading data using OrcStorage() accepts only default FileSystem path (beriaanirudh via rohini)
+
+PIG-4868: Low values for bytes.per.reducer configured by user not honored in Tez for inputs (rohini)
+
+PIG-4869: Removing unwanted configuration in Tez broke ConfiguredFailoverProxyProvider (rohini)
+
+PIG-4867: -stop_on_failure does not work with Tez (rohini)
+
+PIG-4844: Tez AM runs out of memory when vertex has high number of outputs (rohini)
+
 PIG-4851: Null not padded when input has less fields than declared schema for some loader (rohini)
 
 PIG-4850: Registered jars do not use submit replication (rdblue via cheolsoo)

Modified: pig/branches/spark/bin/pig
URL: http://svn.apache.org/viewvc/pig/branches/spark/bin/pig?rev=1784237&r1=1784236&r2=1784237&view=diff
==============================================================================
--- pig/branches/spark/bin/pig (original)
+++ pig/branches/spark/bin/pig Fri Feb 24 08:19:42 2017
@@ -330,7 +330,8 @@ HADOOP_CORE_JAR=`echo ${HADOOP_HOME}/had
 if [ -z "$HADOOP_CORE_JAR" ]; then
     HADOOP_VERSION=2
 else
-    HADOOP_VERSION=1
+    echo "Pig requires Hadoop 2 to be present in HADOOP_HOME (currently: $HADOOP_HOME). Please install Hadoop 2.x"
+    exit 1
 fi
 
 # if using HBase, likely want to include HBase jars and config
@@ -439,11 +440,7 @@ if [ -n "$HADOOP_BIN" ]; then
     if [ -n "$PIG_JAR" ]; then
         CLASSPATH=${CLASSPATH}:$PIG_JAR
     else
-        if [ "$HADOOP_VERSION" == "1" ]; then
-            echo "Cannot locate pig-core-h${HADOOP_VERSION}.jar. do 'ant jar', and try again"
-        else
-            echo "Cannot locate pig-core-h${HADOOP_VERSION}.jar. do 'ant -Dhadoopversion=23 jar', and try again"
-        fi
+        echo "Cannot locate pig-core-h${HADOOP_VERSION}.jar. do 'ant jar', and try again"
         exit 1
     fi
 
@@ -464,8 +461,8 @@ if [ -n "$HADOOP_BIN" ]; then
         exec "$HADOOP_BIN" jar "$PIG_JAR" "${remaining[@]}"
     fi
 else
-    # use hadoop-core.jar to run local mode
-    PIG_JAR=`echo $PIG_HOME/pig*-core-h1.jar`
+    # use bundled hadoop to run local mode
+    PIG_JAR=`echo $PIG_HOME/pig*-core-h2.jar`
 
     if [ -n "$PIG_JAR" ]; then
         CLASSPATH="${CLASSPATH}:$PIG_JAR"
@@ -474,12 +471,12 @@ else
         exit 1
     fi
 
-    for f in $PIG_HOME/lib/h1/*.jar; do
+    for f in $PIG_HOME/lib/h2/*.jar; do
         CLASSPATH=${CLASSPATH}:$f;
     done
 
-    # Add bundled hadoop-core.jar
-    for f in $PIG_HOME/lib/hadoop1-runtime/*.jar; do
+    # Add bundled hadoop jars
+    for f in $PIG_HOME/lib/hadoop2-runtime/*.jar; do
         CLASSPATH=${CLASSPATH}:$f;
     done
 

Modified: pig/branches/spark/bin/pig.py
URL: http://svn.apache.org/viewvc/pig/branches/spark/bin/pig.py?rev=1784237&r1=1784236&r2=1784237&view=diff
==============================================================================
--- pig/branches/spark/bin/pig.py (original)
+++ pig/branches/spark/bin/pig.py Fri Feb 24 08:19:42 2017
@@ -338,7 +338,7 @@ hadoopCoreJars = glob.glob(os.path.join(
 if len(hadoopCoreJars) == 0:
   hadoopVersion = 2
 else:
-  hadoopVersion = 1
+  sys.exit("Cannot locate Hadoop 2 binaries, please install Hadoop 2.x and try again.")
 
 if hadoopBin != "":
   if debug == True:
@@ -361,10 +361,7 @@ if hadoopBin != "":
       if len(pigJars) == 1:
         pigJar = pigJars[0]
       else:
-        if hadoopVersion == 1:
-          sys.exit("Cannot locate pig-core-h1.jar do 'ant jar', and try again")
-        else:
-          sys.exit("Cannot locate pig-core-h2.jar do 'ant -Dhadoopversion=23 jar', and try again")
+        sys.exit("Cannot locate pig-core-h2.jar do 'ant jar', and try again")
 
   pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "h" + str(hadoopVersion), "*.jar"))
   for jar in pigLibJars:
@@ -393,13 +390,13 @@ if hadoopBin != "":
 else:
   # fall back to use fat pig.jar
   if debug == True:
-    print "Cannot find local hadoop installation, using bundled hadoop 1"
-    
-  if os.path.exists(os.path.join(os.environ['PIG_HOME'], "pig-core-h1.jar")):
-    pigJar = os.path.join(os.environ['PIG_HOME'], "pig-core-h1.jar")
+    print "Cannot find local hadoop installation, using bundled hadoop 2"
+
+  if os.path.exists(os.path.join(os.environ['PIG_HOME'], "pig-core-h2.jar")):
+    pigJar = os.path.join(os.environ['PIG_HOME'], "pig-core-h2.jar")
 
   else:
-    pigJars = glob.glob(os.path.join(os.environ['PIG_HOME'], "pig-*-core-h1.jar"))
+    pigJars = glob.glob(os.path.join(os.environ['PIG_HOME'], "pig-*-core-h2.jar"))
 
     if len(pigJars) == 1:
       pigJar = pigJars[0]
@@ -407,15 +404,15 @@ else:
     elif len(pigJars) > 1:
       print "Ambiguity with pig jars found the following jars"
       print pigJars
-      sys.exit("Please remove irrelavant jars from %s" % os.path.join(os.environ['PIG_HOME'], "pig-core-h1.jar"))
+      sys.exit("Please remove irrelavant jars from %s" % os.path.join(os.environ['PIG_HOME'], "pig-core-h2.jar"))
     else:
-      sys.exit("Cannot locate pig-core-h1.jar. do 'ant jar' and try again")
+      sys.exit("Cannot locate pig-core-h2.jar. do 'ant jar' and try again")
 
-  pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "h1", "*.jar"))
+  pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "h2", "*.jar"))
   for jar in pigLibJars:
     classpath += os.pathsep + jar
 
-  pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "hadoop1-runtime", "*.jar"))
+  pigLibJars = glob.glob(os.path.join(os.environ['PIG_HOME']+"/lib", "hadoop2-runtime", "*.jar"))
   for jar in pigLibJars:
     classpath += os.pathsep + jar
 
@@ -423,7 +420,7 @@ else:
   pigClass = "org.apache.pig.Main"
   if debug == True:
     print "dry runXXX:"
-    print "%s %s %s -classpath %s %s %s" % (java, javaHeapMax, pigOpts, classpath, pigClass, ' '.join(restArgs)) 
+    print "%s %s %s -classpath %s %s %s" % (java, javaHeapMax, pigOpts, classpath, pigClass, ' '.join(restArgs))
   else:
     cmdLine = java + ' ' + javaHeapMax + ' ' + pigOpts
     cmdLine += ' ' + '-classpath ' + classpath + ' ' + pigClass +  ' ' + ' '.join(restArgs)

Modified: pig/branches/spark/build.xml
URL: http://svn.apache.org/viewvc/pig/branches/spark/build.xml?rev=1784237&r1=1784236&r2=1784237&view=diff
==============================================================================
--- pig/branches/spark/build.xml (original)
+++ pig/branches/spark/build.xml Fri Feb 24 08:19:42 2017
@@ -20,6 +20,13 @@
          xmlns:ivy="antlib:org.apache.ivy.ant">
     <!-- Load all the default properties, and any the user wants    -->
     <!-- to contribute (without having to type -D or edit this file -->
+
+    <taskdef resource="net/sf/antcontrib/antcontrib.properties">
+        <classpath>
+            <pathelement location="${basedir}/ivy/ant-contrib-1.0b3.jar"/>
+        </classpath>
+    </taskdef>
+
     <property file="${user.home}/build.properties" />
     <property file="${basedir}/build.properties" />
 
@@ -31,11 +38,11 @@
     <property name="pigsmoke.pom" value="${basedir}/ivy/pigsmoke.pom" />
     <property name="pigunit.pom" value="${basedir}/ivy/pigunit.pom" />
     <property name="piggybank.pom" value="${basedir}/ivy/piggybank.pom" />
-    <property name="pig.version" value="0.16.0" />
+    <property name="pig.version" value="0.17.0" />
     <property name="pig.version.suffix" value="-SNAPSHOT" />
     <property name="version" value="${pig.version}${pig.version.suffix}" />
     <property name="final.name" value="${name}-${version}" />
-    <property name="year" value="2007-2012" />
+    <property name="year" value="2007-2016" />
 
     <!-- source properties -->
     <property name="lib.dir" value="${basedir}/lib" />
@@ -70,7 +77,6 @@
 
     <!-- artifact jar file names -->
     <property name="artifact.pig.jar" value="${final.name}.jar"/>
-    <property name="artifact.pig-h1.jar" value="${final.name}-h1.jar"/>
     <property name="artifact.pig-h2.jar" value="${final.name}-h2.jar"/>
     <property name="artifact.pig-sources.jar" value="${final.name}-sources.jar"/>
     <property name="artifact.pig-javadoc.jar" value="${final.name}-javadoc.jar"/>
@@ -78,15 +84,12 @@
 
     <!-- jar names. TODO we might want to use the svn reversion name in the name in case it is a dev version -->
     <property name="output.jarfile.withouthadoop" value="${build.dir}/${final.name}-withouthadoop.jar" />
-    <property name="output.jarfile.withouthadoop-h1" value="${legacy.dir}/${final.name}-withouthadoop-h1.jar" />
     <property name="output.jarfile.withouthadoop-h2" value="${legacy.dir}/${final.name}-withouthadoop-h2.jar" />
     <property name="output.jarfile.core" value="${build.dir}/${artifact.pig.jar}" />
-    <property name="output.jarfile.core-h1" value="${build.dir}/${artifact.pig-h1.jar}" />
     <property name="output.jarfile.core-h2" value="${build.dir}/${artifact.pig-h2.jar}" />
     <property name="output.jarfile.sources" value="${build.dir}/${artifact.pig-sources.jar}" />
     <property name="output.jarfile.javadoc" value="${build.dir}/${artifact.pig-javadoc.jar}" />
     <!-- Maintain old pig.jar in top level directory. -->
-    <property name="output.jarfile.backcompat-core-h1" value="${basedir}/${final.name}-core-h1.jar" />
     <property name="output.jarfile.backcompat-core-h2" value="${basedir}/${final.name}-core-h2.jar" />
 
     <!-- test properties -->
@@ -107,8 +110,6 @@
     <property name="test.spark.file" value="${test.src.dir}/spark-tests"/>
     <property name="test.spark_local.file" value="${test.src.dir}/spark-local-tests"/>
     <property name="test.exclude.file" value="${test.src.dir}/excluded-tests"/>
-    <property name="test.exclude.file.20" value="${test.src.dir}/excluded-tests-20"/>
-    <property name="test.exclude.file.23" value="${test.src.dir}/excluded-tests-23"/>
     <property name="test.exclude.file.mr" value="${test.src.dir}/excluded-tests-mr"/>
     <property name="test.exclude.file.tez" value="${test.src.dir}/excluded-tests-tez"/>
     <property name="test.exclude.file.spark" value="${test.src.dir}/excluded-tests-spark"/>
@@ -155,9 +156,8 @@
 	
     <target name="setTezEnv">
         <propertyreset name="test.timeout" value="900000" />
-        <propertyreset name="hadoopversion" value="23" />
-        <propertyreset name="isHadoop23" value="true" />
-        <propertyreset name="hbase.hadoop.version" value="hadoop2" />
+        <propertyreset name="hadoopversion" value="2" />
+        <propertyreset name="isHadoop2" value="true" />
         <propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
         <propertyreset name="src.shims.test.dir" value="${basedir}/shims/test/hadoop${hadoopversion}" />
         <propertyreset name="src.exclude.dir" value="" />
@@ -209,40 +209,42 @@
     <property name="loglevel" value="quiet" />
     <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
 
-    <property name="hadoopversion" value="20" />
 
-    <condition property="isHadoop23">
+    <!--
+      Hadoop master version
+      (Value 23 is translated for backward compatibility in old build scripts)
+    -->
+    <if>
         <equals arg1="${hadoopversion}" arg2="23"/>
-    </condition>
+        <then>
+            <echo>Property setting hadoopversion=23 is deprecated. Overwriting to hadoopversion=2</echo>
+            <var name="hadoopversion" unset="true"/>
+            <property name="hadoopversion" value="2" />
+        </then>
+    </if>
+    <property name="hadoopversion" value="2" />
 
-    <condition property="hbase.hadoop.version" value="hadoop1" else="hadoop2">
-        <not>
-            <equals arg1="${hadoopversion}" arg2="23"/>
-        </not>
+    <condition property="isHadoop2">
+        <equals arg1="${hadoopversion}" arg2="2"/>
     </condition>
 
     <!--
       HBase master version
-      Denotes how the HBase dependencies are layout. Value "94" denotes older
-      format where all HBase code is present in one single jar, which is the
-      way HBase is available up to version 0.94. Value "95" denotes new format
-      where HBase is cut into multiple dependencies per each major subsystem,
-      e.g. "client", "server", ... . Only values "94" and "95" are supported
-      at the moment.
+      (Value 95 is translated for backward compatibility in old build scripts)
     -->
-    <property name="hbaseversion" value="95" />
-
-    <!-- exclude tez code if not hadoop20 -->
-    <condition property="src.exclude.dir" value="**/tez/**" else="">
-        <not>
-            <equals arg1="${hadoopversion}" arg2="23"/>
-        </not>
-    </condition>
+    <if>
+        <equals arg1="${hbaseversion}" arg2="95"/>
+        <then>
+            <echo>Property setting hbaseversion=95 is deprecated. Overwriting to hbaseversion=1</echo>
+            <var name="hbaseversion" unset="true"/>
+            <property name="hbaseversion" value="1" />
+        </then>
+    </if>
+    <property name="hbaseversion" value="1" />
 
     <property name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
     <property name="src.shims.test.dir" value="${basedir}/shims/test/hadoop${hadoopversion}" />
 
-    <property name="hadoop.jar" value="hadoop-core-${hadoop-core.version}.jar" />
     <property name="asfrepo" value="https://repository.apache.org"/>
     <property name="asfsnapshotrepo" value="${asfrepo}/content/repositories/snapshots"/>
     <property name="mvnrepo" value="http://repo2.maven.org/maven2"/>
@@ -284,7 +286,7 @@
     <property name="xerces.jar" value="${ivy.lib.dir}/xercesImpl-${xerces.version}.jar"/>
     <property name="jdiff.build.dir" value="${build.docs}/jdiff"/>
     <property name="jdiff.xml.dir" value="${docs.dir}/jdiff"/>
-    <property name="jdiff.stable" value="0.15.0"/>
+    <property name="jdiff.stable" value="0.16.0"/>
     <property name="jdiff.stable.javadoc" value="http://hadoop.apache.org/${name}/docs/r${jdiff.stable}/api/"/>
 
     <!-- Packaging properties -->
@@ -392,27 +394,6 @@
             <include name="joda-time-${joda-time.version}.jar"/>
             <include name="automaton-${automaton.version}.jar"/>
             <include name="jansi-${jansi.version}.jar"/>
-            <include name="jackson-mapper-asl-${jackson.version}.jar" unless="isHadoop23"/>
-            <include name="jackson-core-asl-${jackson.version}.jar" unless="isHadoop23"/>
-            <include name="guava-${guava.version}.jar" unless="isHadoop23"/>
-            <include name="snappy-java-${snappy.version}.jar" unless="isHadoop23"/>
-            <include name="asm-${asm.version}.jar" unless="isHadoop23"/>
-
-            <include name="scala*.jar"/>
-            <include name="akka*.jar"/>
-            <include name="jcl-over-slf4j*.jar"/>
-            <include name="jul-to-slf4j*.jar"/>
-            <include name="slf4j*.jar"/>
-            <include name="commons*.jar"/>
-            <include name="config*.jar"/>
-            <include name="netty*.jar"/>
-            <include name="jetty*.jar"/>
-            <include name="metrics-core*.jar"/>
-            <include name="jackson*.jar"/>
-            <include name="metrics-json-*.jar"/>
-            <include name="json4s-*.jar"/>
-            <include name="javax.servlet-*.jar"/>
-            <include name="reflectasm*.jar"/>
         </patternset>
     </fileset>
 
@@ -574,6 +555,7 @@
         <echo>*** Building Main Sources ***</echo>
         <echo>*** To compile with all warnings enabled, supply -Dall.warnings=1 on command line ***</echo>
         <echo>*** Else, you will only be warned about deprecations ***</echo>
+        <echo>*** Hadoop version used: ${hadoopversion} ; HBase version used: ${hbaseversion} ***</echo>
         <compileSources sources="${src.dir};${src.gen.dir};${src.lib.dir}/bzip2;${src.shims.dir}"
             excludes="${src.exclude.dir}" dist="${build.classes}" cp="classpath" warnings="${javac.args.warnings}" />
         <copy todir="${build.classes}/META-INF">
@@ -703,23 +685,6 @@
     </target>
 
     <!-- ================================================================== -->
-    <!-- Facede to build pig.jar for both Hadoop 1 and Hadoop 2             -->
-    <!-- ================================================================== -->
-    <target name="jar-h12" description="Create pig for both Hadoop 1 and Hadoop 2">
-        <propertyreset name="hadoopversion" value="20" />
-        <propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
-        <antcall target="clean" inheritRefs="true" inheritall="true"/>
-        <antcall target="jar" inheritRefs="true" inheritall="true"/>
-        <antcall target="copyHadoop1LocalRuntimeDependencies"/>
-        <delete dir="${build.dir}" />
-        <propertyreset name="hadoopversion" value="23" />
-        <propertyreset name="hbase.hadoop.version" value="hadoop2" />
-        <propertyreset name="src.shims.dir" value="${basedir}/shims/src/hadoop${hadoopversion}" />
-        <propertyreset name="src.exclude.dir" value="" />
-        <antcall target="jar" inheritRefs="true" inheritall="true"/>
-    </target>
-
-    <!-- ================================================================== -->
     <!-- Make pig.jar                                                       -->
     <!-- ================================================================== -->
     <target name="jar" depends="compile,ivy-buildJar" description="Create pig core jar">
@@ -727,8 +692,8 @@
         <buildJar svnString="${svn.revision}" outputFile="${output.jarfile.withouthadoop}" includedJars="runtime.dependencies-withouthadoop.jar"/>
         <antcall target="copyCommonDependencies"/>
         <antcall target="copySparkDependencies"/>
-        <antcall target="copyh1Dependencies"/>
         <antcall target="copyh2Dependencies"/>
+        <antcall target="copyHadoop2LocalRuntimeDependencies" />
     </target>
 
     <target name="copyCommonDependencies">
@@ -752,7 +717,9 @@
             <fileset dir="${ivy.lib.dir}" includes="jruby-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="groovy-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="js-*.jar"/>
-            <fileset dir="${ivy.lib.dir}" includes="hbase-*.jar" excludes="hbase-*tests.jar,hbase-*hadoop*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="htrace-core*incubating.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="metrics-core-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="hbase-*.jar" excludes="hbase-*tests.jar,hbase-*hadoop2*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="hive-*.jar" excludes="hive-shims-0.*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="protobuf-java-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="zookeeper-*.jar"/>
@@ -771,25 +738,13 @@
             <fileset dir="${build.ivy.spark.lib.dir}/${ant.project.name}" includes="*.jar"/>
         </copy>
     </target>
-
-    <target name="copyh1Dependencies" unless="isHadoop23">
-        <mkdir dir="${lib.dir}/h1" />
-        <copy todir="${lib.dir}/h1">
-            <fileset dir="${ivy.lib.dir}" includes="avro-mapred-*.jar"/>
-            <fileset dir="${ivy.lib.dir}" includes="hive-shims-0.*.jar"/>
-            <fileset dir="${ivy.lib.dir}" includes="hbase-*hadoop1.jar"/>
-        </copy>
-        <copy file="${output.jarfile.core}" tofile="${output.jarfile.backcompat-core-h1}"/>
-        <mkdir dir="${legacy.dir}" />
-        <move file="${output.jarfile.withouthadoop}" tofile="${output.jarfile.withouthadoop-h1}"/>
-    </target>
-
-    <target name="copyh2Dependencies" if="isHadoop23">
+    
+    <target name="copyh2Dependencies" if="isHadoop2">
         <mkdir dir="${lib.dir}/h2" />
         <copy todir="${lib.dir}/h2">
             <fileset dir="${ivy.lib.dir}" includes="avro-mapred-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="hive-shims-0.*.jar"/>
-            <fileset dir="${ivy.lib.dir}" includes="hbase-*hadoop2.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="hbase-hadoop2*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="tez-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-collections4-*.jar"/>
         </copy>
@@ -798,18 +753,21 @@
         <move file="${output.jarfile.withouthadoop}" tofile="${output.jarfile.withouthadoop-h2}"/>
     </target>
 
-    <target name="copyHadoop1LocalRuntimeDependencies">
-        <mkdir dir="${lib.dir}/hadoop1-runtime" />
-        <copy todir="${lib.dir}/hadoop1-runtime">
-            <fileset dir="${ivy.lib.dir}" includes="hadoop-core-*.jar"/>
+    <target name="copyHadoop2LocalRuntimeDependencies">
+        <mkdir dir="${lib.dir}/hadoop2-runtime" />
+        <copy todir="${lib.dir}/hadoop2-runtime">
+            <fileset dir="${ivy.lib.dir}" includes="hadoop-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-cli-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-configuration-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="commons-collections-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-lang-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-codec-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-io-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="commons-logging-*.jar"/>
-            <fileset dir="${ivy.lib.dir}" includes="commons-httpclient-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="httpclient-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="httpcore-*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="log4j-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="slf4j-*.jar"/>
         </copy>
     </target>
 
@@ -955,6 +913,9 @@
             <sysproperty key="test.exec.type" value="${test.exec.type}" />
             <sysproperty key="ssh.gateway" value="${ssh.gateway}" />
             <sysproperty key="hod.server" value="${hod.server}" />
+            <sysproperty key="build.classes" value="${build.classes}" />
+            <sysproperty key="test.build.classes" value="${test.build.classes}" />
+            <sysproperty key="ivy.lib.dir" value="${ivy.lib.dir}" />
             <sysproperty key="java.io.tmpdir" value="${junit.tmp.dir}" />
             <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
             <jvmarg line="-XX:+CMSClassUnloadingEnabled -XX:MaxPermSize=128M ${debugArgs} -Djava.library.path=${hadoop.root}\bin"/>
@@ -980,8 +941,6 @@
                     <patternset>
                        <includesfile name="@{test.file}"/>
                        <excludesfile name="${test.exclude.file}" if="test.exclude.file"/>
-                       <excludesfile name="${test.exclude.file.20}" unless="isHadoop23"/>
-                       <excludesfile name="${test.exclude.file.23}" if="isHadoop23"/>
                        <excludesfile name="${test.exclude.file.for.exectype}"/>
                     </patternset>
                     <exclude name="**/${exclude.testcase}.java" if="exclude.testcase" />
@@ -990,7 +949,9 @@
                 </fileset>
             </batchtest>
             <batchtest fork="yes" todir="${test.log.dir}" if="testcase">
-                <fileset dir="test" includes="**/${testcase}.java"/>
+                <fileset dir="test" includes="**/${testcase}.java">
+                    <exclude name="e2e/**/*.java"/>
+                </fileset>
             </batchtest>
 
             <assertions>
@@ -1008,10 +969,10 @@
 
     <target name="test-core-mrtez" description="run core tests on both mr and tez mode"
             depends="setWindowsPath,setLinuxPath,compile-test,jar,debugger.check,jackson-pig-3039-test-download">
-        <fail message="hadoopversion must be set to 23 when invoking test-core-mrtez">
+        <fail message="hadoopversion must be set to 2 when invoking test-core-mrtez">
           <condition>
             <not>
-              <equals arg1="${hadoopversion}" arg2="23" />
+              <equals arg1="${hadoopversion}" arg2="2" />
             </not>
           </condition>
         </fail>
@@ -1021,6 +982,9 @@
         <propertyreset name="test.exec.type" value="mr" />
         <propertyreset name="test.log.dir" value="${test.build.dir}/logs/${test.exec.type}" />
         <macro-test-runner test.file="${test.all.file}" tests.failed="test.mr.failed"/>
+        <delete>
+            <fileset dir="${build.classes}" includes="*.xml" />
+        </delete>
         <echo />
         <echo message="=======================" />
         <echo message="Running Tez tests" />
@@ -1099,10 +1063,7 @@
     <!-- ================================================================== -->
     <!-- Distribution                                                       -->
     <!-- ================================================================== -->
-    <target name="package-h12" depends="jar-h12, docs, api-report, piggybank" description="Create a Pig tar release">
-        <package-base/>
-    </target>
-	
+
     <target name="package" depends="jar, docs, api-report, piggybank" description="Create a Pig tar release">
         <package-base/>
     </target>
@@ -1122,7 +1083,6 @@
             <fileset dir="${lib.dir}"/>
         </copy>
 
-        <copy file="${output.jarfile.backcompat-core-h1}" tofile="${tar.dist.dir}/${final.name}-core-h1.jar" failonerror="false"/>
         <copy file="${output.jarfile.backcompat-core-h2}" tofile="${tar.dist.dir}/${final.name}-core-h2.jar" failonerror="false"/>
 
         <copy todir="${tar.dist.dir}/lib" file="contrib/piggybank/java/piggybank.jar"/>
@@ -1200,10 +1160,6 @@
         <tar-base/>
     </target>
 
-    <target name="tar-h12" depends="package-h12" description="Source distribution">
-        <tar-base/>
-    </target>
-
     <macrodef name="tar-base">
       <sequential>
         <tar compression="gzip" longfile="gnu" destfile="${build.dir}/${artifact.pig.tar}">
@@ -1289,15 +1245,13 @@
           uri="urn:maven-artifact-ant"
           classpathref="mvn-ant-task.classpath"/>
     </target>
-    <target name="mvn-install" depends="mvn-taskdef,jar-h12, set-version, source-jar,
-      javadoc-jar, pigunit-jar, smoketests-jar, piggybank"
+    <target name="mvn-install" depends="mvn-taskdef, mvn-build, set-version"
          description="To install pig to local filesystem's m2 cache">
          <artifact:pom file="${pig.pom}" id="pig"/>
-          <artifact:install file="${output.jarfile.core-h1}">
+          <artifact:install file="${output.jarfile.core-h2}">
                <pom refid="pig"/>
            <attach file="${output.jarfile.sources}" classifier="sources" />
            <attach file="${output.jarfile.javadoc}" classifier="javadoc" />
-           <attach file="${output.jarfile.core-h2}" classifier="h2" />
           </artifact:install>
          <artifact:pom file="${pigunit.pom}" id="pigunit"/>
           <artifact:install file="${pigunit.jarfile}">
@@ -1313,10 +1267,9 @@
          </artifact:install>
     </target>
 
-    <target name="mvn-build" depends="jar-h12, source-jar,
+    <target name="mvn-build" depends="jar, source-jar,
                                       javadoc-jar, smoketests-jar, pigunit-jar, piggybank"
          description="To build the pig jar artifacts to be deployed to apache maven repository">
-        <move file="${output.jarfile.backcompat-core-h1}" tofile="${output.jarfile.core}"/>
         <move file="${output.jarfile.backcompat-core-h2}" tofile="${output.jarfile.core-h2}"/>
     </target>
 
@@ -1338,8 +1291,6 @@
            <pom refid="pig"/>
            <attach file="${output.jarfile.core}.asc" type="jar.asc"/>
            <attach file="${pig.pom}.asc" type="pom.asc"/>
-           <attach file="${output.jarfile.core-h2}.asc" type="jar.asc" classifier="h2"/>
-           <attach file="${output.jarfile.core-h2}" classifier="h2" />
            <attach file="${output.jarfile.sources}.asc" type="jar.asc" classifier="sources"/>
            <attach file="${output.jarfile.sources}" classifier="sources" />
            <attach file="${output.jarfile.javadoc}.asc" type="jar.asc"  classifier="javadoc"/>
@@ -1374,7 +1325,6 @@
       <artifact:deploy file="${output.jarfile.core}">
               <remoteRepository id="${snapshots_repo_id}" url="${asfsnapshotrepo}"/>
               <pom refid="pig"/>
-              <attach file="${output.jarfile.core-h2}" classifier="h2" />
               <attach file="${output.jarfile.sources}" classifier="sources" />
               <attach file="${output.jarfile.javadoc}" classifier="javadoc" />
       </artifact:deploy>
@@ -1418,8 +1368,6 @@
        </macrodef>
        <sign-artifact input.file="${output.jarfile.core}"
         output.file="${output.jarfile.core}.asc" gpg.passphrase="${gpg.passphrase}"/>
-       <sign-artifact input.file="${output.jarfile.core-h2}"
-        output.file="${output.jarfile.core-h2}.asc" gpg.passphrase="${gpg.passphrase}"/>
        <sign-artifact input.file="${output.jarfile.sources}"
         output.file="${output.jarfile.sources}.asc" gpg.passphrase="${gpg.passphrase}"/>
        <sign-artifact input.file="${output.jarfile.javadoc}"
@@ -1707,7 +1655,9 @@
 
      <target name="ivy-resolve" depends="ivy-init" unless="ivy.resolved" description="Resolve Ivy dependencies">
        <property name="ivy.resolved" value="true"/>
+       <echo>*** Ivy resolve with Hadoop ${hadoopversion} and HBase ${hbaseversion} ***</echo>
        <ivy:resolve log="${loglevel}" settingsRef="${ant.project.name}.ivy.settings" conf="compile"/>
+       <ivy:report toDir="build/ivy/report"/>
      </target>
 
      <target name="ivy-compile" depends="ivy-resolve" description="Retrieve Ivy-managed artifacts for compile configuration">

Modified: pig/branches/spark/conf/pig.properties
URL: http://svn.apache.org/viewvc/pig/branches/spark/conf/pig.properties?rev=1784237&r1=1784236&r2=1784237&view=diff
==============================================================================
--- pig/branches/spark/conf/pig.properties (original)
+++ pig/branches/spark/conf/pig.properties Fri Feb 24 08:19:42 2017
@@ -557,6 +557,9 @@ pig.location.check.strict=false
 #
 hcat.bin=/usr/local/hcat/bin/hcat
 
+# Enable ATS hook to log the Pig specific ATS entry, disable only when ATS server is not deployed
+pig.ats.enabled=true
+
 ###########################################################################
 #
 # Overrides for extreme environments
@@ -611,13 +614,13 @@ hcat.bin=/usr/local/hcat/bin/hcat
 # If you want Pig to allow certain errors before failing you can set this property.
 # If the property is set to true and the StoreFunc implements ErrorHandling it will allow configurable errors 
 # based on the OutputErrorHandler implementation  
-# pig.allow.store.errors = false
+# pig.error-handling.enabled = false
 #
 # Controls the minimum number of errors for store
-# pig.errors.min.records = 0
+# pig.error-handling.min.error.records = 0
 #
 # Set the threshold for percentage of errors
-# pig.error.threshold.percent = 0.0f
+# pig.error-handling.error.threshold = 0.0f
 
 ###########################################################################
 #
@@ -675,3 +678,6 @@ hcat.bin=/usr/local/hcat/bin/hcat
      
 pig.sort.readonce.loadfuncs=org.apache.pig.backend.hadoop.hbase.HBaseStorage,org.apache.pig.backend.hadoop.accumulo.AccumuloStorage
 
+# If set, Pig will override tez.am.launch.cmd-opts and tez.am.resource.memory.mb to optimal
+# even if they are set to a different value. Default value is true.
+#pig.tez.configure.am.memory=false

Modified: pig/branches/spark/contrib/piggybank/java/build.xml
URL: http://svn.apache.org/viewvc/pig/branches/spark/contrib/piggybank/java/build.xml?rev=1784237&r1=1784236&r2=1784237&view=diff
==============================================================================
--- pig/branches/spark/contrib/piggybank/java/build.xml (original)
+++ pig/branches/spark/contrib/piggybank/java/build.xml Fri Feb 24 08:19:42 2017
@@ -16,13 +16,20 @@
 -->
 
 <project basedir="." default="jar" name="pigudf">
+
+    <taskdef resource="net/sf/antcontrib/antcontrib.properties">
+        <classpath>
+            <pathelement location="../../../ivy/ant-contrib-1.0b3.jar"/>
+        </classpath>
+    </taskdef>
+
     <property file="../../../build.properties" />
     <!-- javac properties -->
     <property name="javac.debug" value="on" />
     <property name="javac.level" value="source,lines,vars"/>
     <property name="javac.optimize" value="on" />
     <property name="javac.deprecation" value="off" />
-    <property name="javac.version" value="1.6" />
+    <property name="javac.version" value="1.7" />
     <property name="javac.args" value="" />
     <!-- TODO we should use warning...   <property name="javac.args.warnings" value="-Xlint:unchecked" /> -->
     <property name="javac.args.warnings" value="" />
@@ -38,16 +45,22 @@
     <property name="src.dir" value="src/main/java/org/apache/pig/piggybank" />
     <property name="hsqldb.jar" value="../../../build/ivy/lib/Pig/hsqldb-1.8.0.10.jar"/>
 
-    <!-- JobHistoryLoader currently does not support 0.23 -->
-    <condition property="build.classes.excludes" value="**/HadoopJobHistoryLoader.java" else="">
-        <equals arg1="${hadoopversion}" arg2="23"/>
-    </condition>
-    <condition property="test.classes.excludes" value="**/TestHadoopJobHistoryLoader.java" else="">
+    <!--
+      Hadoop master version
+      (Value 23 is translated for backward compatibility in old build scripts)
+    -->
+    <if>
         <equals arg1="${hadoopversion}" arg2="23"/>
-    </condition>
+        <then>
+            <echo>Property setting hadoopversion=23 is deprecated. Overwriting to hadoopversion=2</echo>
+            <var name="hadoopversion" unset="true"/>
+            <property name="hadoopversion" value="2" />
+        </then>
+    </if>
+    <property name="hadoopversion" value="2" />
 
-    <condition property="hadoopsuffix" value="2" else="1">
-        <equals arg1="${hadoopversion}" arg2="23"/>
+    <condition property="hadoopsuffix" value="2" else="">
+        <equals arg1="${hadoopversion}" arg2="2"/>
     </condition>
 
     <!-- jar properties -->



Mime
View raw message