hive-commits mailing list archives

From br...@apache.org
Subject svn commit: r1626482 [1/6] - in /hive/branches/spark: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/ common/src/java/org/apache/hadoop/hive/conf/ data/files/ hcatalog/hc...
Date Sat, 20 Sep 2014 17:34:43 GMT
Author: brock
Date: Sat Sep 20 17:34:39 2014
New Revision: 1626482

URL: http://svn.apache.org/r1626482
Log:
HIVE-8206 - Merge from trunk to spark 9/20/14

Added:
    hive/branches/spark/data/files/data_with_escape.txt
      - copied unchanged from r1626481, hive/trunk/data/files/data_with_escape.txt
    hive/branches/spark/data/files/opencsv-data.txt
      - copied unchanged from r1626481, hive/trunk/data/files/opencsv-data.txt
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorerMulti.java
      - copied unchanged from r1626481, hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorerMulti.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/MetastoreAuthzAPIDisallowAuthorizer.java
      - copied unchanged from r1626481, hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/MetastoreAuthzAPIDisallowAuthorizer.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/JdbcUriParseException.java
      - copied unchanged from r1626481, hive/trunk/jdbc/src/java/org/apache/hive/jdbc/JdbcUriParseException.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientException.java
      - copied unchanged from r1626481, hive/trunk/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientException.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
      - copied unchanged from r1626481, hive/trunk/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
    hive/branches/spark/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql
      - copied unchanged from r1626481, hive/trunk/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountMerge.java
      - copied unchanged from r1626481, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountMerge.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java
      - copied unchanged from r1626481, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdConfOnlyAuthorizerFactory.java
      - copied unchanged from r1626481, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdConfOnlyAuthorizerFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java
      - copied unchanged from r1626481, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java
    hive/branches/spark/ql/src/test/queries/clientnegative/authorization_delete_nodeletepriv.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientnegative/authorization_delete_nodeletepriv.q
    hive/branches/spark/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
    hive/branches/spark/ql/src/test/queries/clientnegative/update_no_such_table.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientnegative/update_no_such_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/acid_vectorization.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/acid_vectorization.q
    hive/branches/spark/ql/src/test/queries/clientpositive/array_map_access_nonconstant.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/array_map_access_nonconstant.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_cli_createtab_noauthzapi.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/authorization_cli_createtab_noauthzapi.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_cli_nonsql.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/authorization_cli_nonsql.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_cli_stdconfigauth.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/authorization_cli_stdconfigauth.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_delete.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/authorization_delete.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_delete_own_table.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/authorization_delete_own_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_non_id.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/authorization_non_id.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_update.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/authorization_update.q
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_update_own_table.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/authorization_update_own_table.q
    hive/branches/spark/ql/src/test/queries/clientpositive/escape3.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/escape3.q
    hive/branches/spark/ql/src/test/queries/clientpositive/serde_opencsv.q
      - copied unchanged from r1626481, hive/trunk/ql/src/test/queries/clientpositive/serde_opencsv.q
    hive/branches/spark/ql/src/test/results/clientnegative/authorization_delete_nodeletepriv.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientnegative/authorization_delete_nodeletepriv.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/update_no_such_table.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientnegative/update_no_such_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/acid_vectorization.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/acid_vectorization.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/array_map_access_nonconstant.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/array_map_access_nonconstant.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_cli_createtab_noauthzapi.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/authorization_cli_createtab_noauthzapi.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_cli_nonsql.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/authorization_cli_nonsql.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_cli_stdconfigauth.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/authorization_cli_stdconfigauth.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_delete.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/authorization_delete.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_delete_own_table.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/authorization_delete_own_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_non_id.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/authorization_non_id.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_update.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/authorization_update.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/authorization_update_own_table.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/authorization_update_own_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/escape3.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/escape3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/serde_opencsv.q.out
      - copied unchanged from r1626481, hive/trunk/ql/src/test/results/clientpositive/serde_opencsv.q.out
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java
      - copied unchanged from r1626481, hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/TestOpenCSVSerde.java
      - copied unchanged from r1626481, hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/TestOpenCSVSerde.java
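
Worth calling out in the list above: OpenCSVSerde is a brand-new CSV serde (the committed serde_opencsv.q/.q.out files cover the HiveQL side). Below is a minimal sketch of driving it directly from Java; it assumes the era's AbstractSerDe initialize(Configuration, Properties)/deserialize(Writable) contract and the serde's separatorChar/quoteChar table properties, so treat the property names as assumptions rather than documentation:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.OpenCSVSerde;
    import org.apache.hadoop.io.Text;

    public class OpenCSVSerdeSketch {
      public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("columns", "a,b,c");
        props.setProperty("columns.types", "string,string,string");
        props.setProperty("separatorChar", ",");   // assumed table property name
        props.setProperty("quoteChar", "\"");      // assumed table property name

        OpenCSVSerde serde = new OpenCSVSerde();
        serde.initialize(new Configuration(), props);

        // OpenCSVSerde hands every field back as a string, whatever the declared type.
        Object row = serde.deserialize(new Text("1,\"two\",3"));
        System.out.println(row);
      }
    }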
Removed:
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatPigStorer.java
    hive/branches/spark/ql/src/test/queries/negative/invalid_list_index.q
    hive/branches/spark/ql/src/test/queries/negative/invalid_list_index2.q
    hive/branches/spark/ql/src/test/queries/negative/invalid_map_index.q
    hive/branches/spark/ql/src/test/queries/negative/invalid_map_index2.q
    hive/branches/spark/ql/src/test/results/compiler/errors/invalid_list_index.q.out
    hive/branches/spark/ql/src/test/results/compiler/errors/invalid_list_index2.q.out
    hive/branches/spark/ql/src/test/results/compiler/errors/invalid_map_index.q.out
    hive/branches/spark/ql/src/test/results/compiler/errors/invalid_map_index2.q.out
Modified:
    hive/branches/spark/   (props changed)
    hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
    hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java
    hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java
    hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
    hive/branches/spark/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java
    hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactoryForTest.java
    hive/branches/spark/jdbc/pom.xml
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveDriver.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/Utils.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
    hive/branches/spark/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
    hive/branches/spark/packaging/pom.xml
    hive/branches/spark/pom.xml
    hive/branches/spark/ql/if/queryplan.thrift
    hive/branches/spark/ql/pom.xml
    hive/branches/spark/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
    hive/branches/spark/ql/src/gen/thrift/gen-cpp/queryplan_types.h
    hive/branches/spark/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
    hive/branches/spark/ql/src/gen/thrift/gen-php/Types.php
    hive/branches/spark/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
    hive/branches/spark/ql/src/gen/thrift/gen-rb/queryplan_types.rb
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCount.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFCountStar.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionCodec.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ZlibCodec.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandUtil.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerCLI.java
    hive/branches/spark/ql/src/test/queries/clientpositive/authorization_cli_createtab.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_all_non_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_all_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_where_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/delete_whole_partition.q
    hive/branches/spark/ql/src/test/queries/clientpositive/drop_index.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_update_delete.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_values_dynamic_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_values_non_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/insert_values_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/metadata_only_queries_with_filters.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_after_multiple_inserts.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_all_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/update_where_partitioned.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vectorization_short_regress.q
    hive/branches/spark/ql/src/test/results/clientpositive/drop_index.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/insert_values_non_partitioned.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/metadata_only_queries_with_filters.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/insert_values_non_partitioned.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
    hive/branches/spark/serde/pom.xml
    hive/branches/spark/serde/src/gen/thrift/gen-cpp/complex_types.cpp
    hive/branches/spark/serde/src/gen/thrift/gen-cpp/complex_types.h
    hive/branches/spark/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
    hive/branches/spark/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
    hive/branches/spark/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/PropValueUnion.java
    hive/branches/spark/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/SetIntString.java
    hive/branches/spark/serde/src/gen/thrift/gen-py/complex/ttypes.py
    hive/branches/spark/serde/src/gen/thrift/gen-rb/complex_types.rb
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveChar.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveVarchar.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyString.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveCharObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/CLIService.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
    hive/branches/spark/service/src/java/org/apache/hive/service/server/HiveServer2.java
    hive/branches/spark/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
    hive/branches/spark/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/branches/spark/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/branches/spark/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java

Propchange: hive/branches/spark/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1625359-1626481

Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java (original)
+++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableOutputFormat.java Sat Sep 20 17:34:39 2014
@@ -22,10 +22,14 @@ import org.apache.accumulo.core.client.A
 import org.apache.accumulo.core.client.mapred.AccumuloOutputFormat;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.accumulo.core.data.Mutation;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.accumulo.AccumuloConnectionParameters;
 import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.util.Progressable;
 
 import com.google.common.base.Preconditions;
 
@@ -41,6 +45,13 @@ public class HiveAccumuloTableOutputForm
     super.checkOutputSpecs(ignored, job);
   }
 
+  @Override
+  public RecordWriter<Text,Mutation> getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) throws IOException {
+    configureAccumuloOutputFormat(job);
+
+    return super.getRecordWriter(ignored, job, name, progress);
+  }
+
   protected void configureAccumuloOutputFormat(JobConf job) throws IOException {
     AccumuloConnectionParameters cnxnParams = new AccumuloConnectionParameters(job);
 
@@ -76,16 +87,32 @@ public class HiveAccumuloTableOutputForm
 
   protected void setAccumuloConnectorInfo(JobConf conf, String username, AuthenticationToken token)
       throws AccumuloSecurityException {
-    AccumuloOutputFormat.setConnectorInfo(conf, username, token);
+    try {
+      AccumuloOutputFormat.setConnectorInfo(conf, username, token);
+    } catch (IllegalStateException e) {
+      // AccumuloOutputFormat complains if you re-set an already set value. We just don't care.
+      log.debug("Ignoring exception setting Accumulo Connector instance for user " + username, e);
+    }
   }
 
   @SuppressWarnings("deprecation")
   protected void setAccumuloZooKeeperInstance(JobConf conf, String instanceName, String zookeepers) {
-    AccumuloOutputFormat.setZooKeeperInstance(conf, instanceName, zookeepers);
+    try {
+      AccumuloOutputFormat.setZooKeeperInstance(conf, instanceName, zookeepers);
+    } catch (IllegalStateException ise) {
+      // AccumuloOutputFormat complains if you re-set an already set value. We just don't care.
+      log.debug("Ignoring exception setting ZooKeeper instance of " + instanceName + " at "
+          + zookeepers, ise);
+    }
   }
 
   protected void setAccumuloMockInstance(JobConf conf, String instanceName) {
-    AccumuloOutputFormat.setMockInstance(conf, instanceName);
+    try {
+      AccumuloOutputFormat.setMockInstance(conf, instanceName);
+    } catch (IllegalStateException e) {
+      // AccumuloOutputFormat complains if you re-set an already set value. We just don't care.
+      log.debug("Ignoring exception setting mock instance of " + instanceName, e);
+    }
   }
 
   protected void setDefaultAccumuloTableName(JobConf conf, String tableName) {

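The three try/catch guards added above share one pattern: a set-once Accumulo configuration call that throws IllegalStateException on a repeat invocation is downgraded to a debug log. That matters because the new getRecordWriter() override also calls configureAccumuloOutputFormat(job), so the same JobConf can legitimately be configured twice (e.g. when checkOutputSpecs() has already run). A generic sketch of the guard, with illustrative names that are not part of the patch:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public final class SetOnceGuard {
      private static final Logger LOG = LoggerFactory.getLogger(SetOnceGuard.class);

      private SetOnceGuard() {}

      /** Runs a set-once configuration call, swallowing an "already set" complaint. */
      public static void setQuietly(Runnable setter, String description) {
        try {
          setter.run();  // e.g. an anonymous Runnable wrapping an AccumuloOutputFormat.set* call
        } catch (IllegalStateException e) {
          // The output format complains if the value was already set; we just don't care.
          LOG.debug("Ignoring re-set of " + description, e);
        }
      }
    }
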
Modified: hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java (original)
+++ hive/branches/spark/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloRowSerializer.java Sat Sep 20 17:34:39 2014
@@ -99,9 +99,6 @@ public class AccumuloRowSerializer {
     // The ObjectInspector for the row ID
     ObjectInspector fieldObjectInspector = field.getFieldObjectInspector();
 
-    log.info("Serializing rowId with " + value + " in " + field + " using "
-        + rowIdFactory.getClass());
-
     // Serialize the row component using the RowIdFactory. In the normal case, this will just
     // delegate back to the "local" serializeRowId method
     byte[] data = rowIdFactory.serializeRowId(value, field, output);

Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Sat Sep 20 17:34:39 2014
@@ -214,8 +214,8 @@ public class HiveConf extends Configurat
     PLAN_SERIALIZATION("hive.plan.serialization.format", "kryo",
         "Query plan format serialization between client and task nodes. \n" +
         "Two supported values are : kryo and javaXML. Kryo is default."),
-    SCRATCHDIR("hive.exec.scratchdir", "/tmp/hive", 
-        "HDFS root scratch dir for Hive jobs which gets created with 777 permission. " +
+    SCRATCHDIR("hive.exec.scratchdir", "/tmp/hive",
+        "HDFS root scratch dir for Hive jobs which gets created with write all (733) permission. " +
         "For each connecting user, an HDFS scratch dir: ${hive.exec.scratchdir}/<username> is created, " +
         "with ${hive.scratch.dir.permission}."),
     LOCALSCRATCHDIR("hive.exec.local.scratchdir",
@@ -224,7 +224,7 @@ public class HiveConf extends Configurat
     DOWNLOADED_RESOURCES_DIR("hive.downloaded.resources.dir",
         "${system:java.io.tmpdir}" + File.separator + "${hive.session.id}_resources",
         "Temporary local directory for added resources in the remote file system."),
-    SCRATCHDIRPERMISSION("hive.scratch.dir.permission", "700", 
+    SCRATCHDIRPERMISSION("hive.scratch.dir.permission", "700",
         "The permission for the user specific scratch directories that get created."),
     SUBMITVIACHILD("hive.exec.submitviachild", false, ""),
     SUBMITLOCALTASKVIACHILD("hive.exec.submit.local.task.via.child", true,
@@ -1253,10 +1253,16 @@ public class HiveConf extends Configurat
         "This param is to control whether or not only do lock on queries\n" +
         "that need to execute at least one mapred job."),
 
+     // Zookeeper related configs
     HIVE_ZOOKEEPER_QUORUM("hive.zookeeper.quorum", "",
-        "The list of ZooKeeper servers to talk to. This is only needed for read/write locks."),
+        "List of ZooKeeper servers to talk to. This is needed for: " +
+        "1. Read/write locks - when hive.lock.manager is set to " +
+        "org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager, " +
+        "2. When HiveServer2 supports service discovery via Zookeeper."),
     HIVE_ZOOKEEPER_CLIENT_PORT("hive.zookeeper.client.port", "2181",
-        "The port of ZooKeeper servers to talk to. This is only needed for read/write locks."),
+        "The port of ZooKeeper servers to talk to. " +
+        "If the list of Zookeeper servers specified in hive.zookeeper.quorum," +
+        "does not contain port numbers, this value is used."),
     HIVE_ZOOKEEPER_SESSION_TIMEOUT("hive.zookeeper.session.timeout", 600*1000,
         "ZooKeeper client's session timeout. The client is disconnected, and as a result, all locks released, \n" +
         "if a heartbeat is not sent in the timeout."),
@@ -1456,11 +1462,6 @@ public class HiveConf extends Configurat
         "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" +
         "which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."),
 
-    // Hive global init file location
-    HIVE_GLOBAL_INIT_FILE_LOCATION("hive.server2.global.init.file.location", "${env:HIVE_CONF_DIR}",
-        "The location of HS2 global init file (.hiverc).\n" +
-        "If the property is reset, the value must be a valid path where the init file is located."),
-
     // prefix used to auto generated column aliases (this should be started with '_')
     HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL("hive.autogen.columnalias.prefix.label", "_c",
         "String used as a prefix when auto generating column alias.\n" +
@@ -1499,16 +1500,29 @@ public class HiveConf extends Configurat
         "table. From 0.12 onwards, they are displayed separately. This flag will let you\n" +
         "get old behavior, if desired. See, test-case in patch for HIVE-6689."),
 
+     // HiveServer2 specific configs
     HIVE_SERVER2_MAX_START_ATTEMPTS("hive.server2.max.start.attempts", 30L, new RangeValidator(0L, null),
-        "This number of times HiveServer2 will attempt to start before exiting, sleeping 60 seconds between retries. \n" +
-        "The default of 30 will keep trying for 30 minutes."),
-
+        "Number of times HiveServer2 will attempt to start before exiting, sleeping 60 seconds " +
+        "between retries. \n The default of 30 will keep trying for 30 minutes."),
+    HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY("hive.server2.support.dynamic.service.discovery", false,
+        "Whether HiveServer2 supports dynamic service discovery for its clients. " +
+        "To support this, each instance of HiveServer2 currently uses ZooKeeper to register itself, " +
+        "when it is brought up. JDBC/ODBC clients should use the ZooKeeper ensemble: " +
+        "hive.zookeeper.quorum in their connection string."),
+    HIVE_SERVER2_ZOOKEEPER_NAMESPACE("hive.server2.zookeeper.namespace", "hiveserver2",
+        "The parent node in ZooKeeper used by HiveServer2 when supporting dynamic service discovery."),
+    // HiveServer2 global init file location
+    HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION("hive.server2.global.init.file.location", "${env:HIVE_CONF_DIR}",
+        "The location of HS2 global init file (.hiverc).\n" +
+        "If the property is reset, the value must be a valid path where the init file is located."),
     HIVE_SERVER2_TRANSPORT_MODE("hive.server2.transport.mode", "binary", new StringSet("binary", "http"),
         "Transport mode of HiveServer2."),
+    HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", "",
+        "Bind host on which to run the HiveServer2 Thrift service."),
 
     // http (over thrift) transport settings
     HIVE_SERVER2_THRIFT_HTTP_PORT("hive.server2.thrift.http.port", 10001,
-        "Port number when in HTTP mode."),
+        "Port number of HiveServer2 Thrift interface when hive.server2.transport.mode is 'http'."),
     HIVE_SERVER2_THRIFT_HTTP_PATH("hive.server2.thrift.http.path", "cliservice",
         "Path component of URL endpoint when in HTTP mode."),
     HIVE_SERVER2_THRIFT_HTTP_MIN_WORKER_THREADS("hive.server2.thrift.http.min.worker.threads", 5,
@@ -1525,11 +1539,7 @@ public class HiveConf extends Configurat
 
     // binary transport settings
     HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000,
-        "Port number of HiveServer2 Thrift interface.\n" +
-        "Can be overridden by setting $HIVE_SERVER2_THRIFT_PORT"),
-    HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", "",
-        "Bind host on which to run the HiveServer2 Thrift interface.\n" +
-        "Can be overridden by setting $HIVE_SERVER2_THRIFT_BIND_HOST"),
+        "Port number of HiveServer2 Thrift interface when hive.server2.transport.mode is 'binary'."),
     // hadoop.rpc.protection being set to a higher level than HiveServer2
     // does not make sense in most situations.
     // HiveServer2 ignores hadoop.rpc.protection in favor of hive.server2.thrift.sasl.qop.

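The new hive.server2.support.dynamic.service.discovery and hive.server2.zookeeper.namespace settings pair with the ZooKeeperHiveClientHelper added on the JDBC side of this merge. A hedged sketch of what a client connection looks like, assuming the serviceDiscoveryMode/zooKeeperNamespace URL parameters that accompany this feature (and a server started with discovery enabled):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class ZkDiscoverySketch {
      public static void main(String[] args) throws Exception {
        // The host list names the ZooKeeper ensemble (hive.zookeeper.quorum), not a
        // HiveServer2 host; the driver resolves a live HS2 instance registered under
        // the hiveserver2 namespace.
        String url = "jdbc:hive2://zk1:2181,zk2:2181,zk3:2181/default;"
            + "serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2";
        try (Connection conn = DriverManager.getConnection(url, "hive", "")) {
          System.out.println("Connected via " + conn.getMetaData().getURL());
        }
      }
    }
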
Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/MockLoader.java Sat Sep 20 17:34:39 2014
@@ -36,6 +36,7 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
 import org.apache.pig.LoadFunc;
 import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
 import org.apache.pig.data.Tuple;

Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java Sat Sep 20 17:34:39 2014
@@ -23,9 +23,8 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Iterator;
 
-import junit.framework.TestCase;
-
 import org.apache.commons.io.FileUtils;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
@@ -42,6 +41,7 @@ import org.apache.hadoop.mapreduce.Recor
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatContext;
@@ -51,12 +51,16 @@ import org.apache.hive.hcatalog.mapreduc
 import org.apache.hive.hcatalog.mapreduce.HCatOutputFormat;
 import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;
 import org.apache.hive.hcatalog.mapreduce.HCatMapRedUtil;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.data.Tuple;
 
-public class TestE2EScenarios extends TestCase {
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
+public class TestE2EScenarios {
   private static final String TEST_DATA_DIR = System.getProperty("java.io.tmpdir") + File.separator
       + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis();
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
@@ -69,9 +73,8 @@ public class TestE2EScenarios extends Te
     return "orc";
   }
 
-  @Override
-  protected void setUp() throws Exception {
-
+  @Before
+  public void setUp() throws Exception {
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);
@@ -90,8 +93,8 @@ public class TestE2EScenarios extends Te
 
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     try {
       dropTable("inpy");
       dropTable("rc5318");
@@ -146,16 +149,13 @@ public class TestE2EScenarios extends Te
     System.err.println("===");
   }
 
-
   private void copyTable(String in, String out) throws IOException, InterruptedException {
     Job ijob = new Job();
     Job ojob = new Job();
     HCatInputFormat inpy = new HCatInputFormat();
     inpy.setInput(ijob , null, in);
     HCatOutputFormat oupy = new HCatOutputFormat();
-    oupy.setOutput(ojob,
-      OutputJobInfo.create(null, out, new HashMap<String,String>()
-      ));
+    oupy.setOutput(ojob, OutputJobInfo.create(null, out, new HashMap<String,String>()));
 
     // Test HCatContext
 
@@ -207,6 +207,7 @@ public class TestE2EScenarios extends Te
   }
 
 
+  @Test
   public void testReadOrcAndRCFromPig() throws Exception {
     String tableSchema = "ti tinyint, si smallint,i int, bi bigint, f float, d double, b boolean";
 
@@ -224,15 +225,14 @@ public class TestE2EScenarios extends Te
     driverRun("LOAD DATA LOCAL INPATH '"+TEXTFILE_LOCN+"' OVERWRITE INTO TABLE inpy");
 
     // write it out from hive to an rcfile table, and to an orc table
-//        driverRun("insert overwrite table rc5318 select * from inpy");
+    //driverRun("insert overwrite table rc5318 select * from inpy");
     copyTable("inpy","rc5318");
-//        driverRun("insert overwrite table orc5318 select * from inpy");
+    //driverRun("insert overwrite table orc5318 select * from inpy");
     copyTable("inpy","orc5318");
 
     pigDump("inpy");
     pigDump("rc5318");
     pigDump("orc5318");
-
   }
 
 }

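The TestE2EScenarios change above is a mechanical JUnit 3 to JUnit 4 migration, and the same recipe recurs throughout the test diffs in this merge. In outline (class and method names here are illustrative):

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    import static org.junit.Assert.assertEquals;

    // was: public class MigratedTest extends junit.framework.TestCase {
    public class MigratedTest {

      @Before
      public void setUp() throws Exception {     // was: protected, with @Override
        // build fixtures
      }

      @After
      public void tearDown() throws Exception {  // was: protected, with @Override
        // drop fixtures
      }

      @Test                                      // tests are no longer picked up by name
      public void testReadSomething() {
        assertEquals(2, 1 + 1);
      }
    }
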
Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java Sat Sep 20 17:34:39 2014
@@ -34,6 +34,7 @@ import java.util.Map;
 import java.util.Properties;
 
 import org.apache.commons.io.FileUtils;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -45,11 +46,13 @@ import org.apache.hadoop.hive.ql.process
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapreduce.Job;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.data.Pair;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.ResourceStatistics;
@@ -60,10 +63,13 @@ import org.apache.pig.impl.logicalLayer.
 import org.apache.pig.PigRunner;
 import org.apache.pig.tools.pigstats.OutputStats;
 import org.apache.pig.tools.pigstats.PigStats;
+
 import org.joda.time.DateTime;
+
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -72,7 +78,7 @@ import static org.junit.Assert.*;
 public class TestHCatLoader {
   private static final Logger LOG = LoggerFactory.getLogger(TestHCatLoader.class);
   private static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty("java.io.tmpdir") +
-          File.separator + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis());
+      File.separator + TestHCatLoader.class.getCanonicalName() + "-" + System.currentTimeMillis());
   private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
   private static final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
   private static final String COMPLEX_FILE_NAME = TEST_DATA_DIR + "/complex.input.data";
@@ -93,6 +99,7 @@ public class TestHCatLoader {
   private void dropTable(String tablename) throws IOException, CommandNeedRetryException {
     dropTable(tablename, driver);
   }
+
   static void dropTable(String tablename, Driver driver) throws IOException, CommandNeedRetryException {
     driver.run("drop table if exists " + tablename);
   }
@@ -100,7 +107,8 @@ public class TestHCatLoader {
   private void createTable(String tablename, String schema, String partitionedBy) throws IOException, CommandNeedRetryException {
     createTable(tablename, schema, partitionedBy, driver, storageFormat());
   }
-  static void createTable(String tablename, String schema, String partitionedBy, Driver driver, String storageFormat) 
+
+  static void createTable(String tablename, String schema, String partitionedBy, Driver driver, String storageFormat)
       throws IOException, CommandNeedRetryException {
     String createTable;
     createTable = "create table " + tablename + "(" + schema + ") ";
@@ -114,6 +122,7 @@ public class TestHCatLoader {
   private void createTable(String tablename, String schema) throws IOException, CommandNeedRetryException {
     createTable(tablename, schema, null);
   }
+
   /**
    * Execute Hive CLI statement
    * @param cmd arbitrary statement to execute
@@ -125,20 +134,20 @@ public class TestHCatLoader {
       throw new IOException("Failed to execute \"" + cmd + "\". Driver returned " + cpr.getResponseCode() + " Error: " + cpr.getErrorMessage());
     }
   }
+
   private static void checkProjection(FieldSchema fs, String expectedName, byte expectedPigType) {
     assertEquals(fs.alias, expectedName);
     assertEquals("Expected " + DataType.findTypeName(expectedPigType) + "; got " +
       DataType.findTypeName(fs.type), expectedPigType, fs.type);
   }
-  
+
   @Before
   public void setup() throws Exception {
-
     File f = new File(TEST_WAREHOUSE_DIR);
     if (f.exists()) {
       FileUtil.fullyDelete(f);
     }
-    if(!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
+    if (!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
       throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
     }
 
@@ -192,7 +201,7 @@ public class TestHCatLoader {
     server.registerQuery("B = foreach A generate a,b;", ++i);
     server.registerQuery("B2 = filter B by a < 2;", ++i);
     server.registerQuery("store B2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=0');", ++i);
-    
+
     server.registerQuery("C = foreach A generate a,b;", ++i);
     server.registerQuery("C2 = filter C by a >= 2;", ++i);
     server.registerQuery("store C2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=1');", ++i);
@@ -470,7 +479,7 @@ public class TestHCatLoader {
       {
         fs.delete(new Path(PIGOUTPUT_DIR), true);
       }
-    }finally {
+    } finally {
       new File(PIG_FILE).delete();
     }
   }
@@ -534,7 +543,7 @@ public class TestHCatLoader {
   }
 
   /**
-   * basic tests that cover each scalar type 
+   * basic tests that cover each scalar type
    * https://issues.apache.org/jira/browse/HIVE-5814
    */
   private static final class AllTypesTable {

Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java Sat Sep 20 17:34:39 2014
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.Command
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecException;
@@ -44,8 +45,10 @@ import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
+
 import org.junit.BeforeClass;
 import org.junit.Test;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -225,7 +228,7 @@ public class TestHCatLoaderComplexSchema
       dropTable(tablename);
     }
   }
-  
+
   private void compareTuples(Tuple t1, Tuple t2) throws ExecException {
     Assert.assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
     for (int i = 0; i < t1.size(); i++) {
@@ -237,7 +240,7 @@ public class TestHCatLoaderComplexSchema
       Assert.assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
     }
   }
-  
+
   private String noOrder(String s) {
     char[] chars = s.toCharArray();
     Arrays.sort(chars);

Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java Sat Sep 20 17:34:39 2014
@@ -31,8 +31,10 @@ import java.util.Properties;
 
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
+
 import org.apache.pig.EvalFunc;
 import org.apache.pig.ExecType;
 import org.apache.pig.PigException;
@@ -41,10 +43,13 @@ import org.apache.pig.data.DataByteArray
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.FrontendException;
 import org.apache.pig.impl.util.LogUtils;
+
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
+
 import org.junit.Assert;
 import org.junit.Test;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -63,6 +68,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "tinyint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       Integer.toString(300));
   }
+
   @Test
   public void testWriteSmallint() throws Exception {
     pigValueRangeTest("junitTypeTest1", "smallint", "int", null, Integer.toString(Short.MIN_VALUE),
@@ -72,6 +78,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "smallint", "int", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       Integer.toString(Short.MAX_VALUE + 1));
   }
+
   @Test
   public void testWriteChar() throws Exception {
     pigValueRangeTest("junitTypeTest1", "char(5)", "chararray", null, "xxx", "xxx  ");
@@ -81,6 +88,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "char(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       "too_long2");
   }
+
   @Test
   public void testWriteVarchar() throws Exception {
     pigValueRangeTest("junitTypeTest1", "varchar(5)", "chararray", null, "xxx", "xxx");
@@ -90,6 +98,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "varchar(5)", "chararray", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       "too_long2");
   }
+
   @Test
   public void testWriteDecimalXY() throws Exception {
     pigValueRangeTest("junitTypeTest1", "decimal(5,2)", "bigdecimal", null, BigDecimal.valueOf(1.2).toString(),
@@ -100,6 +109,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "decimal(5,2)", "bigdecimal", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       BigDecimal.valueOf(500.123).toString());
   }
+
   @Test
   public void testWriteDecimalX() throws Exception {
     //interestingly decimal(2) means decimal(2,0)
@@ -110,6 +120,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "decimal(2)", "bigdecimal", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       BigDecimal.valueOf(50.123).toString());
   }
+
   @Test
   public void testWriteDecimal() throws Exception {
     //decimal means decimal(10,0)
@@ -120,9 +131,10 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "decimal", "bigdecimal", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       BigDecimal.valueOf(12345678900L).toString());
   }
+
   /**
    * because we want to ignore TZ which is included in toString()
-   * include time to make sure it's 0 
+   * include time to make sure it's 0
    */
   private static final String FORMAT_4_DATE = "yyyy-MM-dd HH:mm:ss";
   @Test
@@ -142,6 +154,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest6", "date", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       d.plusMinutes(1).toString(), FORMAT_4_DATE);//date out of range due to time!=0
   }
+
   @Test
   public void testWriteDate3() throws Exception {
     DateTime d = new DateTime(1991,10,11,23,10,DateTimeZone.forOffsetHours(-11));
@@ -154,6 +167,7 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest6", "date", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       d.plusMinutes(1).toString(), FORMAT_4_DATE);
   }
+
   @Test
   public void testWriteDate2() throws Exception {
     DateTime d = new DateTime(1991,11,12,0,0, DateTimeZone.forID("US/Eastern"));
@@ -168,46 +182,48 @@ public class TestHCatStorer extends HCat
     pigValueRangeTestOverflow("junitTypeTest3", "date", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Throw,
       d.plusMinutes(1).toString(), FORMAT_4_DATE);
   }
+
   /**
-   * Note that the value that comes back from Hive will have local TZ on it.  Using local is 
+   * Note that the value that comes back from Hive will have local TZ on it.  Using local is
    * arbitrary but DateTime needs TZ (or will assume default) and Hive does not have TZ.
    * So if you start with Pig value in TZ=x and write to Hive, when you read it back the TZ may
    * be different.  The millis value should match, of course.
-   * 
+   *
    * @throws Exception
    */
   @Test
   public void testWriteTimestamp() throws Exception {
     DateTime d = new DateTime(1991,10,11,14,23,30, 10);//uses default TZ
-    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.plusHours(2);
     pigValueRangeTest("junitTypeTest2", "timestamp", "datetime", HCatBaseStorer.OOR_VALUE_OPT_VALUES.Null,
       d.toString(), d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.toDateTime(DateTimeZone.UTC);
-    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
 
     d = new DateTime(1991,10,11,23,24,25, 26);
-    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest1", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
     d = d.toDateTime(DateTimeZone.UTC);
-    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(), 
+    pigValueRangeTest("junitTypeTest3", "timestamp", "datetime", null, d.toString(),
       d.toDateTime(DateTimeZone.getDefault()).toString());
   }
   //End: tests that check values from Pig that are out of range for target column
 
-
   private void pigValueRangeTestOverflow(String tblName, String hiveType, String pigType,
     HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue, String format) throws Exception {
     pigValueRangeTest(tblName, hiveType, pigType, goal, inputValue, null, format);
   }
+
   private void pigValueRangeTestOverflow(String tblName, String hiveType, String pigType,
                                  HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue) throws Exception {
     pigValueRangeTest(tblName, hiveType, pigType, goal, inputValue, null, null);
   }
+
   private void pigValueRangeTest(String tblName, String hiveType, String pigType,
-                                 HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue, 
+                                 HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue,
                                  String expectedValue) throws Exception {
     pigValueRangeTest(tblName, hiveType, pigType, goal, inputValue, expectedValue, null);
   }
@@ -218,6 +234,7 @@ public class TestHCatStorer extends HCat
   String getStorageFormat() {
     return "RCFILE";
   }
+
   /**
    * This is used to test how Pig values of various data types which are out of range for Hive target
    * column are handled.  Currently the options are to raise an error or write NULL.
@@ -236,7 +253,7 @@ public class TestHCatStorer extends HCat
    * @param format date format to use for comparison of values since default DateTime.toString()
    *               includes TZ which is meaningless for Hive DATE type
    */
-  private void pigValueRangeTest(String tblName, String hiveType, String pigType, 
+  private void pigValueRangeTest(String tblName, String hiveType, String pigType,
                                  HCatBaseStorer.OOR_VALUE_OPT_VALUES goal, String inputValue, String expectedValue, String format)
     throws Exception {
     TestHCatLoader.dropTable(tblName, driver);
@@ -309,6 +326,7 @@ public class TestHCatStorer extends HCat
     Unfortunately Timestamp.toString() adjusts the value for local TZ and 't' is a String
     thus the timestamp in 't' doesn't match rawData*/
   }
+
   /**
    * Create a data file with datatypes added in 0.13.  Read it with Pig and use
    * Pig + HCatStorer to write to a Hive table.  Then read it using Pig and Hive
@@ -365,6 +383,7 @@ public class TestHCatStorer extends HCat
     }
     Assert.assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME, NUM_ROWS, numRowsRead);
   }
+
   static void dumpFile(String fileName) throws Exception {
     File file = new File(fileName);
     BufferedReader reader = new BufferedReader(new FileReader(file));
@@ -375,6 +394,7 @@ public class TestHCatStorer extends HCat
     }
     reader.close();
   }
+
   @Test
   public void testPartColsInData() throws IOException, CommandNeedRetryException {
 

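The timestamp tests in the hunks above depend on a Joda-Time invariant the javadoc calls out: re-expressing a DateTime in another zone changes its toString() (the TZ suffix) but not the instant it denotes, which is why the expected values are built through toDateTime(DateTimeZone.getDefault()). A minimal standalone sketch of that invariant, assuming joda-time on the classpath (class name hypothetical):

    import org.joda.time.DateTime;
    import org.joda.time.DateTimeZone;

    public class TzRoundTrip {
      public static void main(String[] args) {
        // Built in the JVM default zone, as the tests above do.
        DateTime local = new DateTime(1991, 10, 11, 14, 23, 30, 10);
        // Same instant expressed in UTC: the string differs, the millis do not.
        DateTime utc = local.toDateTime(DateTimeZone.UTC);
        System.out.println(local);                                // default-zone offset suffix
        System.out.println(utc);                                  // Z suffix
        System.out.println(local.getMillis() == utc.getMillis()); // true
      }
    }
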
Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java Sat Sep 20 17:34:39 2014
@@ -25,20 +25,26 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
+
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.data.Pair;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
 
-public class TestHCatStorerMulti extends TestCase {
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestHCatStorerMulti {
   public static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(
           System.getProperty("user.dir") + "/build/test/data/" +
                   TestHCatStorerMulti.class.getCanonicalName() + "-" + System.currentTimeMillis());
@@ -77,8 +83,8 @@ public class TestHCatStorerMulti extends
     createTable(tablename, schema, null);
   }
 
-  @Override
-  protected void setUp() throws Exception {
+  @Before
+  public void setUp() throws Exception {
     if (driver == null) {
       HiveConf hiveConf = new HiveConf(this.getClass());
       hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -92,14 +98,13 @@ public class TestHCatStorerMulti extends
     cleanup();
   }
 
-  @Override
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     cleanup();
   }
 
+  @Test
   public void testStoreBasicTable() throws Exception {
-
-
     createTable(BASIC_TABLE, "a int, b string");
 
     populateBasicFile();
@@ -117,6 +122,7 @@ public class TestHCatStorerMulti extends
     assertEquals(basicInputData.size(), unpartitionedTableValuesReadFromHiveDriver.size());
   }
 
+  @Test
   public void testStorePartitionedTable() throws Exception {
     createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
 
@@ -139,9 +145,8 @@ public class TestHCatStorerMulti extends
     assertEquals(basicInputData.size(), partitionedTableValuesReadFromHiveDriver.size());
   }
 
+  @Test
   public void testStoreTableMulti() throws Exception {
-
-
     createTable(BASIC_TABLE, "a int, b string");
     createTable(PARTITIONED_TABLE, "a int, b string", "bkt string");
 

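The TestHCatStorerMulti hunks above are a routine JUnit 3 to JUnit 4 migration: drop the TestCase superclass, turn the protected setUp()/tearDown() overrides into public @Before/@After methods, and annotate each test with @Test so the runner no longer depends on the testXxx naming convention. Reduced to a sketch (class and method bodies hypothetical):

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    import static org.junit.Assert.assertEquals;

    public class MigratedTest {   // was: extends junit.framework.TestCase
      @Before                     // was: @Override protected void setUp()
      public void setUp() { /* per-test initialization */ }

      @After                      // was: @Override protected void tearDown()
      public void tearDown() { /* per-test cleanup */ }

      @Test                       // was: discovered via the test* name prefix
      public void testStoreBasicTable() {
        assertEquals(2, 1 + 1);
      }
    }
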
Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerWrapper.java Sat Sep 20 17:34:39 2014
@@ -25,10 +25,13 @@ import java.util.Iterator;
 import java.util.UUID;
 
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
+
 import org.apache.pig.ExecType;
 import org.apache.pig.PigServer;
+
 import org.junit.Assert;
 import org.junit.Test;
 

Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestOrcHCatStorer.java Sat Sep 20 17:34:39 2014
@@ -18,11 +18,15 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-public class TestOrcHCatStorer extends TestHCatStorerMulti {
+import java.io.IOException;
 
-  @Override
-  protected String storageFormat() {
-    return "orc";
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class TestOrcHCatStorer extends TestHCatStorer {
+  @Override String getStorageFormat() {
+    return "ORC";
   }
 }
-

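TestOrcHCatStorer now extends TestHCatStorer rather than TestHCatStorerMulti, so the whole storer suite is re-run against ORC by overriding a single hook: the base class routes its CREATE TABLE statements through getStorageFormat(). The shape of that pattern in outline (everything except the getStorageFormat name is hypothetical):

    // Template-method style: the base suite fixes the test logic and asks a
    // hook for the storage format; one small subclass per format re-runs it all.
    abstract class StorerSuiteSketch {
      String getStorageFormat() {
        return "RCFILE"; // default exercised by the base suite
      }

      final String createTableDdl(String table) {
        return "create table " + table + " (a int) stored as " + getStorageFormat();
      }
    }

    class OrcStorerSuiteSketch extends StorerSuiteSketch {
      @Override
      String getStorageFormat() {
        return "ORC";
      }
    }
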
Modified: hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java (original)
+++ hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestPigHCatUtil.java Sat Sep 20 17:34:39 2014
@@ -20,14 +20,18 @@
 package org.apache.hive.hcatalog.pig;
 
 import com.google.common.collect.Lists;
+
 import junit.framework.Assert;
+
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
+
 import org.apache.pig.ResourceSchema;
 import org.apache.pig.ResourceSchema.ResourceFieldSchema;
 import org.apache.pig.data.DataType;
 import org.apache.pig.impl.util.UDFContext;
+
 import org.junit.Test;
 
 public class TestPigHCatUtil {

Modified: hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java (original)
+++ hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java Sat Sep 20 17:34:39 2014
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.StringUtils;
@@ -314,9 +315,9 @@ public class TestTempletonUtils {
 
   @Test
   public void testFindContainingJar() throws Exception {
-    String result = TempletonUtils.findContainingJar(ShimLoader.class, ".*hive-shims.*");
+    String result = TempletonUtils.findContainingJar(Configuration.class, ".*hadoop.*\\.jar.*");
     Assert.assertNotNull(result);
-    result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*hive-shims.*");
+    result = TempletonUtils.findContainingJar(FileSystem.class, ".*hadoop.*\\.jar.*");
     Assert.assertNotNull(result);
     result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*unknownjar.*");
     Assert.assertNull(result);

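The TestTempletonUtils fix points findContainingJar at Hadoop classes (Configuration, FileSystem) that reliably load from jars, presumably because the hive-shims classes can be loaded from a classes/ directory on some test classpaths and then match no jar at all. For context, this kind of lookup is conventionally done by resolving the class's .class resource and keeping jar: URLs whose path matches the pattern; the following is a hedged sketch of the technique, not TempletonUtils' actual code:

    import java.net.URL;
    import java.util.Enumeration;

    public class JarLookupSketch {
      /** Returns the jar containing clazz whose path matches jarRegex, else null. */
      static String containingJar(Class<?> clazz, String jarRegex) throws Exception {
        String resource = clazz.getName().replace('.', '/') + ".class";
        Enumeration<URL> urls = clazz.getClassLoader().getResources(resource);
        while (urls.hasMoreElements()) {
          URL url = urls.nextElement();
          if ("jar".equals(url.getProtocol())) {
            // URL form: jar:file:/path/to/foo.jar!/pkg/Cls.class
            String path = url.getPath();
            String jar = path.substring(0, path.indexOf('!'));
            if (jar.matches(jarRegex)) {
              return jar;
            }
          }
        }
        return null; // class came from a directory, or no jar matched
      }
    }
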
Modified: hive/branches/spark/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java (original)
+++ hive/branches/spark/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java Sat Sep 20 17:34:39 2014
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.junit.Assert;
 import org.junit.BeforeClass;
 
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 
 public class TestExtendedAcls extends FolderPermissionBase {
@@ -46,7 +47,7 @@ public class TestExtendedAcls extends Fo
     baseSetup();
   }
 
-  List<AclEntry> aclSpec1 = Lists.newArrayList(
+  private final ImmutableList<AclEntry> aclSpec1 = ImmutableList.of(
       aclEntry(ACCESS, USER, FsAction.ALL),
       aclEntry(ACCESS, GROUP, FsAction.ALL),
       aclEntry(ACCESS, OTHER, FsAction.ALL),
@@ -55,7 +56,7 @@ public class TestExtendedAcls extends Fo
       aclEntry(ACCESS, GROUP, "bar", FsAction.READ_WRITE),
       aclEntry(ACCESS, GROUP, "foo", FsAction.READ_EXECUTE));
 
-  List<AclEntry> aclSpec2 = Lists.newArrayList(
+  private final ImmutableList<AclEntry> aclSpec2 = ImmutableList.of(
       aclEntry(ACCESS, USER, FsAction.ALL),
       aclEntry(ACCESS, GROUP, FsAction.ALL),
       aclEntry(ACCESS, OTHER, FsAction.READ_EXECUTE),
@@ -83,20 +84,20 @@ public class TestExtendedAcls extends Fo
     switch (permIndex) {
       case 0:
         FsPermission perm = fs.getFileStatus(new Path(locn)).getPermission();
-        Assert.assertEquals(perm.toString(), "rwxrwxrwx");
+        Assert.assertEquals("Location: " + locn, "rwxrwxrwx", String.valueOf(perm));
 
         List<AclEntry> actual = getAcl(locn);
         verifyAcls(aclSpec1, actual);
         break;
       case 1:
         perm = fs.getFileStatus(new Path(locn)).getPermission();
-        Assert.assertEquals(perm.toString(), "rwxrwxr-x");
+        Assert.assertEquals("Location: " + locn, "rwxrwxr-x", String.valueOf(perm));
 
         List<AclEntry> acls = getAcl(locn);
         verifyAcls(aclSpec2, acls);
         break;
       default:
-        throw new RuntimeException("Only 2 permissions by this test");
+        throw new RuntimeException("Only 2 permissions by this test: " + permIndex);
     }
   }
 

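Beyond the ACL specifics, two small hardening patterns show up in the TestExtendedAcls hunks: the assertEquals arguments are put into JUnit's (message, expected, actual) order, since the reversed order inverts the "expected:<...> but was:<...>" failure text, and String.valueOf(perm) tolerates a null permission where perm.toString() would throw. Illustration (location and values hypothetical):

    import static org.junit.Assert.assertEquals;

    public class AssertOrderSketch {
      public static void main(String[] args) {
        String actualPerm = "rwxrwxr-x";
        // Expected value first: a failure reads
        //   Location: /warehouse/t expected:<rwxrwxrwx> but was:<rwxrwxr-x>
        // With the arguments reversed, the two values swap places and the
        // message points at the wrong side of the comparison.
        assertEquals("Location: /warehouse/t", "rwxrwxr-x", actualPerm);
      }
    }
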
Modified: hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java (original)
+++ hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java Sat Sep 20 17:34:39 2014
@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
 import org.apache.hadoop.hive.shims.HadoopShims.MiniMrShim;
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -50,6 +51,7 @@ public class MiniHS2 extends AbstractHiv
   public static final String HS2_HTTP_MODE = "http";
   private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
   private static final FsPermission FULL_PERM = new FsPermission((short)00777);
+  private static final FsPermission WRITE_ALL_PERM = new FsPermission((short)00733);
   private HiveServer2 hiveServer2 = null;
   private final File baseDir;
   private final Path baseDfsDir;
@@ -200,9 +202,8 @@ public class MiniHS2 extends AbstractHiv
     hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort());
 
     Path scratchDir = new Path(baseDfsDir, "scratch");
-
-    // Create scratchdir with 777, so that user impersonation has no issues.
-    FileSystem.mkdirs(fs, scratchDir, FULL_PERM);
+    // Create root scratchdir with write all, so that user impersonation has no issues.
+    Utilities.createDirsWithPermission(hiveConf, scratchDir, WRITE_ALL_PERM, true);
     System.setProperty(HiveConf.ConfVars.SCRATCHDIR.varname, scratchDir.toString());
     hiveConf.setVar(ConfVars.SCRATCHDIR, scratchDir.toString());
 

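The MiniHS2 hunk narrows the scratch root from 777 to 733. Decoded as rwx bits, 733 keeps write and execute for group and other, so impersonated users can still create their own subdirectories under the root, while dropping the read bit that would let them list everyone else's scratch directories. A two-line decoding check, assuming hadoop-common on the classpath:

    import org.apache.hadoop.fs.permission.FsPermission;

    public class PermSketch {
      public static void main(String[] args) {
        System.out.println(new FsPermission((short) 00777)); // rwxrwxrwx
        System.out.println(new FsPermission((short) 00733)); // rwx-wx-wx
      }
    }
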
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java Sat Sep 20 17:34:39 2014
@@ -52,7 +52,7 @@ public abstract class FolderPermissionBa
   protected static Path warehouseDir;
   protected static Path baseDfsDir;
 
-  public static final PathFilter hiddenFileFilter = new PathFilter(){
+  protected static final PathFilter hiddenFileFilter = new PathFilter(){
     public boolean accept(Path p){
       String name = p.getName();
       return !name.startsWith("_") && !name.startsWith(".");
@@ -591,7 +591,7 @@ public abstract class FolderPermissionBa
 
   private List<String> listStatus(String locn) throws Exception {
     List<String> results = new ArrayList<String>();
-    FileStatus[] listStatus = fs.listStatus(new Path(locn));
+    FileStatus[] listStatus = fs.listStatus(new Path(locn), hiddenFileFilter);
     for (FileStatus status : listStatus) {
       results.add(status.getPath().toString());
     }

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java Sat Sep 20 17:34:39 2014
@@ -33,10 +33,13 @@ import java.util.List;
 
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
@@ -52,10 +55,12 @@ import org.mockito.Mockito;
  * Test HiveAuthorizer api invocation
  */
 public class TestHiveAuthorizerCheckInvocation {
+  private final Log LOG = LogFactory.getLog(this.getClass().getName());
   protected static HiveConf conf;
   protected static Driver driver;
   private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
       + "Table";
+  private static final String acidTableName = tableName + "_acid";
   private static final String dbName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
       + "Db";
   static HiveAuthorizer mockedAuthorizer;
@@ -82,14 +87,18 @@ public class TestHiveAuthorizerCheckInvo
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
+    conf.setVar(ConfVars.HIVE_TXN_MANAGER, DbTxnManager.class.getName());
 
     SessionState.start(conf);
     driver = new Driver(conf);
     runCmd("create table " + tableName
         + " (i int, j int, k string) partitioned by (city string, date string) ");
     runCmd("create database " + dbName);
+    // Need a separate table for ACID testing since it has to be bucketed and it has to be Acid
+    runCmd("create table " + acidTableName + " (i int, j int) clustered by (i) into 2 buckets " +
+        "stored as orc");
   }
 
   private static void runCmd(String cmd) throws CommandNeedRetryException {
@@ -99,6 +108,10 @@ public class TestHiveAuthorizerCheckInvo
 
   @AfterClass
   public static void afterTests() throws Exception {
+    // Drop the tables when we're done.  This makes the test work inside an IDE
+    runCmd("drop table if exists " + acidTableName);
+    runCmd("drop table if exists " + tableName);
+    runCmd("drop database if exists " + dbName);
     driver.close();
   }
 
@@ -244,6 +257,63 @@ public class TestHiveAuthorizerCheckInvo
     assertEquals("db name", null, funcObj.getDbname());
   }
 
+  @Test
+  public void testUpdateSomeColumnsUsed() throws HiveAuthzPluginException,
+      HiveAccessControlException, CommandNeedRetryException {
+    reset(mockedAuthorizer);
+    int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3");
+    assertEquals(0, status);
+
+    Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+    List<HivePrivilegeObject> outputs = io.getRight();
+    HivePrivilegeObject tableObj = outputs.get(0);
+    LOG.debug("Got privilege object " + tableObj);
+    assertEquals("no of columns used", 1, tableObj.getColumns().size());
+    assertEquals("Column used", "i", tableObj.getColumns().get(0));
+    List<HivePrivilegeObject> inputs = io.getLeft();
+    assertEquals(1, inputs.size());
+    tableObj = inputs.get(0);
+    assertEquals(1, tableObj.getColumns().size());
+    assertEquals("j", tableObj.getColumns().get(0));
+  }
+
+  @Test
+  public void testUpdateSomeColumnsUsedExprInSet() throws HiveAuthzPluginException,
+      HiveAccessControlException, CommandNeedRetryException {
+    reset(mockedAuthorizer);
+    int status = driver.compile("update " + acidTableName + " set i = 5, l = k where j = 3");
+    assertEquals(0, status);
+
+    Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+    List<HivePrivilegeObject> outputs = io.getRight();
+    HivePrivilegeObject tableObj = outputs.get(0);
+    LOG.debug("Got privilege object " + tableObj);
+    assertEquals("no of columns used", 2, tableObj.getColumns().size());
+    assertEquals("Columns used", Arrays.asList("i", "l"),
+        getSortedList(tableObj.getColumns()));
+    List<HivePrivilegeObject> inputs = io.getLeft();
+    assertEquals(1, inputs.size());
+    tableObj = inputs.get(0);
+    assertEquals(2, tableObj.getColumns().size());
+    assertEquals("Columns used", Arrays.asList("j", "k"),
+        getSortedList(tableObj.getColumns()));
+  }
+
+  @Test
+  public void testDelete() throws HiveAuthzPluginException,
+      HiveAccessControlException, CommandNeedRetryException {
+    reset(mockedAuthorizer);
+    int status = driver.compile("delete from " + acidTableName + " where j = 3");
+    assertEquals(0, status);
+
+    Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+    List<HivePrivilegeObject> inputs = io.getLeft();
+    assertEquals(1, inputs.size());
+    HivePrivilegeObject tableObj = inputs.get(0);
+    assertEquals(1, tableObj.getColumns().size());
+    assertEquals("j", tableObj.getColumns().get(0));
+  }
+
   private void checkSingleTableInput(List<HivePrivilegeObject> inputs) {
     assertEquals("number of inputs", 1, inputs.size());
 

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java Sat Sep 20 17:34:39 2014
@@ -210,7 +210,7 @@ public class TestBeeLineWithArgs {
     }
     scriptFile.delete();
   }
-  
+
   /**
    * Test that BeeLine will read comment lines that start with whitespace
    * @throws Throwable

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Sat Sep 20 17:34:39 2014
@@ -262,10 +262,9 @@ public class TestJdbcDriver2 {
   private void checkBadUrl(String url) throws SQLException {
     try{
       DriverManager.getConnection(url, "", "");
-      fail("should have thrown IllegalArgumentException but did not ");
-    } catch(SQLException i) {
-      assertTrue(i.getMessage().contains("Bad URL format. Hostname not found "
-          + " in authority part of the url"));
+      fail("Should have thrown JdbcUriParseException but did not ");
+    } catch(JdbcUriParseException e) {
+      assertTrue(e.getMessage().contains("Bad URL format"));
     }
   }
 
@@ -736,7 +735,7 @@ public class TestJdbcDriver2 {
     assertTrue(res.next());
     // skip the last (partitioning) column since it is always non-null
     for (int i = 1; i < meta.getColumnCount(); i++) {
-      assertNull(res.getObject(i));
+      assertNull("Column " + i + " should be null", res.getObject(i));
     }
     // getXXX returns 0 for numeric types, false for boolean and null for other
     assertEquals(0, res.getInt(1));
@@ -1618,6 +1617,10 @@ public class TestJdbcDriver2 {
   // [url] [host] [port] [db]
   private static final String[][] URL_PROPERTIES = new String[][] {
     // binary mode
+    // For embedded mode, the JDBC uri is of the form:
+    // jdbc:hive2:///dbName;sess_var_list?hive_conf_list#hive_var_list
+    // and does not contain host:port string.
+    // As a result port is parsed to '-1' per the Java URI conventions
     {"jdbc:hive2://", "", "", "default"},
     {"jdbc:hive2://localhost:10001/default", "localhost", "10001", "default"},
     {"jdbc:hive2://localhost/notdefault", "localhost", "10000", "notdefault"},
@@ -1654,7 +1657,8 @@ public class TestJdbcDriver2 {
   };
 
   @Test
-  public void testParseUrlHttpMode() throws SQLException {
+  public void testParseUrlHttpMode() throws SQLException, JdbcUriParseException,
+      ZooKeeperHiveClientException {
     new HiveDriver();
     for (String[] testValues : HTTP_URL_PROPERTIES) {
       JdbcConnectionParams params = Utils.parseURL(testValues[0]);

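The comment added to URL_PROPERTIES explains the embedded-mode row: once the jdbc: prefix is stripped, hive2:/// has an empty authority, and java.net.URI reports a missing port as -1, which is the convention the comment cites. A minimal demonstration (URI value illustrative only):

    import java.net.URI;

    public class EmbeddedUriSketch {
      public static void main(String[] args) throws Exception {
        URI uri = new URI("hive2:///default;sess_var_list");
        System.out.println(uri.getHost()); // null (no authority in embedded mode)
        System.out.println(uri.getPort()); // -1   (java.net.URI convention)
      }
    }
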
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java Sat Sep 20 17:34:39 2014
@@ -388,7 +388,7 @@ public class TestJdbcWithMiniHS2 {
   }
 
   /**
-   * Tests the creation of the root hdfs scratch dir, which should be writable by all (777).
+   * Tests the creation of the root hdfs scratch dir, which should be writable by all.
    *
    * @throws Exception
    */
@@ -410,7 +410,7 @@ public class TestJdbcWithMiniHS2 {
     hs2Conn = getConnection(miniHS2.getJdbcURL(), userName, "password");
     // FS
     FileSystem fs = miniHS2.getLocalFS();
-    FsPermission expectedFSPermission = new FsPermission("777");
+    FsPermission expectedFSPermission = new FsPermission((short)00733);
     // Verify scratch dir paths and permission
     // HDFS scratch dir
     scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR));

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Sat Sep 20 17:34:39 2014
@@ -103,6 +103,7 @@ public class QTestUtil {
 
   public static final String UTF_8 = "UTF-8";
   private static final Log LOG = LogFactory.getLog("QTestUtil");
+  private static final String QTEST_LEAVE_FILES = "QTEST_LEAVE_FILES";
   private final String defaultInitScript = "q_test_init.sql";
   private final String defaultCleanupScript = "q_test_cleanup.sql";
 
@@ -537,6 +538,9 @@ public class QTestUtil {
    * Clear out any side effects of running tests
    */
   public void clearTestSideEffects() throws Exception {
+    if (System.getenv(QTEST_LEAVE_FILES) != null) {
+      return;
+    }
     // Delete any tables other than the source tables
     // and any databases other than the default database.
     for (String dbName : db.getAllDatabases()) {
@@ -598,6 +602,9 @@ public class QTestUtil {
     if(!isSessionStateStarted) {
       startSessionState();
     }
+    if (System.getenv(QTEST_LEAVE_FILES) != null) {
+      return;
+    }
 
     SessionState.get().getConf().setBoolean("hive.test.shutdown.phase", true);
 

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java?rev=1626482&r1=1626481&r2=1626482&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidatorForTest.java Sat Sep 20 17:34:39 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
@@ -38,8 +39,9 @@ public class SQLStdHiveAuthorizationVali
 
   public SQLStdHiveAuthorizationValidatorForTest(HiveMetastoreClientFactory metastoreClientFactory,
       HiveConf conf, HiveAuthenticationProvider authenticator,
-      SQLStdHiveAccessControllerWrapper privController) {
-    super(metastoreClientFactory, conf, authenticator, privController);
+      SQLStdHiveAccessControllerWrapper privController, HiveAuthzSessionContext ctx)
+      throws HiveAuthzPluginException {
+    super(metastoreClientFactory, conf, authenticator, privController, ctx);
   }
 
   @Override


