hive-commits mailing list archives

From: br...@apache.org
Subject: svn commit: r1637444 [1/20] - in /hive/branches/spark: ./ cli/src/test/org/apache/hadoop/hive/cli/ common/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/apache/hadoop/hive/conf/ com...
Date: Fri, 07 Nov 2014 20:41:45 GMT
Author: brock
Date: Fri Nov  7 20:41:34 2014
New Revision: 1637444

URL: http://svn.apache.org/r1637444
Log:
HIVE-8775 - Merge from trunk 11/6/14 [SPARK BRANCH] (initial commit)

Added:
    hive/branches/spark/data/conf/fair-scheduler-test.xml
      - copied unchanged from r1636884, hive/trunk/data/conf/fair-scheduler-test.xml
    hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/TaskCommitContextRegistry.java
      - copied unchanged from r1636884, hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/TaskCommitContextRegistry.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartitionSpec.java
      - copied unchanged from r1636884, hive/trunk/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartitionSpec.java
    hive/branches/spark/metastore/scripts/upgrade/mssql/004-HIVE-8550.mssql.sql
      - copied unchanged from r1636884, hive/trunk/metastore/scripts/upgrade/mssql/004-HIVE-8550.mssql.sql
    hive/branches/spark/ql/src/test/queries/clientpositive/auto_join33.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/auto_join33.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_gby.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_gby.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_gby_empty.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_gby_empty.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_join.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_join.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_limit.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_limit.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_semijoin.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_semijoin.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_simple_select.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_simple_select.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_stats.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_stats.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_subq_exists.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_subq_exists.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_subq_in.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_subq_in.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_subq_not_in.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_subq_not_in.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_udf_udaf.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_udf_udaf.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_union.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_union.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_views.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_views.q
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_windowing.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/cbo_windowing.q
    hive/branches/spark/ql/src/test/queries/clientpositive/dynpart_sort_optimization_acid.q
      - copied unchanged from r1637277, hive/trunk/ql/src/test/queries/clientpositive/dynpart_sort_optimization_acid.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin1.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin10.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin10.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin11.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin11.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin2.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin3.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin3.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin4.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin4.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin5.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin5.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin6.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin6.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin7.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin7.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin8.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin8.q
    hive/branches/spark/ql/src/test/queries/clientpositive/skewjoin_mapjoin9.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/skewjoin_mapjoin9.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_aggregate_9.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_aggregate_9.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_1.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_10_0.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_10_0.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_2.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_3.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_3.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_4.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_4.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_5.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_5.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_6.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_6.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_precision.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_precision.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_round.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_round.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_round_2.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_round_2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_trailing.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_trailing.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_udf.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_udf.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_decimal_udf2.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_decimal_udf2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_multi_insert.q
      - copied unchanged from r1636884, hive/trunk/ql/src/test/queries/clientpositive/vector_multi_insert.q
    hive/branches/spark/ql/src/test/results/clientpositive/auto_join33.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/auto_join33.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_gby.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_gby.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_gby_empty.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_gby_empty.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_join.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_join.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_limit.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_limit.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_semijoin.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_semijoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_simple_select.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_simple_select.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_stats.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_stats.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_subq_exists.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_subq_exists.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_subq_in.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_subq_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_subq_not_in.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_subq_not_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_udf_udaf.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_udf_udaf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_union.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_union.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_views.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_views.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_windowing.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/cbo_windowing.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/dynpart_sort_optimization_acid.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/dynpart_sort_optimization_acid.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin11.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin3.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin4.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin5.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin6.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin7.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin8.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin8.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/skewjoin_mapjoin9.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/skewjoin_mapjoin9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/acid_vectorization_partition.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/acid_vectorization_partition.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/acid_vectorization_project.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/acid_vectorization_project.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_gby.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_gby.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_gby_empty.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_gby_empty.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_join.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_join.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_limit.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_limit.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_semijoin.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_semijoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_simple_select.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_simple_select.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_stats.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_stats.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_subq_exists.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_subq_exists.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_subq_in.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_subq_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_subq_not_in.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_subq_not_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_udf_udaf.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_udf_udaf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_union.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_union.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_views.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_views.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_windowing.q.out
      - copied unchanged from r1637277, hive/trunk/ql/src/test/results/clientpositive/tez/cbo_windowing.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_aggregate_9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_coalesce.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_coalesce.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_1.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_10_0.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_10_0.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_2.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_3.q.out
      - copied, changed from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_4.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_5.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_6.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_cast.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_cast.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_mapjoin.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_mapjoin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_math_funcs.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_math_funcs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_precision.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_round.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_round.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_round_2.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_round_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_trailing.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_trailing.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_udf2.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vector_decimal_udf2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_limit.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vectorization_limit.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_casts.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_casts.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_date_funcs.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_date_funcs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_distinct_gby.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/tez/vectorized_distinct_gby.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_1.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_2.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_3.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_4.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_5.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_6.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_precision.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_precision.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_round.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_round.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_udf.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_udf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_udf2.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_decimal_udf2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_multi_insert.q.out
      - copied unchanged from r1636884, hive/trunk/ql/src/test/results/clientpositive/vector_multi_insert.q.out
    hive/branches/spark/shims/common/src/main/java/org/apache/hadoop/hive/shims/SchedulerShim.java
      - copied unchanged from r1636884, hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/SchedulerShim.java
    hive/branches/spark/shims/scheduler/
      - copied from r1636884, hive/trunk/shims/scheduler/
Removed:
    hive/branches/spark/ql/src/test/queries/clientpositive/cbo_correctness.q
    hive/branches/spark/ql/src/test/results/clientpositive/cbo_correctness.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/cbo_correctness.q.out
Modified:
    hive/branches/spark/   (props changed)
    hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
    hive/branches/spark/common/pom.xml
    hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
    hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/spark/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
    hive/branches/spark/common/src/test/resources/hive-exec-log4j-test.properties
    hive/branches/spark/common/src/test/resources/hive-log4j-test.properties
    hive/branches/spark/data/scripts/q_test_cleanup.sql
    hive/branches/spark/data/scripts/q_test_init.sql
    hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
    hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
    hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
    hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
    hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderStorer.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorer.java
    hive/branches/spark/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java
    hive/branches/spark/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java
    hive/branches/spark/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/json/JSONMessageFactory.java
    hive/branches/spark/hcatalog/webhcat/java-client/pom.xml
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataJSONSerializer.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/MetadataSerializer.java
    hive/branches/spark/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
    hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java
    hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
    hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java
    hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
    hive/branches/spark/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
    hive/branches/spark/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestPasswordWithCredentialProvider.java
    hive/branches/spark/itests/hive-unit-hadoop2/src/test/java/org/apache/hive/jdbc/TestSchedulerQueue.java
    hive/branches/spark/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/AbstractHiveService.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
    hive/branches/spark/itests/src/test/resources/testconfiguration.properties
    hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/branches/spark/jdbc/pom.xml
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/Utils.java
    hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
    hive/branches/spark/metastore/scripts/upgrade/mssql/002-HIVE-7784.mssql.sql
    hive/branches/spark/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql
    hive/branches/spark/metastore/scripts/upgrade/mssql/hive-schema-0.15.0.mssql.sql
    hive/branches/spark/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql
    hive/branches/spark/metastore/scripts/upgrade/mssql/upgrade-0.13.0-to-0.14.0.mssql.sql
    hive/branches/spark/metastore/scripts/upgrade/mysql/019-HIVE-7784.mysql.sql
    hive/branches/spark/metastore/scripts/upgrade/oracle/020-HIVE-7784.oracle.sql
    hive/branches/spark/metastore/scripts/upgrade/postgres/019-HIVE-7784.postgres.sql
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
    hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
    hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
    hive/branches/spark/pom.xml
    hive/branches/spark/ql/pom.xml
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnDecimal.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarDecimal.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumnDecimal.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalarDecimal.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/DecimalColumnUnaryFunc.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnBetween.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareColumn.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalColumnCompareScalar.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/FilterDecimalScalarCompareColumn.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnDecimal.txt
    hive/branches/spark/ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumnDecimal.txt
    hive/branches/spark/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
    hive/branches/spark/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonMergeJoinOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainSQRewriteTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezJobMonitor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/DecimalColumnVector.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFAvgDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SetReducerParallelism.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SkewJoinOptimizer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveOptiqUtil.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/PartitionPruner.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ExprNodeConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/JoinCondTypeCheckProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/TypeConverter.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSQRewriteSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStreamingEvaluator.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestDecimalUtil.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorRowBatchFromObjectIterables.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
    hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessControllerHS2.java
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DistributeByOrderBy.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_PartitionBySortBy.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries.q
    hive/branches/spark/ql/src/test/queries/clientnegative/ptf_window_boundaries2.q
    hive/branches/spark/ql/src/test/queries/clientnegative/subquery_nested_subquery.q
    hive/branches/spark/ql/src/test/queries/clientnegative/subquery_windowing_corr.q
    hive/branches/spark/ql/src/test/queries/clientnegative/windowing_leadlag_in_udaf.q
    hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_neg.q
    hive/branches/spark/ql/src/test/queries/clientnegative/windowing_ll_no_over.q
    hive/branches/spark/ql/src/test/queries/clientpositive/create_like.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_alt_syntax.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_3.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_4.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_unqual1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_unqual2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_unqual3.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_cond_pushdown_unqual4.q
    hive/branches/spark/ql/src/test/queries/clientpositive/join_merging.q
    hive/branches/spark/ql/src/test/queries/clientpositive/leadlag.q
    hive/branches/spark/ql/src/test/queries/clientpositive/leadlag_queries.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge3.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge4.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge5.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge6.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge7.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge_incompat1.q
    hive/branches/spark/ql/src/test/queries/clientpositive/orc_merge_incompat2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/order_within_subquery.q
    hive/branches/spark/ql/src/test/queries/clientpositive/ptf.q
    hive/branches/spark/ql/src/test/queries/clientpositive/ptf_decimal.q
    hive/branches/spark/ql/src/test/queries/clientpositive/ptf_general_queries.q
    hive/branches/spark/ql/src/test/queries/clientpositive/ptf_streaming.q
    hive/branches/spark/ql/src/test/queries/clientpositive/ql_rewrite_gbtoidx.q
    hive/branches/spark/ql/src/test/queries/clientpositive/reducesink_dedup.q
    hive/branches/spark/ql/src/test/queries/clientpositive/stats_noscan_2.q
    hive/branches/spark/ql/src/test/queries/clientpositive/subquery_in.q
    hive/branches/spark/ql/src/test/queries/clientpositive/subquery_in_explain_rewrite.q
    hive/branches/spark/ql/src/test/queries/clientpositive/subquery_in_having.q
    hive/branches/spark/ql/src/test/queries/clientpositive/subquery_notin.q
    hive/branches/spark/ql/src/test/queries/clientpositive/subquery_notin_having.q
    hive/branches/spark/ql/src/test/queries/clientpositive/subquery_unqualcolumnrefs.q
    hive/branches/spark/ql/src/test/queries/clientpositive/temp_table_windowing_expressions.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vector_mapjoin_reduce.q
    hive/branches/spark/ql/src/test/queries/clientpositive/vectorized_ptf.q
    hive/branches/spark/ql/src/test/queries/clientpositive/windowing.q
    hive/branches/spark/ql/src/test/queries/clientpositive/windowing_adjust_rowcontainer_sz.q
    hive/branches/spark/ql/src/test/queries/clientpositive/windowing_columnPruning.q
    hive/branches/spark/ql/src/test/queries/clientpositive/windowing_decimal.q
    hive/branches/spark/ql/src/test/queries/clientpositive/windowing_expressions.q
    hive/branches/spark/ql/src/test/queries/clientpositive/windowing_streaming.q
    hive/branches/spark/ql/src/test/results/clientnegative/limit_partition_stats.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_AmbiguousWindowDefn.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_DuplicateWindowAlias.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_HavingLeadWithPTF.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_InvalidValueBoundary.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_JoinWithAmbigousAlias.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_negative_WhereWithRankCond.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_window_boundaries.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/ptf_window_boundaries2.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/subquery_nested_subquery.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/subquery_windowing_corr.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/udf_assert_true.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/udf_assert_true2.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/windowing_leadlag_in_udaf.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/windowing_ll_no_neg.q.out
    hive/branches/spark/ql/src/test/results/clientnegative/windowing_ll_no_over.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/add_part_exist.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter_index.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/alter_rename_partition.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/describe_table_json.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/index_creation.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/input2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/input3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/input_part10_win.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_alt_syntax.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/join_merging.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lateral_view.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_noalias.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_ppd.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/leadlag.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/leadlag_queries.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/load_dyn_part14_win.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/order_within_subquery.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ptf_decimal.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ptf_general_queries.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ptf_streaming.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/reducesink_dedup.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/rename_column.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/scriptfile1_win.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/show_tables.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_in_explain_rewrite.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_in_having.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_notin_having.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/temp_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/temp_table_windowing_expressions.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge3.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge4.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge6.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge_incompat1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/orc_merge_incompat2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/subquery_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/union5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/union7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_between_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_mapjoin_reduce.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udf_explode.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udtf_explode.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/udtf_stack.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union11.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union14.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union15.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union17.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union19.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union21.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union26.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union5.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/union7.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_between_in.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vector_mapjoin_reduce.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/vectorized_ptf.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_adjust_rowcontainer_sz.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_columnPruning.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_decimal.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_expressions.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/windowing_streaming.q.out
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java
    hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
    hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
    hive/branches/spark/service/pom.xml
    hive/branches/spark/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
    hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/OperationLog.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
    hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
    hive/branches/spark/service/src/java/org/apache/hive/service/server/HiveServer2.java
    hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/branches/spark/shims/aggregator/pom.xml
    hive/branches/spark/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
    hive/branches/spark/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
    hive/branches/spark/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
    hive/branches/spark/shims/pom.xml
    hive/branches/spark/shims/scheduler/pom.xml

Propchange: hive/branches/spark/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1635378-1636884,1636886-1636887,1636889-1637277

Modified: hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java (original)
+++ hive/branches/spark/cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java Fri Nov  7 20:41:34 2014
@@ -58,6 +58,7 @@ import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.service.HiveClient;
 import org.apache.hadoop.hive.service.HiveServerException;
+import org.apache.hadoop.util.Shell;
 import org.apache.thrift.TException;
 
 
@@ -375,8 +376,14 @@ public class TestCliDriverMethods extend
     }
   }
 
-
   private static void setEnv(String key, String value) throws Exception {
+    if (Shell.WINDOWS)
+      setEnvWindows(key, value);
+    else
+      setEnvLinux(key, value);
+  }
+
+  private static void setEnvLinux(String key, String value) throws Exception {
     Class[] classes = Collections.class.getDeclaredClasses();
     Map<String, String> env = (Map<String, String>) System.getenv();
     for (Class cl : classes) {
@@ -394,6 +401,26 @@ public class TestCliDriverMethods extend
     }
   }
 
+  private static void setEnvWindows(String key, String value) throws Exception {
+    Class<?> processEnvironmentClass = Class.forName("java.lang.ProcessEnvironment");
+    Field theEnvironmentField = processEnvironmentClass.getDeclaredField("theEnvironment");
+    theEnvironmentField.setAccessible(true);
+    Map<String, String> env = (Map<String, String>) theEnvironmentField.get(null);
+    if (value == null) {
+      env.remove(key);
+    } else {
+      env.put(key, value);
+    }
+
+    Field theCaseInsensitiveEnvironmentField = processEnvironmentClass.getDeclaredField("theCaseInsensitiveEnvironment");
+    theCaseInsensitiveEnvironmentField.setAccessible(true);
+    Map<String, String> cienv = (Map<String, String>) theCaseInsensitiveEnvironmentField.get(null);
+    if (value == null) {
+      cienv.remove(key);
+    } else {
+      cienv.put(key, value);
+    }
+  }
 
   private static class FakeCliDriver extends CliDriver {
 

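For context, the setEnv() split above mirrors a common reflection trick for mutating the JVM's environment map in tests. A minimal standalone sketch of the non-Windows path follows; it relies on JDK internals (the private "m" field of Collections$UnmodifiableMap) and on older JDKs that still permit such reflective access, so treat it as illustrative only.

import java.lang.reflect.Field;
import java.util.Collections;
import java.util.Map;

public class EnvOverrideSketch {
  // Mutates the live environment map via reflection; the JDK-internal field
  // name ("m") is an assumption, and this fails on JDKs that enforce strong
  // encapsulation unless the java.util package is opened to the caller.
  @SuppressWarnings("unchecked")
  static void setEnv(String key, String value) throws Exception {
    Map<String, String> env = System.getenv();
    for (Class<?> cl : Collections.class.getDeclaredClasses()) {
      if ("java.util.Collections$UnmodifiableMap".equals(cl.getName())) {
        Field m = cl.getDeclaredField("m");
        m.setAccessible(true);
        Map<String, String> writable = (Map<String, String>) m.get(env);
        if (value == null) {
          writable.remove(key);
        } else {
          writable.put(key, value);
        }
      }
    }
  }

  public static void main(String[] args) throws Exception {
    setEnv("HIVE_TEST_VAR", "1");
    System.out.println(System.getenv("HIVE_TEST_VAR"));  // prints 1
  }
}
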
Modified: hive/branches/spark/common/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/pom.xml?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/common/pom.xml (original)
+++ hive/branches/spark/common/pom.xml Fri Nov  7 20:41:34 2014
@@ -118,14 +118,6 @@
     <profile>
       <id>dist</id>
       <build>
-        <resources>
-          <resource>
-            <directory>../conf/</directory>
-            <includes>
-              <include>hive-default.xml.template</include>
-            </includes>
-          </resource>
-        </resources>
         <plugins>
           <plugin>
             <groupId>org.apache.maven.plugins</groupId>

Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java (original)
+++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/common/type/HiveDecimal.java Fri Nov  7 20:41:34 2014
@@ -266,4 +266,17 @@ public class HiveDecimal implements Comp
 
     return bd;
   }
+
+  public static HiveDecimal enforcePrecisionScale(HiveDecimal dec, int maxPrecision, int maxScale) {
+    if (dec == null) {
+      return null;
+    }
+
+    BigDecimal bd = enforcePrecisionScale(dec.bd, maxPrecision, maxScale);
+    if (bd == null) {
+      return null;
+    }
+
+    return HiveDecimal.create(bd);
+  }
 }

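The new HiveDecimal.enforcePrecisionScale(dec, maxPrecision, maxScale) wrapper returns null when the value cannot be represented within the requested precision/scale. A hedged usage sketch, assuming the underlying BigDecimal helper rounds away excess scale and rejects values whose total digits exceed the precision:

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class EnforcePrecisionScaleExample {
  public static void main(String[] args) {
    HiveDecimal dec = HiveDecimal.create(new BigDecimal("123.456789"));

    // decimal(7,3): fits once the scale is reduced to 3.
    HiveDecimal fitted = HiveDecimal.enforcePrecisionScale(dec, 7, 3);
    System.out.println(fitted);   // expected 123.457 (assuming half-up rounding)

    // decimal(4,3): 3 integer digits + 3 fractional digits exceed 4 total
    // digits, so the value cannot be represented and null is expected.
    HiveDecimal rejected = HiveDecimal.enforcePrecisionScale(dec, 4, 3);
    System.out.println(rejected); // expected null
  }
}
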
Modified: hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/spark/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Nov  7 20:41:34 2014
@@ -393,9 +393,11 @@ public class HiveConf extends Configurat
         "the connection URL, before the next metastore query that accesses the\n" +
         "datastore. Once reloaded, this value is reset to false. Used for\n" +
         "testing only."),
+    METASTORESERVERMAXMESSAGESIZE("hive.metastore.server.max.message.size", 100*1024*1024,
+        "Maximum message size in bytes a HMS will accept."),
     METASTORESERVERMINTHREADS("hive.metastore.server.min.threads", 200,
         "Minimum number of worker threads in the Thrift server's pool."),
-    METASTORESERVERMAXTHREADS("hive.metastore.server.max.threads", 100000,
+    METASTORESERVERMAXTHREADS("hive.metastore.server.max.threads", 1000,
         "Maximum number of worker threads in the Thrift server's pool."),
     METASTORE_TCP_KEEP_ALIVE("hive.metastore.server.tcp.keepalive", true,
         "Whether to enable TCP keepalive for the metastore server. Keepalive will prevent accumulation of half-open connections."),
@@ -1597,6 +1599,9 @@ public class HiveConf extends Configurat
         "table. From 0.12 onwards, they are displayed separately. This flag will let you\n" +
         "get old behavior, if desired. See, test-case in patch for HIVE-6689."),
 
+    HIVE_SSL_PROTOCOL_BLACKLIST("hive.ssl.protocol.blacklist", "SSLv2,SSLv2Hello,SSLv3",
+        "SSL Versions to disable for all Hive Servers"),
+
      // HiveServer2 specific configs
     HIVE_SERVER2_MAX_START_ATTEMPTS("hive.server2.max.start.attempts", 30L, new RangeValidator(0L, null),
         "Number of times HiveServer2 will attempt to start before exiting, sleeping 60 seconds " +
@@ -1622,6 +1627,8 @@ public class HiveConf extends Configurat
         "Port number of HiveServer2 Thrift interface when hive.server2.transport.mode is 'http'."),
     HIVE_SERVER2_THRIFT_HTTP_PATH("hive.server2.thrift.http.path", "cliservice",
         "Path component of URL endpoint when in HTTP mode."),
+    HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE("hive.server2.thrift.max.message.size", 100*1024*1024,
+        "Maximum message size in bytes a HS2 server will accept."),
     HIVE_SERVER2_THRIFT_HTTP_MIN_WORKER_THREADS("hive.server2.thrift.http.min.worker.threads", 5,
         "Minimum number of worker threads when in HTTP mode."),
     HIVE_SERVER2_THRIFT_HTTP_MAX_WORKER_THREADS("hive.server2.thrift.http.max.worker.threads", 500,
@@ -1922,7 +1929,15 @@ public class HiveConf extends Configurat
     TEZ_SMB_NUMBER_WAVES(
         "hive.tez.smb.number.waves",
         (float) 0.5,
-        "The number of waves in which to run the SMB join. Account for cluster being occupied. Ideally should be 1 wave.")
+        "The number of waves in which to run the SMB join. Account for cluster being occupied. Ideally should be 1 wave."),
+    TEZ_EXEC_SUMMARY(
+        "hive.tez.exec.print.summary",
+        false,
+        "Display breakdown of execution steps, for every query executed by the shell."),
+    TEZ_EXEC_INPLACE_PROGRESS(
+        "hive.tez.exec.inplace.progress",
+        true,
+        "Updates tez job execution progress in-place in the terminal.")
     ;
 
     public final String varname;
@@ -2588,6 +2603,7 @@ public class HiveConf extends Configurat
     "hive\\.auto\\..*",
     "hive\\.cbo\\..*",
     "hive\\.convert\\..*",
+    "hive\\.exec\\.dynamic\\.partition.*",
     "hive\\.exec\\..*\\.dynamic\\.partitions\\..*",
     "hive\\.exec\\.compress\\..*",
     "hive\\.exec\\.infer\\..*",

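The two new knobs added above (hive.metastore.server.max.message.size and hive.server2.thrift.max.message.size) both default to 100 MB. Since HiveConf extends Hadoop's Configuration, a caller can read or override them with the plain int accessors; a small sketch, with the override value purely illustrative:

import org.apache.hadoop.hive.conf.HiveConf;

public class MaxMessageSizeExample {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();

    // Defaults in this patch are 100*1024*1024 bytes for both properties.
    int hmsMax = conf.getInt("hive.metastore.server.max.message.size", 100 * 1024 * 1024);
    int hs2Max = conf.getInt("hive.server2.thrift.max.message.size", 100 * 1024 * 1024);
    System.out.println("HMS max Thrift message size: " + hmsMax + " bytes");
    System.out.println("HS2 max Thrift message size: " + hs2Max + " bytes");

    // Example override for a memory-constrained test setup (hypothetical value).
    conf.setInt("hive.metastore.server.max.message.size", 16 * 1024 * 1024);
  }
}
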
Modified: hive/branches/spark/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java (original)
+++ hive/branches/spark/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java Fri Nov  7 20:41:34 2014
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.conf;
 
 import java.io.BufferedReader;
+import java.io.File;
 import java.io.InputStreamReader;
 
 import junit.framework.TestCase;
@@ -75,31 +76,37 @@ public class TestHiveLogging extends Tes
     assertEquals(true, logCreated);
   }
 
-  private void RunTest(String cleanCmd, String findCmd, String logFile,
+  public void cleanLog(File logFile) {
+    if (logFile.exists()) {
+      logFile.delete();
+    }
+    File logFileDir = logFile.getParentFile();
+    if (logFileDir.exists()) {
+      logFileDir.delete();
+    }
+  }
+
+  private void RunTest(File logFile,
     String hiveLog4jProperty, String hiveExecLog4jProperty) throws Exception {
     // clean test space
-    runCmd(cleanCmd);
+    cleanLog(logFile);
+    assertFalse(logFile + " should not exist", logFile.exists());
 
     // config log4j with customized files
     // check whether HiveConf initialize log4j correctly
     configLog(hiveLog4jProperty, hiveExecLog4jProperty);
 
     // check whether log file is created on test running
-    runCmd(findCmd);
-    getCmdOutput(logFile);
-
-    // clean test space
-    runCmd(cleanCmd);
+    assertTrue(logFile + " should exist", logFile.exists());
   }
 
   public void testHiveLogging() throws Exception {
-    // customized log4j config log file to be: /tmp/TestHiveLogging/hiveLog4jTest.log
-    String customLogPath = "/tmp/" + System.getProperty("user.name") + "-TestHiveLogging/";
+    // customized log4j config log file to be: /${test.tmp.dir}/TestHiveLogging/hiveLog4jTest.log
+    File customLogPath = new File(new File(System.getProperty("test.tmp.dir")),
+        System.getProperty("user.name") + "-TestHiveLogging/");
     String customLogName = "hiveLog4jTest.log";
-    String customLogFile = customLogPath + customLogName;
-    String customCleanCmd = "rm -rf " + customLogFile;
-    String customFindCmd = "find " + customLogPath + " -name " + customLogName;
-    RunTest(customCleanCmd, customFindCmd, customLogFile,
+    File customLogFile = new File(customLogPath, customLogName);
+    RunTest(customLogFile,
       "hive-log4j-test.properties", "hive-exec-log4j-test.properties");
   }
 }

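The TestHiveLogging change above drops the rm/find shell commands in favour of java.io.File operations keyed off test.tmp.dir, which is what makes the test portable to Windows. The same idea in isolation, with the fallback directory purely illustrative:

import java.io.File;

public class PortableLogCheck {
  public static void main(String[] args) {
    File dir = new File(System.getProperty("test.tmp.dir", "target/tmp"),
        System.getProperty("user.name") + "-TestHiveLogging");
    File log = new File(dir, "hiveLog4jTest.log");

    // Replaces "rm -rf <logfile>".
    if (log.exists() && !log.delete()) {
      throw new IllegalStateException("could not delete " + log);
    }

    // Replaces "find <dir> -name <logfile>".
    System.out.println(log + (log.exists() ? " exists" : " is absent"));
  }
}
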
Modified: hive/branches/spark/common/src/test/resources/hive-exec-log4j-test.properties
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/test/resources/hive-exec-log4j-test.properties?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/common/src/test/resources/hive-exec-log4j-test.properties (original)
+++ hive/branches/spark/common/src/test/resources/hive-exec-log4j-test.properties Fri Nov  7 20:41:34 2014
@@ -1,6 +1,6 @@
 # Define some default values that can be overridden by system properties
 hive.root.logger=INFO,FA
-hive.log.dir=/tmp/${user.name}-TestHiveLogging
+hive.log.dir=/${test.tmp.dir}/${user.name}-TestHiveLogging
 hive.log.file=hiveExecLog4jTest.log
 
 # Define the root logger to the system property "hadoop.root.logger".

Modified: hive/branches/spark/common/src/test/resources/hive-log4j-test.properties
URL: http://svn.apache.org/viewvc/hive/branches/spark/common/src/test/resources/hive-log4j-test.properties?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/common/src/test/resources/hive-log4j-test.properties (original)
+++ hive/branches/spark/common/src/test/resources/hive-log4j-test.properties Fri Nov  7 20:41:34 2014
@@ -1,6 +1,6 @@
 # Define some default values that can be overridden by system properties
 hive.root.logger=WARN,DRFA
-hive.log.dir=/tmp/${user.name}-TestHiveLogging
+hive.log.dir=${test.tmp.dir}/${user.name}-TestHiveLogging
 hive.log.file=hiveLog4jTest.log
 
 # Define the root logger to the system property "hadoop.root.logger".

Modified: hive/branches/spark/data/scripts/q_test_cleanup.sql
URL: http://svn.apache.org/viewvc/hive/branches/spark/data/scripts/q_test_cleanup.sql?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/data/scripts/q_test_cleanup.sql (original)
+++ hive/branches/spark/data/scripts/q_test_cleanup.sql Fri Nov  7 20:41:34 2014
@@ -16,3 +16,10 @@ DROP TABLE IF EXISTS dest_j1;
 DROP TABLE IF EXISTS dest_g1;
 DROP TABLE IF EXISTS dest_g2;
 DROP TABLE IF EXISTS fetchtask_ioexception;
+
+DROP TABLE IF EXISTS cbo_t1;
+DROP TABLE IF EXISTS cbo_t2;
+DROP TABLE IF EXISTS cbo_t3;
+DROP TABLE IF EXISTS src_cbo;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS lineitem;

Modified: hive/branches/spark/data/scripts/q_test_init.sql
URL: http://svn.apache.org/viewvc/hive/branches/spark/data/scripts/q_test_init.sql?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/data/scripts/q_test_init.sql (original)
+++ hive/branches/spark/data/scripts/q_test_init.sql Fri Nov  7 20:41:34 2014
@@ -243,3 +243,79 @@ CREATE TABLE dest4_sequencefile (key STR
 STORED AS
 INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
 OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat';
+
+
+--
+-- CBO tables
+--
+
+drop table if exists cbo_t1;
+drop table if exists cbo_t2;
+drop table if exists cbo_t3;
+drop table if exists src_cbo;
+drop table if exists part;
+drop table if exists lineitem;
+
+set hive.cbo.enable=true;
+
+create table cbo_t1(key string, value string, c_int int, c_float float, c_boolean boolean)  partitioned by (dt string) row format delimited fields terminated by ',' STORED AS TEXTFILE;
+create table cbo_t2(key string, value string, c_int int, c_float float, c_boolean boolean)  partitioned by (dt string) row format delimited fields terminated by ',' STORED AS TEXTFILE;
+create table cbo_t3(key string, value string, c_int int, c_float float, c_boolean boolean)  row format delimited fields terminated by ',' STORED AS TEXTFILE;
+
+load data local inpath '../../data/files/cbo_t1.txt' into table cbo_t1 partition (dt='2014');
+load data local inpath '../../data/files/cbo_t2.txt' into table cbo_t2 partition (dt='2014');
+load data local inpath '../../data/files/cbo_t3.txt' into table cbo_t3;
+
+CREATE TABLE part(
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
+
+CREATE TABLE lineitem (L_ORDERKEY      INT,
+                                L_PARTKEY       INT,
+                                L_SUPPKEY       INT,
+                                L_LINENUMBER    INT,
+                                L_QUANTITY      DOUBLE,
+                                L_EXTENDEDPRICE DOUBLE,
+                                L_DISCOUNT      DOUBLE,
+                                L_TAX           DOUBLE,
+                                L_RETURNFLAG    STRING,
+                                L_LINESTATUS    STRING,
+                                l_shipdate      STRING,
+                                L_COMMITDATE    STRING,
+                                L_RECEIPTDATE   STRING,
+                                L_SHIPINSTRUCT  STRING,
+                                L_SHIPMODE      STRING,
+                                L_COMMENT       STRING)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '|';
+
+LOAD DATA LOCAL INPATH '../../data/files/lineitem.txt' OVERWRITE INTO TABLE lineitem;
+
+create table src_cbo as select * from src;
+
+
+analyze table cbo_t1 partition (dt) compute statistics;
+analyze table cbo_t1 compute statistics for columns key, value, c_int, c_float, c_boolean;
+analyze table cbo_t2 partition (dt) compute statistics;
+analyze table cbo_t2 compute statistics for columns key, value, c_int, c_float, c_boolean;
+analyze table cbo_t3 compute statistics;
+analyze table cbo_t3 compute statistics for columns key, value, c_int, c_float, c_boolean;
+analyze table src_cbo compute statistics;
+analyze table src_cbo compute statistics for columns;
+analyze table part compute statistics;
+analyze table part compute statistics for columns;
+analyze table lineitem compute statistics;
+analyze table lineitem compute statistics for columns;
+
+reset;
+set hive.stats.dbclass=fs;

Modified: hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java (original)
+++ hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java Fri Nov  7 20:41:34 2014
@@ -23,7 +23,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.TreeMap;
+import java.util.HashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -212,7 +212,7 @@ public class HCatRecordSerDe implements 
   private static Map<?, ?> serializeMap(Object f, MapObjectInspector moi) throws SerDeException {
     ObjectInspector koi = moi.getMapKeyObjectInspector();
     ObjectInspector voi = moi.getMapValueObjectInspector();
-    Map<Object, Object> m = new TreeMap<Object, Object>();
+    Map<Object, Object> m = new HashMap<Object, Object>();
 
     Map<?, ?> readMap = moi.getMap(f);
     if (readMap == null) {

Modified: hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java (original)
+++ hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java Fri Nov  7 20:41:34 2014
@@ -27,11 +27,9 @@ import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.Reporter;
@@ -44,14 +42,16 @@ import org.apache.hadoop.mapreduce.lib.o
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hive.hcatalog.common.ErrorType;
 import org.apache.hive.hcatalog.common.HCatException;
-import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.data.HCatRecord;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Record writer container for tables using dynamic partitioning. See
  * {@link FileOutputFormatContainer} for more information
  */
 class DynamicPartitionFileRecordWriterContainer extends FileRecordWriterContainer {
+  private static final Logger LOG = LoggerFactory.getLogger(DynamicPartitionFileRecordWriterContainer.class);
   private final List<Integer> dynamicPartCols;
   private int maxDynamicPartitions;
 
@@ -97,14 +97,36 @@ class DynamicPartitionFileRecordWriterCo
       // TaskInputOutput.
       bwriter.close(reporter);
     }
-    for (Map.Entry<String, org.apache.hadoop.mapred.OutputCommitter> entry : baseDynamicCommitters
-        .entrySet()) {
-      org.apache.hadoop.mapred.TaskAttemptContext currContext = dynamicContexts.get(entry.getKey());
-      OutputCommitter baseOutputCommitter = entry.getValue();
-      if (baseOutputCommitter.needsTaskCommit(currContext)) {
-        baseOutputCommitter.commitTask(currContext);
+
+    TaskCommitContextRegistry.getInstance().register(context, new TaskCommitContextRegistry.TaskCommitterProxy() {
+      @Override
+      public void abortTask(TaskAttemptContext context) throws IOException {
+        for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
+          String dynKey = outputJobInfoEntry.getKey();
+          OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
+          LOG.info("Aborting task-attempt for " + outputJobInfo.getLocation());
+          baseDynamicCommitters.get(dynKey)
+                               .abortTask(dynamicContexts.get(dynKey));
+        }
       }
-    }
+
+      @Override
+      public void commitTask(TaskAttemptContext context) throws IOException {
+        for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
+          String dynKey = outputJobInfoEntry.getKey();
+          OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
+          LOG.info("Committing task-attempt for " + outputJobInfo.getLocation());
+          TaskAttemptContext dynContext = dynamicContexts.get(dynKey);
+          OutputCommitter dynCommitter = baseDynamicCommitters.get(dynKey);
+          if (dynCommitter.needsTaskCommit(dynContext)) {
+            dynCommitter.commitTask(dynContext);
+          }
+          else {
+            LOG.info("Skipping commitTask() for " + outputJobInfo.getLocation());
+          }
+        }
+      }
+    });
   }
 
   @Override

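The rewritten close() above defers per-partition commits: instead of driving each dynamic partition's base committer inline, the writer registers a TaskCommitContextRegistry.TaskCommitterProxy whose commitTask()/abortTask() the outer committer replays later (see the FileOutputCommitterContainer hunk below). A toy, Hive-free illustration of that deferral pattern, with all names invented for the sketch:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class DeferredCommitSketch {
  interface CommitterProxy {
    void commitTask() throws IOException;
    void abortTask() throws IOException;
  }

  // Stand-in for the per-task registry; the real one is keyed by task attempt.
  static final List<CommitterProxy> REGISTRY = new ArrayList<CommitterProxy>();

  // Called from the record writer's close(): remember what to commit later.
  static void registerPartition(final String partitionDir) {
    REGISTRY.add(new CommitterProxy() {
      public void commitTask() { System.out.println("commit " + partitionDir); }
      public void abortTask()  { System.out.println("abort  " + partitionDir); }
    });
  }

  // Stands in for the output committer's commitTask()/abortTask().
  public static void main(String[] args) throws IOException {
    registerPartition("dt=2014/part-00000");
    registerPartition("dt=2015/part-00000");
    for (CommitterProxy p : REGISTRY) {
      p.commitTask();   // or p.abortTask() on failure
    }
    REGISTRY.clear();   // mirrors discardCleanupFor()
  }
}
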
Modified: hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java (original)
+++ hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java Fri Nov  7 20:41:34 2014
@@ -118,6 +118,13 @@ class FileOutputCommitterContainer exten
   public void abortTask(TaskAttemptContext context) throws IOException {
     if (!dynamicPartitioningUsed) {
       getBaseOutputCommitter().abortTask(HCatMapRedUtil.createTaskAttemptContext(context));
+    } else {
+      try {
+        TaskCommitContextRegistry.getInstance().abortTask(context);
+      }
+      finally {
+        TaskCommitContextRegistry.getInstance().discardCleanupFor(context);
+      }
     }
   }
 
@@ -127,6 +134,13 @@ class FileOutputCommitterContainer exten
          //See HCATALOG-499
       FileOutputFormatContainer.setWorkOutputPath(context);
       getBaseOutputCommitter().commitTask(HCatMapRedUtil.createTaskAttemptContext(context));
+    } else {
+      try {
+        TaskCommitContextRegistry.getInstance().commitTask(context);
+      }
+      finally {
+        TaskCommitContextRegistry.getInstance().discardCleanupFor(context);
+      }
     }
   }
 
@@ -136,7 +150,7 @@ class FileOutputCommitterContainer exten
       return getBaseOutputCommitter().needsTaskCommit(HCatMapRedUtil.createTaskAttemptContext(context));
     } else {
       // called explicitly through FileRecordWriterContainer.close() if dynamic - return false by default
-      return false;
+      return true;
     }
   }
 

Modified: hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java (original)
+++ hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/SpecialCases.java Fri Nov  7 20:41:34 2014
@@ -23,11 +23,20 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
+import org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat;
 import org.apache.hadoop.hive.ql.io.orc.OrcFile;
 import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
+import org.apache.hadoop.hive.serde2.avro.AvroSerDe;
+import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.mapred.OutputFormat;
+import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
 
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 
 /**
  * This class is a place to put all the code associated with
@@ -82,6 +91,35 @@ public class SpecialCases {
           jobProperties.put(propName,tableProps.get(propName));
         }
       }
+    } else if (ofclass == AvroContainerOutputFormat.class) {
+      // Special cases for Avro. As with ORC, we make table properties that
+      // Avro is interested in available in jobconf at runtime
+      Map<String, String> tableProps = jobInfo.getTableInfo().getTable().getParameters();
+      for (AvroSerdeUtils.AvroTableProperties property : AvroSerdeUtils.AvroTableProperties.values()) {
+        String propName = property.getPropName();
+        if (tableProps.containsKey(propName)){
+          String propVal = tableProps.get(propName);
+          jobProperties.put(propName,tableProps.get(propName));
+        }
+      }
+
+      Properties properties = new Properties();
+      properties.put("name",jobInfo.getTableName());
+
+      List<String> colNames = jobInfo.getOutputSchema().getFieldNames();
+      List<TypeInfo> colTypes = new ArrayList<TypeInfo>();
+      for (HCatFieldSchema field : jobInfo.getOutputSchema().getFields()){
+        colTypes.add(TypeInfoUtils.getTypeInfoFromTypeString(field.getTypeString()));
+      }
+
+      jobProperties.put(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName(),
+          AvroSerDe.getSchemaFromCols(properties, colNames, colTypes, null).toString());
+
+
+      for (String propName : jobProperties.keySet()){
+        String propVal = jobProperties.get(propName);
+      }
+
     }
   }
 

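For the Avro branch added above, the essential step is turning the HCat output schema into an avro.schema.literal string via AvroSerDe.getSchemaFromCols. A hedged sketch of that call in isolation; the call shape is taken from the patch, while the table name, column names, and types below are made up:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.hive.serde2.avro.AvroSerDe;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class AvroSchemaLiteralSketch {
  public static void main(String[] args) throws Exception {
    Properties tableProps = new Properties();
    tableProps.put("name", "web_logs");   // table name feeds the Avro record name

    List<String> colNames = Arrays.asList("ip", "hits");
    List<TypeInfo> colTypes = new ArrayList<TypeInfo>();
    for (String t : Arrays.asList("string", "int")) {
      colTypes.add(TypeInfoUtils.getTypeInfoFromTypeString(t));
    }

    // Same call the patch uses; the resulting JSON is what ends up in
    // the avro.schema.literal job property.
    String schemaLiteral =
        AvroSerDe.getSchemaFromCols(tableProps, colNames, colTypes, null).toString();
    System.out.println(schemaLiteral);
  }
}
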
Modified: hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java (original)
+++ hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestUseDatabase.java Fri Nov  7 20:41:34 2014
@@ -64,7 +64,7 @@ public class TestUseDatabase extends Tes
 
     String tmpDir = System.getProperty("test.tmp.dir");
     File dir = new File(tmpDir + "/hive-junit-" + System.nanoTime());
-    response = hcatDriver.run("alter table " + tblName + " add partition (b='2') location '" + dir.getAbsolutePath() + "'");
+    response = hcatDriver.run("alter table " + tblName + " add partition (b='2') location '" + dir.toURI().getPath() + "'");
     assertEquals(0, response.getResponseCode());
     assertNull(response.getErrorMessage());
 

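The one-line TestUseDatabase change swaps File.getAbsolutePath() for File.toURI().getPath(), which yields a forward-slash, URI-style path that the ALTER TABLE ... LOCATION clause handles on Windows as well. A small sketch of the difference; the sample path is hypothetical and the commented output applies when run on Windows:

import java.io.File;

public class PathFormExample {
  public static void main(String[] args) {
    File dir = new File("C:\\tmp\\hive-junit-42");
    System.out.println(dir.getAbsolutePath());   // C:\tmp\hive-junit-42 (backslashes)
    System.out.println(dir.toURI().getPath());   // /C:/tmp/hive-junit-42 (URI-style)
  }
}
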

