hive-commits mailing list archives

From: prasan...@apache.org
Subject: svn commit: r1670534 [1/8] - in /hive/branches/llap: ./ beeline/src/java/org/apache/hive/beeline/ conf/ data/files/ hcatalog/hcatalog-pig-adapter/ hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/ hcatalog/src/test/e2e/templeton...
Date: Wed, 01 Apr 2015 01:15:52 GMT
Author: prasanthj
Date: Wed Apr  1 01:15:50 2015
New Revision: 1670534

URL: http://svn.apache.org/r1670534
Log:
Merge from trunk to llap (3/31/2015) (Prasanth Jayachandran)

Added:
    hive/branches/llap/conf/ivysettings.xml
      - copied unchanged from r1670533, hive/trunk/conf/ivysettings.xml
    hive/branches/llap/data/files/HiveGroup.parquet
      - copied unchanged from r1670533, hive/trunk/data/files/HiveGroup.parquet
    hive/branches/llap/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
      - copied unchanged from r1670533, hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java
      - copied unchanged from r1670533, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java
    hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/session/TestAddResource.java
      - copied unchanged from r1670533, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestAddResource.java
    hive/branches/llap/ql/src/test/queries/clientnegative/ivyDownload.q
      - copied unchanged from r1670533, hive/trunk/ql/src/test/queries/clientnegative/ivyDownload.q
    hive/branches/llap/ql/src/test/queries/clientpositive/avro_comments.q
      - copied unchanged from r1670533, hive/trunk/ql/src/test/queries/clientpositive/avro_comments.q
    hive/branches/llap/ql/src/test/queries/clientpositive/ivyDownload.q
      - copied unchanged from r1670533, hive/trunk/ql/src/test/queries/clientpositive/ivyDownload.q
    hive/branches/llap/ql/src/test/queries/clientpositive/lateral_view_onview.q
      - copied unchanged from r1670533, hive/trunk/ql/src/test/queries/clientpositive/lateral_view_onview.q
    hive/branches/llap/ql/src/test/queries/clientpositive/parquet_schema_evolution.q
      - copied unchanged from r1670533, hive/trunk/ql/src/test/queries/clientpositive/parquet_schema_evolution.q
    hive/branches/llap/ql/src/test/queries/clientpositive/parquet_table_with_subschema.q
      - copied unchanged from r1670533, hive/trunk/ql/src/test/queries/clientpositive/parquet_table_with_subschema.q
    hive/branches/llap/ql/src/test/results/clientnegative/ivyDownload.q.out
      - copied unchanged from r1670533, hive/trunk/ql/src/test/results/clientnegative/ivyDownload.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_comments.q.out
      - copied unchanged from r1670533, hive/trunk/ql/src/test/results/clientpositive/avro_comments.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/ivyDownload.q.out
      - copied unchanged from r1670533, hive/trunk/ql/src/test/results/clientpositive/ivyDownload.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/lateral_view_onview.q.out
      - copied unchanged from r1670533, hive/trunk/ql/src/test/results/clientpositive/lateral_view_onview.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/parquet_schema_evolution.q.out
      - copied unchanged from r1670533, hive/trunk/ql/src/test/results/clientpositive/parquet_schema_evolution.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/parquet_table_with_subschema.q.out
      - copied unchanged from r1670533, hive/trunk/ql/src/test/results/clientpositive/parquet_table_with_subschema.q.out
Modified:
    hive/branches/llap/   (props changed)
    hive/branches/llap/beeline/src/java/org/apache/hive/beeline/BeeLine.java
    hive/branches/llap/hcatalog/hcatalog-pig-adapter/pom.xml
    hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
    hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/env.sh
    hive/branches/llap/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
    hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
    hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
    hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java
    hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
    hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
    hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
    hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java
    hive/branches/llap/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
    hive/branches/llap/itests/pom.xml
    hive/branches/llap/packaging/src/main/assembly/bin.xml
    hive/branches/llap/pom.xml
    hive/branches/llap/ql/pom.xml
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierForASTConv.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java
    hive/branches/llap/ql/src/test/queries/clientpositive/avro_compression_enabled.q
    hive/branches/llap/ql/src/test/queries/clientpositive/avro_evolved_schemas.q
    hive/branches/llap/ql/src/test/queries/clientpositive/avro_joins.q
    hive/branches/llap/ql/src/test/queries/clientpositive/leadlag.q
    hive/branches/llap/ql/src/test/queries/clientpositive/parquet_columnar.q
    hive/branches/llap/ql/src/test/queries/clientpositive/udaf_percentile_approx_23.q
    hive/branches/llap/ql/src/test/queries/clientpositive/udf_format_number.q
    hive/branches/llap/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/annotate_stats_select.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join1.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join10.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join11.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join12.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join13.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join14.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join22.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join26.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join_nulls.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/auto_join_without_localtask.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_add_column.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_add_column2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_add_column3.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_change_schema.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_compression_enabled.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_decimal.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_decimal_native.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_evolved_schemas.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_joins.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_joins_native.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_native.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_partitioned.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_partitioned_native.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_sanity_test.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/avro_schema_literal.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/combine2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer1.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer12.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer3.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/create_like.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/ctas_colname.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/decimal_udf.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/decimal_udf2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_dynamic.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/explain_logical.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/groupby_ppr.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/groupby_sort_6.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/index_serde.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/input_part1.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/join28.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/join29.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/join31.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/join32.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/join32_lessSize.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/join33.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/join35.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/leadlag.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/list_bucket_query_oneskew_3.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/literal_decimal.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/load_dyn_part14.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/louter_join_ppr.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/mapjoin_subquery.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/multiMapJoin2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/nonblock_op_deduplicate.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/parquet_columnar.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/pcr.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/ppd_vc.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx_cbo_1.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx_cbo_2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/skewjoin_mapjoin6.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/skewjoinopt10.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/spark/avro_decimal_native.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/spark/avro_joins.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/spark/avro_joins_native.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/spark/skewjoinopt10.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/str_to_map.q.java1.7.out
    hive/branches/llap/ql/src/test/results/clientpositive/subquery_in.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/subquery_in_having.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/subquery_notin.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/subquery_views.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/table_access_keys_stats.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/tez/subquery_in.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/tez/vector_decimal_udf.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/tez/vector_decimal_udf2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/udaf_percentile_approx_23.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/udf7.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/udf_format_number.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/udf_reflect2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/union24.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/unionDistinct_1.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/vector_decimal_round.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/vector_decimal_udf.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/vector_decimal_udf2.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/vector_mapjoin_reduce.q.out
    hive/branches/llap/ql/src/test/results/clientpositive/windowing_streaming.q.out
    hive/branches/llap/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java
    hive/branches/llap/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
    hive/branches/llap/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
    hive/branches/llap/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java

Propchange: hive/branches/llap/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Apr  1 01:15:50 2015
@@ -4,4 +4,4 @@
 /hive/branches/spark:1608589-1660298
 /hive/branches/tez:1494760-1622766
 /hive/branches/vectorization:1466908-1527856
-/hive/trunk:1624170-1669495
+/hive/trunk:1624170-1670533

Modified: hive/branches/llap/beeline/src/java/org/apache/hive/beeline/BeeLine.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/beeline/src/java/org/apache/hive/beeline/BeeLine.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/beeline/src/java/org/apache/hive/beeline/BeeLine.java (original)
+++ hive/branches/llap/beeline/src/java/org/apache/hive/beeline/BeeLine.java Wed Apr  1 01:15:50 2015
@@ -803,10 +803,14 @@ public class BeeLine implements Closeabl
   }
 
   private int execute(ConsoleReader reader, boolean exitOnError) {
+    String line;
     while (!exit) {
       try {
         // Execute one instruction; terminate on executing a script if there is an error
-        if (!dispatch(reader.readLine(getPrompt())) && exitOnError) {
+        // in silent mode, prevent the query and prompt from being echoed back to the terminal
+        line = getOpts().isSilent() ? reader.readLine(null, ConsoleReader.NULL_MASK) : reader.readLine(getPrompt());
+
+        if (!dispatch(line) && exitOnError) {
           return ERRNO_OTHER;
         }
       } catch (Throwable t) {
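
The new branch above reads input with a null prompt and ConsoleReader.NULL_MASK so that neither the prompt nor the typed characters are echoed in silent mode. For illustration, a minimal standalone sketch of the same pattern, not part of this commit (hypothetical class name, assuming jline2's ConsoleReader is on the classpath):

import java.io.IOException;
import jline.console.ConsoleReader;

public class SilentReadDemo {
  public static void main(String[] args) throws IOException {
    ConsoleReader reader = new ConsoleReader();
    boolean silent = args.length > 0 && "--silent".equals(args[0]);
    // null prompt + NULL_MASK suppress both the prompt and the input echo;
    // otherwise read normally with a visible prompt
    String line = silent
        ? reader.readLine(null, ConsoleReader.NULL_MASK)
        : reader.readLine("demo> ");
    System.out.println("read: " + line);
  }
}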

Modified: hive/branches/llap/hcatalog/hcatalog-pig-adapter/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/hcatalog-pig-adapter/pom.xml?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/hcatalog-pig-adapter/pom.xml (original)
+++ hive/branches/llap/hcatalog/hcatalog-pig-adapter/pom.xml Wed Apr  1 01:15:50 2015
@@ -68,7 +68,6 @@
     </dependency>
   </dependencies>
 
-
   <profiles>
     <profile>
       <id>hadoop-1</id>
@@ -79,6 +78,12 @@
           <version>${hadoop-20S.version}</version>
         </dependency>
         <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
           <groupId>org.apache.pig</groupId>
           <artifactId>pig</artifactId>
           <version>${pig.version}</version>
@@ -102,6 +107,11 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-core</artifactId>
           <version>${hadoop-23.version}</version>
         </dependency>
@@ -112,6 +122,12 @@
           <classifier>h2</classifier>
         </dependency>
         <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
           <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
               in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
           <groupId>joda-time</groupId>
@@ -121,11 +137,30 @@
         <!-- Test dependencies -->
         <dependency>
           <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-common</artifactId>
           <version>${hadoop-23.version}</version>
           <optional>true</optional>
           <scope>test</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-servlet</artifactId>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>

Modified: hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml (original)
+++ hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml Wed Apr  1 01:15:50 2015
@@ -35,7 +35,7 @@
 
     <property>
         <name>templeton.libjars</name>
-        <value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.5.jar</value>
+        <value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0-SNAPSHOT.jar</value>
         <description>Jars to add to the classpath.</description>
     </property>
 
@@ -69,6 +69,11 @@
             shipped to the target node in the cluster to execute Pig job which uses 
             HCat, Hive query, etc.</description>
     </property>
+
+    <property>
+      <name>templeton.hive.extra.files</name>
+      <value>${env.TEZ_CLIENT_HOME}/conf/tez-site.xml,${env.TEZ_CLIENT_HOME}/,${env.TEZ_CLIENT_HOME}/lib</value>
+    </property>
     <property>
         <name>templeton.hcat.home</name>
         <value>apache-hive-${env.HIVE_VERSION}-bin.tar.gz/apache-hive-${env.HIVE_VERSION}-bin/hcatalog</value>
@@ -101,7 +106,7 @@
     </property>
 
     <property>
-        <!--\,thrift://127.0.0.1:9933-->
+        <!--\,thrift://127.0.0.1:9933,,hive.execution.engine=tez-->
         <name>templeton.hive.properties</name>
         <value>hive.metastore.uris=thrift://localhost:9933,hive.metastore.sasl.enabled=false</value>
     </property>

Modified: hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/env.sh
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/env.sh?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/env.sh (original)
+++ hive/branches/llap/hcatalog/src/test/e2e/templeton/deployers/env.sh Wed Apr  1 01:15:50 2015
@@ -36,6 +36,10 @@ if [ -z ${PIG_VERSION} ]; then
   export PIG_VERSION=0.12.2-SNAPSHOT
 fi
 
+if [ -z ${TEZ_VERSION} ]; then
+  export TEZ_VERSION=0.5.3
+fi
+
 #Root of project source tree
 if [ -z ${PROJ_HOME} ]; then
   export PROJ_HOME=/Users/${USER}/dev/hive
@@ -46,6 +50,7 @@ if [ -z ${HADOOP_HOME} ]; then
   export HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
 fi
 
+export TEZ_CLIENT_HOME=/Users/ekoifman/dev/apache-tez-client-${TEZ_VERSION}
 #Make sure Pig is built for the Hadoop version you are running
 export PIG_TAR_PATH=/Users/${USER}/dev/pig-${PIG_VERSION}-src/build
 #this is part of Pig distribution

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml Wed Apr  1 01:15:50 2015
@@ -39,7 +39,7 @@
 
   <property>
     <name>templeton.libjars</name>
-    <value>${env.TEMPLETON_HOME}/share/webhcat/svr/lib/zookeeper-3.4.3.jar</value>
+    <value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0-SNAPSHOT.jar</value>
     <description>Jars to add to the classpath.</description>
   </property>
 
@@ -106,7 +106,20 @@
   <property>
     <name>templeton.hive.path</name>
     <value>hive-0.11.0.tar.gz/hive-0.11.0/bin/hive</value>
-    <description>The path to the Hive executable.</description>
+    <description>The path to the Hive executable.  Applies only if templeton.hive.archive is defined.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.extra.files</name>
+    <value>/tez-client/conf/tez-site.xml,/tez-client/,/tez-client/lib</value>
+    <description>The resources in this list will be localized to the node running LaunchMapper and added to HADOOP_CLASSPATH
+      before launching the 'hive' command.  If the path /foo/bar is a directory, the contents of the entire dir will be localized
+      and ./bar/* will be added to HADOOP_CLASSPATH.  Note that since classpath processing does not recurse into subdirectories,
+      the paths in this property may overlap.  In the example above, "./tez-site.xml:./tez-client/*:./lib/*" will be added to
+      HADOOP_CLASSPATH.
+      This can be used to specify config files, Tez artifacts, etc.  The list is passed via the -files option of the hadoop jar
+      command, so each path is interpreted by the Generic Options Parser.  Paths may be local or HDFS paths.
+    </description>
   </property>
 
   <property>
@@ -197,6 +210,32 @@
     </description>
   </property>
 
+  <!--
+  <property>
+    <name>templeton.controller.mr.am.java.opts</name>
+    <value></value>
+    <description>Java options to be set for the templeton controller job's
+        MapReduce application master. When submitting the controller job,
+        Templeton will override yarn.app.mapreduce.am.command-opts with
+        this value.  If this is not specified, Templeton will not set the
+        property and therefore the value will be picked up from
+        mapred-site.xml.
+    </description>
+  </property>
+
+  <property>
+    <name>templeton.mr.am.memory.mb</name>
+    <value></value>
+    <description>Templeton controller job's Application Master's memory
+        limit in MB. When submitting controller job, Templeton will
+        overwrite yarn.app.mapreduce.am.resource.mb with this value. If
+        empty, Templeton will not set yarn.app.mapreduce.am.resource.mb
+        when submitting the controller job, therefore the configuration
+        in mapred-site.xml will be used.
+    </description>
+  </property>
+  -->
+
   <property>
     <name>templeton.exec.envs</name>
     <value>HADOOP_PREFIX,HADOOP_HOME,JAVA_HOME,HIVE_HOME</value>
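
To make the templeton.hive.extra.files description above concrete: each listed path is localized into the job's working directory under its last path component, and directories get a non-recursive /* wildcard appended. A plain-Java sketch of that expansion, not part of this commit (the isDir flags stand in for the FileStatus check that LaunchMapper performs on the localized files):

import java.io.File;

public class ExtraFilesClasspathDemo {
  public static void main(String[] args) {
    // localized names of /tez-client/conf/tez-site.xml, /tez-client/, /tez-client/lib
    String[] names = {"tez-site.xml", "tez-client", "lib"};
    boolean[] isDir = {false, true, true};
    StringBuilder cp = new StringBuilder();
    for (int i = 0; i < names.length; i++) {
      if (cp.length() > 0) {
        cp.append(File.pathSeparator);
      }
      cp.append("./").append(names[i]);
      if (isDir[i]) {
        cp.append("/*"); // classpath wildcards do not recurse into subdirectories
      }
    }
    System.out.println(cp); // ./tez-site.xml:./tez-client/*:./lib/* on Unix-like systems
  }
}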

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java Wed Apr  1 01:15:50 2015
@@ -35,7 +35,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hive.hcatalog.templeton.tool.JobState;
@@ -104,6 +103,8 @@ public class AppConfig extends Configura
   public static final String HIVE_ARCHIVE_NAME   = "templeton.hive.archive";
   public static final String HIVE_PATH_NAME      = "templeton.hive.path";
   public static final String MAPPER_MEMORY_MB    = "templeton.mapper.memory.mb";
+  public static final String MR_AM_MEMORY_MB     = "templeton.mr.am.memory.mb";
+
   /**
    * see webhcat-default.xml
    */
@@ -130,6 +131,8 @@ public class AppConfig extends Configura
   public static final String OVERRIDE_JARS_ENABLED = "templeton.override.enabled";
   public static final String TEMPLETON_CONTROLLER_MR_CHILD_OPTS 
     = "templeton.controller.mr.child.opts";
+  public static final String TEMPLETON_CONTROLLER_MR_AM_JAVA_OPTS
+    = "templeton.controller.mr.am.java.opts";
 
   public static final String KERBEROS_SECRET     = "templeton.kerberos.secret";
   public static final String KERBEROS_PRINCIPAL  = "templeton.kerberos.principal";
@@ -148,7 +151,14 @@ public class AppConfig extends Configura
     = "mapred.map.tasks.speculative.execution";
   public static final String HADOOP_CHILD_JAVA_OPTS = "mapred.child.java.opts";
   public static final String HADOOP_MAP_MEMORY_MB = "mapreduce.map.memory.mb";
+  public static final String HADOOP_MR_AM_JAVA_OPTS = "yarn.app.mapreduce.am.command-opts";
+  public static final String HADOOP_MR_AM_MEMORY_MB = "yarn.app.mapreduce.am.resource.mb";
   public static final String UNIT_TEST_MODE     = "templeton.unit.test.mode";
+  /**
+   * Comma-separated list of artifacts to add to the HADOOP_CLASSPATH env var in
+   * LaunchMapper before launching the Hive command.
+   */
+  public static final String HIVE_EXTRA_FILES = "templeton.hive.extra.files";
 
 
   private static final Log LOG = LogFactory.getLog(AppConfig.class);
@@ -313,7 +323,13 @@ public class AppConfig extends Configura
   public String controllerMRChildOpts() { 
     return get(TEMPLETON_CONTROLLER_MR_CHILD_OPTS); 
   }
+  public String controllerAMChildOpts() {
+    return get(TEMPLETON_CONTROLLER_MR_AM_JAVA_OPTS);
+  }
   public String mapperMemoryMb()   { return get(MAPPER_MEMORY_MB); }
+  public String amMemoryMb() {
+    return get(MR_AM_MEMORY_MB);
+  }
 
   /**
    * @see  #HIVE_PROPS_NAME

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java Wed Apr  1 01:15:50 2015
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.exec.ExecuteException;
+import org.apache.hadoop.fs.Path;
 import org.apache.hive.hcatalog.templeton.tool.JobSubmissionConstants;
 import org.apache.hive.hcatalog.templeton.tool.TempletonControllerJob;
 import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;
@@ -117,7 +118,7 @@ public class HiveDelegator extends Launc
   private List<String> makeBasicArgs(String execute, String srcFile, String otherFiles,
                                          String statusdir, String completedUrl,
                                          boolean enablelog)
-    throws URISyntaxException, FileNotFoundException, IOException,
+    throws URISyntaxException, IOException,
     InterruptedException
   {
     ArrayList<String> args = new ArrayList<String>();
@@ -142,6 +143,30 @@ public class HiveDelegator extends Launc
       args.add(appConf.hiveArchive());
     }
 
+    //ship additional artifacts, for example for Tez
+    String extras = appConf.get(AppConfig.HIVE_EXTRA_FILES); 
+    if(extras != null && extras.length() > 0) {
+      boolean foundFiles = false;
+      for(int i = 0; i < args.size(); i++) {
+        if(FILES.equals(args.get(i))) {
+          String value = args.get(i + 1);
+          args.set(i + 1, value + "," + extras);
+          foundFiles = true;
+        }
+      }
+      if(!foundFiles) {
+        args.add(FILES);
+        args.add(extras);
+      }
+      String[] extraFiles = appConf.getStrings(AppConfig.HIVE_EXTRA_FILES);
+      StringBuilder extraFileNames = new StringBuilder();
+      //now tell LaunchMapper which files it should add to HADOOP_CLASSPATH
+      for(String file : extraFiles) {
+        Path p = new Path(file);
+        extraFileNames.append(p.getName()).append(",");
+      }
+      addDef(args, JobSubmissionConstants.HADOOP_CLASSPATH_EXTRAS, extraFileNames.toString());
+    }
     return args;
   }
 }
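
The extras handling in makeBasicArgs above either extends an existing -files option or appends a new one. A self-contained sketch of that merge behavior, with hypothetical class and method names (not Hive test code):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class FilesArgMergeDemo {
  static final String FILES = "-files";

  // append extras to an existing -files value, or add a new -files pair
  static void mergeExtras(List<String> args, String extras) {
    for (int i = 0; i < args.size(); i++) {
      if (FILES.equals(args.get(i))) {
        args.set(i + 1, args.get(i + 1) + "," + extras);
        return;
      }
    }
    args.add(FILES);
    args.add(extras);
  }

  public static void main(String[] args) {
    List<String> a = new ArrayList<String>(Arrays.asList(FILES, "hdfs:///user/x/script.hql"));
    mergeExtras(a, "/tez-client/conf/tez-site.xml");
    System.out.println(a); // [-files, hdfs:///user/x/script.hql,/tez-client/conf/tez-site.xml]
  }
}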

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java Wed Apr  1 01:15:50 2015
@@ -28,6 +28,10 @@ public class TempletonDelegator {
    * http://hadoop.apache.org/docs/r1.0.4/commands_manual.html#Generic+Options
    */
   public static final String ARCHIVES = "-archives";
+  /**
+   * http://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/CommandsManual.html#Generic_Options
+   */
+  public static final String FILES = "-files";
   
   protected AppConfig appConf;
 

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java Wed Apr  1 01:15:50 2015
@@ -31,6 +31,12 @@ public interface JobSubmissionConstants
   public static final String EXIT_FNAME = "exit";
   public static final int WATCHER_TIMEOUT_SECS = 10;
   public static final int KEEP_ALIVE_MSEC = 60 * 1000;
+  /**
+   * A comma-separated list of files to be added to HADOOP_CLASSPATH in 
+   * {@link org.apache.hive.hcatalog.templeton.tool.LaunchMapper}.  Used to localize additional
+   * artifacts for job submission requests.
+   */
+  public static final String HADOOP_CLASSPATH_EXTRAS = "templeton.hadoop.classpath.extras";
   /*
    * The = sign in the string for TOKEN_FILE_ARG_PLACEHOLDER is required because
    * org.apache.hadoop.util.GenericOptionsParser.preProcessForWindows() prepares

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java Wed Apr  1 01:15:50 2015
@@ -21,6 +21,7 @@ package org.apache.hive.hcatalog.templet
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
@@ -33,7 +34,6 @@ import org.apache.hadoop.mapreduce.Mappe
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hive.hcatalog.templeton.AppConfig;
 import org.apache.hive.hcatalog.templeton.BadParam;
 import org.apache.hive.hcatalog.templeton.LauncherDelegator;
 
@@ -115,6 +115,32 @@ public class LaunchMapper extends Mapper
       }
     }
   }
+  private static void handleHadoopClasspathExtras(Configuration conf, Map<String, String> env)
+    throws IOException {
+    if(!TempletonUtils.isset(conf.get(JobSubmissionConstants.HADOOP_CLASSPATH_EXTRAS))) {
+      return;
+    }
+    LOG.debug(HADOOP_CLASSPATH_EXTRAS + "=" + conf.get(HADOOP_CLASSPATH_EXTRAS));
+    String[] files = conf.getStrings(HADOOP_CLASSPATH_EXTRAS);
+    StringBuilder paths = new StringBuilder();
+    FileSystem fs = FileSystem.getLocal(conf);//these have been localized already
+    for(String f : files) {
+      Path p = new Path(f);
+      FileStatus fileStatus = fs.getFileStatus(p);
+      paths.append(f);
+      if(fileStatus.isDirectory()) {
+        paths.append(File.separator).append("*");
+      }
+      paths.append(File.pathSeparator);
+    }
+    paths.setLength(paths.length() - 1);
+    if(TempletonUtils.isset(System.getenv("HADOOP_CLASSPATH"))) {
+      env.put("HADOOP_CLASSPATH", System.getenv("HADOOP_CLASSPATH") + File.pathSeparator + paths);
+    }
+    else {
+      env.put("HADOOP_CLASSPATH", paths.toString());
+    }
+  }
   protected Process startJob(Context context, String user, String overrideClasspath)
     throws IOException, InterruptedException {
     Configuration conf = context.getConfiguration();
@@ -135,6 +161,7 @@ public class LaunchMapper extends Mapper
     Map<String, String> env = TempletonUtils.hadoopUserEnv(user, overrideClasspath);
     handlePigEnvVars(conf, env);
     handleSqoop(conf, env);
+    handleHadoopClasspathExtras(conf, env);    
     List<String> jarArgsList = new LinkedList<String>(Arrays.asList(jarArgs));
     handleTokenFile(jarArgsList, JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER, "mapreduce.job.credentials.binary");
     handleTokenFile(jarArgsList, JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER_TEZ, "tez.credentials.path");
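
Note that handleHadoopClasspathExtras appends to any HADOOP_CLASSPATH already present in the environment rather than replacing it, so user-supplied entries survive. A minimal sketch of just that merge step, with hypothetical names (not Hive code):

import java.io.File;
import java.util.HashMap;
import java.util.Map;

public class EnvMergeDemo {
  public static void main(String[] args) {
    Map<String, String> env = new HashMap<String, String>();
    String existing = System.getenv("HADOOP_CLASSPATH"); // may be null
    String extras = "./tez-site.xml" + File.pathSeparator + "./tez-client/*";
    if (existing != null && !existing.isEmpty()) {
      env.put("HADOOP_CLASSPATH", existing + File.pathSeparator + extras);
    } else {
      env.put("HADOOP_CLASSPATH", extras);
    }
    System.out.println(env.get("HADOOP_CLASSPATH"));
  }
}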

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java Wed Apr  1 01:15:50 2015
@@ -18,9 +18,7 @@
  */
 package org.apache.hive.hcatalog.templeton.tool;
 
-import java.io.File;
 import java.io.IOException;
-import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 
@@ -28,13 +26,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobClient;
@@ -47,7 +41,6 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Tool;
 import org.apache.hive.hcatalog.templeton.AppConfig;
-import org.apache.hive.hcatalog.templeton.Main;
 import org.apache.hive.hcatalog.templeton.SecureProxySupport;
 import org.apache.hive.hcatalog.templeton.UgiFactory;
 import org.apache.thrift.TException;
@@ -114,6 +107,15 @@ public class TempletonControllerJob exte
     if(memoryMb != null && memoryMb.length() != 0) {
       conf.set(AppConfig.HADOOP_MAP_MEMORY_MB, memoryMb);
     }
+    String amMemoryMB = appConf.amMemoryMb();
+    if (amMemoryMB != null && !amMemoryMB.isEmpty()) {
+      conf.set(AppConfig.HADOOP_MR_AM_MEMORY_MB, amMemoryMB);
+    }
+    String amJavaOpts = appConf.controllerAMChildOpts();
+    if (amJavaOpts != null && !amJavaOpts.isEmpty()) {
+      conf.set(AppConfig.HADOOP_MR_AM_JAVA_OPTS, amJavaOpts);
+    }
+
     String user = UserGroupInformation.getCurrentUser().getShortUserName();
     conf.set("user.name", user);
     Job job = new Job(conf);

Modified: hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java (original)
+++ hive/branches/llap/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java Wed Apr  1 01:15:50 2015
@@ -76,32 +76,31 @@ final class TrivialExecService {
     }
   }
   /**
-   * Print files and directories in current directory. Will list files in the sub-directory (only 1 level deep)
-   * time honored tradition in WebHCat of borrowing from Oozie
+   * Recursively print files and directories in {@code dir}.
    */
-  private static void printContentsOfDir(String dir) {
+  private static StringBuilder printContentsOfDir(String dir, int depth, StringBuilder sb) {
+    StringBuilder indent = new StringBuilder();
+    for(int i = 0; i < depth; i++) {
+      indent.append("--");
+    }
     File folder = new File(dir);
-    StringBuilder sb = new StringBuilder("Files in '").append(dir).append("' dir:").append(folder.getAbsolutePath()).append('\n');
+    sb.append(indent).append("Files in '").append(dir).append("' dir:").append(folder.getAbsolutePath()).append('\n');
 
     File[] listOfFiles = folder.listFiles();
+    if(listOfFiles == null) {
+      return sb;
+    }
     for (File fileName : listOfFiles) {
       if (fileName.isFile()) {
-        sb.append("File: ").append(fileName.getName()).append('\n');
+        sb.append(indent).append("File: ").append(fileName.getName()).append('\n');
       }
       else if (fileName.isDirectory()) {
-        sb.append("Dir: ").append(fileName.getName()).append('\n');
-        File subDir = new File(fileName.getName());
-        File[] moreFiles = subDir.listFiles();
-        for (File subFileName : moreFiles) {
-          if (subFileName.isFile()) {
-            sb.append("--File: ").append(subFileName.getName()).append('\n');
-          }
-          else if (subFileName.isDirectory()) {
-            sb.append("--Dir: ").append(subFileName.getName()).append('\n');
-          }
-        }
+        printContentsOfDir(fileName.getName(), depth+1, sb);
       }
     }
-    LOG.info(sb.toString());
+    return sb;
+  }
+  private static void printContentsOfDir(String dir) {
+    LOG.info(printContentsOfDir(dir, 0, new StringBuilder()).toString());    
   }
 }
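
The rewrite above replaces the hard-coded two-level listing with one recursive method that also guards against listFiles() returning null. A standalone sketch of the same pattern, with a hypothetical class name; it passes File objects down so path resolution does not depend on the working directory:

import java.io.File;

public class DirTreeDemo {
  static StringBuilder list(File dir, int depth, StringBuilder sb) {
    StringBuilder indent = new StringBuilder();
    for (int i = 0; i < depth; i++) {
      indent.append("--");
    }
    sb.append(indent).append("Files in '").append(dir.getAbsolutePath()).append("':\n");
    File[] children = dir.listFiles();
    if (children == null) { // unreadable path or not a directory
      return sb;
    }
    for (File child : children) {
      if (child.isFile()) {
        sb.append(indent).append("File: ").append(child.getName()).append('\n');
      } else if (child.isDirectory()) {
        list(child, depth + 1, sb);
      }
    }
    return sb;
  }

  public static void main(String[] args) {
    System.out.println(list(new File("."), 0, new StringBuilder()));
  }
}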

Modified: hive/branches/llap/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java (original)
+++ hive/branches/llap/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java Wed Apr  1 01:15:50 2015
@@ -41,26 +41,48 @@ public class TestHiveAuthFactory {
   }
 
   /**
-   * Verify that delegation token manager is started with no exception
+   * Verify that delegation token manager is started with no exception for MemoryTokenStore
    * @throws Exception
    */
   @Test
-  public void testStartTokenManager() throws Exception {
+  public void testStartTokenManagerForMemoryTokenStore() throws Exception {
     hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthFactory.AuthTypes.KERBEROS.getAuthName());
     String principalName = miniHiveKdc.getFullHiveServicePrincipal();
     System.out.println("Principal: " + principalName);
-    
+
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, principalName);
+    String keyTabFile = miniHiveKdc.getKeyTabFile(miniHiveKdc.getHiveServicePrincipal());
+    System.out.println("keyTabFile: " + keyTabFile);
+    Assert.assertNotNull(keyTabFile);
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keyTabFile);
+
+    HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
+    Assert.assertNotNull(authFactory);
+    Assert.assertEquals("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
+        authFactory.getAuthTransFactory().getClass().getName());
+  }
+
+  /**
+   * Verify that delegation token manager is started with no exception for DBTokenStore
+   * @throws Exception
+   */
+  @Test
+  public void testStartTokenManagerForDBTokenStore() throws Exception {
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthFactory.AuthTypes.KERBEROS.getAuthName());
+    String principalName = miniHiveKdc.getFullHiveServicePrincipal();
+    System.out.println("Principal: " + principalName);
+
     hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, principalName);
     String keyTabFile = miniHiveKdc.getKeyTabFile(miniHiveKdc.getHiveServicePrincipal());
     System.out.println("keyTabFile: " + keyTabFile);
     Assert.assertNotNull(keyTabFile);
     hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keyTabFile);
 
-    System.out.println("rawStoreClassName =" +  hiveConf.getVar(ConfVars.METASTORE_RAW_STORE_IMPL));
+    hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS, "org.apache.hadoop.hive.thrift.DBTokenStore");
 
     HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf);
     Assert.assertNotNull(authFactory);
-    Assert.assertEquals("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory", 
+    Assert.assertEquals("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory",
         authFactory.getAuthTransFactory().getClass().getName());
   }
 }

Modified: hive/branches/llap/itests/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/itests/pom.xml?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/itests/pom.xml (original)
+++ hive/branches/llap/itests/pom.xml Wed Apr  1 01:15:50 2015
@@ -93,6 +93,9 @@
                   mkdir -p $DOWNLOAD_DIR
                   download "http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-${spark.version}-bin-hadoop2-without-hive.tgz" "spark"
                   cp -f $HIVE_ROOT/data/conf/spark/log4j.properties $BASE_DIR/spark/conf/
+                  sed '/package /d' ${basedir}/${hive.path.to.root}/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java > /tmp/UDFExampleAdd.java
+                  javac -cp  ${settings.localRepository}/org/apache/hive/hive-exec/${project.version}/hive-exec-${project.version}.jar /tmp/UDFExampleAdd.java -d /tmp
+                  jar -cf /tmp/udfexampleadd-1.0.jar -C /tmp UDFExampleAdd.class
                 </echo>
               </target>
             </configuration>

Modified: hive/branches/llap/packaging/src/main/assembly/bin.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/packaging/src/main/assembly/bin.xml?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/packaging/src/main/assembly/bin.xml (original)
+++ hive/branches/llap/packaging/src/main/assembly/bin.xml Wed Apr  1 01:15:50 2015
@@ -165,6 +165,7 @@
       <directory>${project.parent.basedir}/conf</directory>
       <includes>
         <include>*.template</include>
+        <include>ivysettings.xml</include>
       </includes>
       <outputDirectory>conf</outputDirectory>
     </fileSet>

Modified: hive/branches/llap/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/pom.xml?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/pom.xml (original)
+++ hive/branches/llap/pom.xml Wed Apr  1 01:15:50 2015
@@ -128,6 +128,7 @@
     <!-- httpcomponents are not always in version sync -->
     <httpcomponents.client.version>4.2.5</httpcomponents.client.version>
     <httpcomponents.core.version>4.2.5</httpcomponents.core.version>
+    <ivy.version>2.4.0</ivy.version>
     <jackson.version>1.9.2</jackson.version>
     <javaewah.version>0.3.2</javaewah.version>
     <javolution.version>5.5.1</javolution.version>
@@ -149,7 +150,7 @@
     <mockito-all.version>1.9.5</mockito-all.version>
     <mina.version>2.0.0-M5</mina.version>
     <netty.version>4.0.23.Final</netty.version>
-    <parquet.version>1.6.0rc3</parquet.version>
+    <parquet.version>1.6.0rc6</parquet.version>
     <pig.version>0.12.0</pig.version>
     <protobuf.version>2.5.0</protobuf.version>
     <stax.version>1.0.1</stax.version>

Modified: hive/branches/llap/ql/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/pom.xml?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/ql/pom.xml (original)
+++ hive/branches/llap/ql/pom.xml Wed Apr  1 01:15:50 2015
@@ -168,6 +168,11 @@
       <version>${libfb303.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.ivy</groupId>
+      <artifactId>ivy</artifactId>
+      <version>${ivy.version}</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
       <version>${libthrift.version}</version>

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/DataWritableRecordConverter.java Wed Apr  1 01:15:50 2015
@@ -18,7 +18,6 @@ import org.apache.hadoop.io.ArrayWritabl
 import parquet.io.api.GroupConverter;
 import parquet.io.api.RecordMaterializer;
 import parquet.schema.GroupType;
-import parquet.schema.MessageType;
 import parquet.schema.MessageTypeParser;
 
 import java.util.Map;
@@ -34,7 +33,7 @@ public class DataWritableRecordConverter
 
   public DataWritableRecordConverter(final GroupType requestedSchema, final Map<String, String> metadata) {
     this.root = new HiveStructConverter(requestedSchema,
-      MessageTypeParser.parseMessageType(metadata.get(DataWritableReadSupport.HIVE_SCHEMA_KEY)), metadata);
+      MessageTypeParser.parseMessageType(metadata.get(DataWritableReadSupport.HIVE_TABLE_AS_PARQUET_SCHEMA)), metadata);
   }
 
   @Override

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java Wed Apr  1 01:15:50 2015
@@ -16,6 +16,7 @@ package org.apache.hadoop.hive.ql.io.par
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.ListIterator;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
@@ -24,17 +25,21 @@ import org.apache.hadoop.hive.ql.io.IOCo
 import org.apache.hadoop.hive.ql.io.parquet.convert.DataWritableRecordConverter;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.util.StringUtils;
 
-import parquet.column.ColumnDescriptor;
+import parquet.hadoop.api.InitContext;
 import parquet.hadoop.api.ReadSupport;
 import parquet.io.api.RecordMaterializer;
+import parquet.schema.GroupType;
 import parquet.schema.MessageType;
-import parquet.schema.PrimitiveType;
-import parquet.schema.PrimitiveType.PrimitiveTypeName;
 import parquet.schema.Type;
-import parquet.schema.Type.Repetition;
+import parquet.schema.Types;
+import parquet.schema.PrimitiveType.PrimitiveTypeName;
 
 /**
  *
@@ -45,8 +50,7 @@ import parquet.schema.Type.Repetition;
  */
 public class DataWritableReadSupport extends ReadSupport<ArrayWritable> {
 
-  private static final String TABLE_SCHEMA = "table_schema";
-  public static final String HIVE_SCHEMA_KEY = "HIVE_TABLE_SCHEMA";
+  public static final String HIVE_TABLE_AS_PARQUET_SCHEMA = "HIVE_TABLE_SCHEMA";
   public static final String PARQUET_COLUMN_INDEX_ACCESS = "parquet.column.index.access";
 
   /**
@@ -56,80 +60,176 @@ public class DataWritableReadSupport ext
    * @param columns comma separated list of columns
    * @return list with virtual columns removed
    */
-  private static List<String> getColumns(final String columns) {
+  private static List<String> getColumnNames(final String columns) {
     return (List<String>) VirtualColumn.
         removeVirtualColumns(StringUtils.getStringCollection(columns));
   }
 
   /**
+   * Returns a list of TypeInfo objects from a string that contains column
+   * type strings.
    *
-   * It creates the readContext for Parquet side with the requested schema during the init phase.
+   * @param types Comma-separated list of types
+   * @return A list of TypeInfo objects.
+   */
+  private static List<TypeInfo> getColumnTypes(final String types) {
+    return TypeInfoUtils.getTypeInfosFromTypeString(types);
+  }
+
+  /**
+   * Searches for a fieldName in a Parquet GroupType, ignoring case.
+   * GroupType#getType(String fieldName) is case sensitive, so we use this method.
    *
-   * @param configuration needed to get the wanted columns
-   * @param keyValueMetaData // unused
-   * @param fileSchema parquet file schema
-   * @return the parquet ReadContext
+   * @param groupType Group of field types in which to search for fieldName
+   * @param fieldName The name of the field being searched for
+   * @return The Type object of the field if found; null otherwise.
    */
-  @Override
-  public parquet.hadoop.api.ReadSupport.ReadContext init(final Configuration configuration,
-      final Map<String, String> keyValueMetaData, final MessageType fileSchema) {
-    final String columns = configuration.get(IOConstants.COLUMNS);
-    final Map<String, String> contextMetadata = new HashMap<String, String>();
-    final boolean indexAccess = configuration.getBoolean(PARQUET_COLUMN_INDEX_ACCESS, false);
-    if (columns != null) {
-      final List<String> listColumns = getColumns(columns);
-      final Map<String, String> lowerCaseFileSchemaColumns = new HashMap<String,String>();
-      for (ColumnDescriptor c : fileSchema.getColumns()) {
-        lowerCaseFileSchemaColumns.put(c.getPath()[0].toLowerCase(), c.getPath()[0]);
+  private static Type getFieldTypeIgnoreCase(GroupType groupType, String fieldName) {
+    for (Type type : groupType.getFields()) {
+      if (type.getName().equalsIgnoreCase(fieldName)) {
+        return type;
       }
-      final List<Type> typeListTable = new ArrayList<Type>();
-      if(indexAccess) {
-        for (int index = 0; index < listColumns.size(); index++) {
-          //Take columns based on index or pad the field
-          if(index < fileSchema.getFieldCount()) {
-            typeListTable.add(fileSchema.getType(index));
-          } else {
-            //prefixing with '_mask_' to ensure no conflict with named
-            //columns in the file schema
-            typeListTable.add(new PrimitiveType(Repetition.OPTIONAL, PrimitiveTypeName.BINARY, "_mask_"+listColumns.get(index)));
+    }
+
+    return null;
+  }
+
+  /**
+   * Searches for the given column names in a Parquet group schema, and returns
+   * their corresponding Parquet schema types.
+   *
+   * @param schema Group schema in which to search for column names.
+   * @param colNames List of column names.
+   * @param colTypes List of column types.
+   * @return List of GroupType objects of projected columns.
+   */
+  private static List<Type> getProjectedGroupFields(GroupType schema, List<String> colNames, List<TypeInfo> colTypes) {
+    List<Type> schemaTypes = new ArrayList<Type>();
+
+    ListIterator<String> columnIterator = colNames.listIterator();
+    while (columnIterator.hasNext()) {
+      TypeInfo colType = colTypes.get(columnIterator.nextIndex());
+      String colName = columnIterator.next();
+
+      Type fieldType = getFieldTypeIgnoreCase(schema, colName);
+      if (fieldType != null) {
+        if (colType.getCategory() == ObjectInspector.Category.STRUCT) {
+          if (fieldType.isPrimitive()) {
+            throw new IllegalStateException("Invalid schema data type, found: PRIMITIVE, expected: STRUCT");
           }
+
+          GroupType groupFieldType = fieldType.asGroupType();
+
+          List<Type> groupFields = getProjectedGroupFields(
+              groupFieldType,
+              ((StructTypeInfo) colType).getAllStructFieldNames(),
+              ((StructTypeInfo) colType).getAllStructFieldTypeInfos()
+          );
+
+          Type[] typesArray = groupFields.toArray(new Type[0]);
+          schemaTypes.add(Types.buildGroup(groupFieldType.getRepetition())
+              .addFields(typesArray)
+              .named(fieldType.getName())
+          );
+        } else {
+          schemaTypes.add(fieldType);
         }
       } else {
-        for (String col : listColumns) {
-          col = col.toLowerCase();
-          // listColumns contains partition columns which are metadata only
-          if (lowerCaseFileSchemaColumns.containsKey(col)) {
-            typeListTable.add(fileSchema.getType(lowerCaseFileSchemaColumns.get(col)));
-          } else {
-            // below allows schema evolution
-            typeListTable.add(new PrimitiveType(Repetition.OPTIONAL, PrimitiveTypeName.BINARY, col));
-          }
-        }
+        // Add type for schema evolution
+        schemaTypes.add(Types.optional(PrimitiveTypeName.BINARY).named(colName));
       }
-      MessageType tableSchema = new MessageType(TABLE_SCHEMA, typeListTable);
-      contextMetadata.put(HIVE_SCHEMA_KEY, tableSchema.toString());
+    }
+
+    return schemaTypes;
+  }
+
+  /**
+   * Searches for the given column names in a Parquet message schema, and returns
+   * the projected Parquet schema types.
+   *
+   * @param schema Message type schema in which to search for column names.
+   * @param colNames List of column names.
+   * @param colTypes List of column types.
+   * @return A MessageType object of projected columns.
+   */
+  private static MessageType getSchemaByName(MessageType schema, List<String> colNames, List<TypeInfo> colTypes) {
+    List<Type> projectedFields = getProjectedGroupFields(schema, colNames, colTypes);
+    Type[] typesArray = projectedFields.toArray(new Type[0]);
+
+    return Types.buildMessage()
+        .addFields(typesArray)
+        .named(schema.getName());
+  }
 
-      final List<Integer> indexColumnsWanted = ColumnProjectionUtils.getReadColumnIDs(configuration);
+  /**
+   * Searches for column names by index in a Parquet file schema, and returns
+   * their corresponding Parquet schema types.
+   *
+   * @param schema Message schema in which to search for column names.
+   * @param colNames List of column names.
+   * @param colIndexes List of column indexes.
+   * @return A MessageType object of the column names found.
+   */
+  private static MessageType getSchemaByIndex(MessageType schema, List<String> colNames, List<Integer> colIndexes) {
+    List<Type> schemaTypes = new ArrayList<Type>();
 
-      final List<Type> typeListWanted = new ArrayList<Type>();
+    for (Integer i : colIndexes) {
+      if (i < colNames.size()) {
+        if (i < schema.getFieldCount()) {
+          schemaTypes.add(schema.getType(i));
+        } else {
+          //prefixing with '_mask_' to ensure no conflict with named
+          //columns in the file schema
+          schemaTypes.add(Types.optional(PrimitiveTypeName.BINARY).named("_mask_" + colNames.get(i)));
+        }
+      }
+    }
 
-      for (final Integer idx : indexColumnsWanted) {
-        if (idx < listColumns.size()) {
-          String col = listColumns.get(idx);
-          if (indexAccess) {
-              typeListWanted.add(fileSchema.getFields().get(idx));
-          } else {
-            col = col.toLowerCase();
-            if (lowerCaseFileSchemaColumns.containsKey(col)) {
-              typeListWanted.add(tableSchema.getType(lowerCaseFileSchemaColumns.get(col)));
-            }
-          }
+    return new MessageType(schema.getName(), schemaTypes);
+  }
+
+  /**
+   * Creates the Parquet read context with the requested schema during the init phase.
+   *
+   * @param context the InitContext that carries the configuration and the file schema
+   * @return the parquet ReadContext
+   */
+  @Override
+  public parquet.hadoop.api.ReadSupport.ReadContext init(InitContext context) {
+    Configuration configuration = context.getConfiguration();
+    MessageType fileSchema = context.getFileSchema();
+    String columnNames = configuration.get(IOConstants.COLUMNS);
+    Map<String, String> contextMetadata = new HashMap<String, String>();
+    boolean indexAccess = configuration.getBoolean(PARQUET_COLUMN_INDEX_ACCESS, false);
+
+    if (columnNames != null) {
+      List<String> columnNamesList = getColumnNames(columnNames);
+
+      MessageType tableSchema;
+      if (indexAccess) {
+        List<Integer> indexSequence = new ArrayList<Integer>();
+
+        // Generate a sequential list of column indexes
+        for (int i = 0; i < columnNamesList.size(); i++) {
+          indexSequence.add(i);
         }
+
+        tableSchema = getSchemaByIndex(fileSchema, columnNamesList, indexSequence);
+      } else {
+        String columnTypes = configuration.get(IOConstants.COLUMNS_TYPES);
+        List<TypeInfo> columnTypesList = getColumnTypes(columnTypes);
+
+        tableSchema = getSchemaByName(fileSchema, columnNamesList, columnTypesList);
       }
-      MessageType requestedSchemaByUser = new MessageType(fileSchema.getName(), typeListWanted);
+
+      contextMetadata.put(HIVE_TABLE_AS_PARQUET_SCHEMA, tableSchema.toString());
+
+      List<Integer> indexColumnsWanted = ColumnProjectionUtils.getReadColumnIDs(configuration);
+      MessageType requestedSchemaByUser = getSchemaByIndex(tableSchema, columnNamesList, indexColumnsWanted);
+
       return new ReadContext(requestedSchemaByUser, contextMetadata);
     } else {
-      contextMetadata.put(HIVE_SCHEMA_KEY, fileSchema.toString());
+      contextMetadata.put(HIVE_TABLE_AS_PARQUET_SCHEMA, fileSchema.toString());
       return new ReadContext(fileSchema, contextMetadata);
     }
   }
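
The name-based projection above matches each Hive column against the file
schema case-insensitively, and substitutes an optional binary placeholder for
any column the file does not contain, which is what permits schema evolution.
A minimal self-contained sketch of that resolution loop, using a hypothetical
schema string and column list rather than anything from this patch:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    import parquet.schema.MessageType;
    import parquet.schema.MessageTypeParser;
    import parquet.schema.PrimitiveType.PrimitiveTypeName;
    import parquet.schema.Type;
    import parquet.schema.Types;

    public class ProjectionSketch {
      public static void main(String[] args) {
        // File schema with mixed-case field names, as Parquet may store them.
        MessageType fileSchema = MessageTypeParser.parseMessageType(
            "message hive_schema { optional int32 ID; optional binary Name; }");

        // Hive lower-cases column names; "added_col" is newer than the file.
        List<String> hiveColumns = Arrays.asList("id", "name", "added_col");

        List<Type> projected = new ArrayList<Type>();
        for (String col : hiveColumns) {
          Type match = null;
          for (Type field : fileSchema.getFields()) {
            if (field.getName().equalsIgnoreCase(col)) {
              match = field; // case-insensitive hit: reuse the file's type
              break;
            }
          }
          // The placeholder lets a column added after the file was written
          // materialize as null instead of failing the read.
          projected.add(match != null
              ? match
              : Types.optional(PrimitiveTypeName.BINARY).named(col));
        }

        System.out.println(new MessageType(fileSchema.getName(), projected));
      }
    }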

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/ParquetRecordReaderWrapper.java Wed Apr  1 01:15:50 2015
@@ -15,7 +15,12 @@ package org.apache.hadoop.hive.ql.io.par
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -42,6 +47,7 @@ import parquet.filter2.predicate.FilterP
 import parquet.hadoop.ParquetFileReader;
 import parquet.hadoop.ParquetInputFormat;
 import parquet.hadoop.ParquetInputSplit;
+import parquet.hadoop.api.InitContext;
 import parquet.hadoop.api.ReadSupport.ReadContext;
 import parquet.hadoop.metadata.BlockMetaData;
 import parquet.hadoop.metadata.FileMetaData;
@@ -243,10 +249,10 @@ public class ParquetRecordReaderWrapper
       final List<BlockMetaData> blocks = parquetMetadata.getBlocks();
       final FileMetaData fileMetaData = parquetMetadata.getFileMetaData();
 
-      final ReadContext readContext = new DataWritableReadSupport()
-          .init(jobConf, fileMetaData.getKeyValueMetaData(), fileMetaData.getSchema());
+      final ReadContext readContext = new DataWritableReadSupport().init(new InitContext(jobConf,
+          null, fileMetaData.getSchema()));
       schemaSize = MessageTypeParser.parseMessageType(readContext.getReadSupportMetadata()
-          .get(DataWritableReadSupport.HIVE_SCHEMA_KEY)).getFieldCount();
+          .get(DataWritableReadSupport.HIVE_TABLE_AS_PARQUET_SCHEMA)).getFieldCount();
       final List<BlockMetaData> splitGroup = new ArrayList<BlockMetaData>();
       final long splitStart = ((FileSplit) oldSplit).getStart();
       final long splitLength = ((FileSplit) oldSplit).getLength();
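
The wrapper now funnels through Parquet's InitContext instead of the removed
three-argument init. A short sketch of the new call shape, assuming jobConf
and parquetMetadata are in scope as in the surrounding method; the null
argument stands in for the merged key/value metadata map, which this code
path does not consume:

    // Build the read context the same way the patched method does.
    ReadContext readContext = new DataWritableReadSupport().init(
        new InitContext(jobConf, null, parquetMetadata.getFileMetaData().getSchema()));

    // The table schema travels through the context metadata as a string.
    int fieldCount = MessageTypeParser.parseMessageType(
        readContext.getReadSupportMetadata()
            .get(DataWritableReadSupport.HIVE_TABLE_AS_PARQUET_SCHEMA))
        .getFieldCount();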

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Wed Apr  1 01:15:50 2015
@@ -645,30 +645,30 @@ public final class ColumnPrunerProcFacto
       // get the SEL(*) branch
       Operator<?> select = op.getChildOperators().get(LateralViewJoinOperator.SELECT_TAG);
 
+      // Update the info of SEL operator based on the pruned reordered columns
       // these are from ColumnPrunerSelectProc
       List<String> cols = cppCtx.getPrunedColList(select);
       RowSchema rs = op.getSchema();
-      if (rs.getSignature().size() != cols.size()) {
-        ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
-        ArrayList<String> outputColNames = new ArrayList<String>();
-        for (String col : cols) {
-          // revert output cols of SEL(*) to ExprNodeColumnDesc
-          ColumnInfo colInfo = rs.getColumnInfo(col);
-          ExprNodeColumnDesc colExpr = new ExprNodeColumnDesc(colInfo);
-          colList.add(colExpr);
-          outputColNames.add(col);
-        }
-        // replace SEL(*) to SEL(exprs)
-        ((SelectDesc)select.getConf()).setSelStarNoCompute(false);
-        ((SelectDesc)select.getConf()).setColList(colList);
-        ((SelectDesc)select.getConf()).setOutputColumnNames(outputColNames);
-        pruneOperator(ctx, select, outputColNames);
-        
-        Operator<?> udtfPath = op.getChildOperators().get(LateralViewJoinOperator.UDTF_TAG);
-        List<String> lvFCols = new ArrayList<String>(cppCtx.getPrunedColLists().get(udtfPath));
-        lvFCols = Utilities.mergeUniqElems(lvFCols, outputColNames);
-        pruneOperator(ctx, op, lvFCols);
+      ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
+      ArrayList<String> outputColNames = new ArrayList<String>();
+      for (String col : cols) {
+        // revert output cols of SEL(*) to ExprNodeColumnDesc
+        ColumnInfo colInfo = rs.getColumnInfo(col);
+        ExprNodeColumnDesc colExpr = new ExprNodeColumnDesc(colInfo);
+        colList.add(colExpr);
+        outputColNames.add(col);
       }
+      // replace SEL(*) to SEL(exprs)
+      ((SelectDesc)select.getConf()).setSelStarNoCompute(false);
+      ((SelectDesc)select.getConf()).setColList(colList);
+      ((SelectDesc)select.getConf()).setOutputColumnNames(outputColNames);
+      pruneOperator(ctx, select, outputColNames);
+      
+      Operator<?> udtfPath = op.getChildOperators().get(LateralViewJoinOperator.UDTF_TAG);
+      List<String> lvFCols = new ArrayList<String>(cppCtx.getPrunedColLists().get(udtfPath));
+      lvFCols = Utilities.mergeUniqElems(lvFCols, outputColNames);
+      pruneOperator(ctx, op, lvFCols);
+      
       return null;
     }
   }
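
The guard that was removed compared only list sizes, so a pruning pass that
merely reordered the columns (same count, different order) could skip the
SEL(*) rewrite and leave the select emitting columns in the wrong positions;
consistent with the new comment about "pruned reordered columns", the rewrite
now runs unconditionally. A toy illustration of why a size check alone is not
sufficient:

    List<String> schemaCols = Arrays.asList("key", "value");
    List<String> prunedCols = Arrays.asList("value", "key"); // reordered

    // The old guard saw equal sizes and skipped the rewrite...
    assert schemaCols.size() == prunedCols.size();
    // ...even though positional output no longer matched the schema.
    assert !schemaCols.equals(prunedCols);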

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java Wed Apr  1 01:15:50 2015
@@ -517,16 +517,17 @@ public final class ConstantPropagateProc
       if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(clz)) {
         PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
         TypeInfo typeInfo = poi.getTypeInfo();
-
-        // Handling parameterized types (varchar, decimal, etc).
-        if (typeInfo.getTypeName().contains(serdeConstants.DECIMAL_TYPE_NAME)
-            || typeInfo.getTypeName().contains(serdeConstants.VARCHAR_TYPE_NAME)
+        // Handling parameterized types (varchar, char, etc.).
+        if (typeInfo.getTypeName().contains(serdeConstants.VARCHAR_TYPE_NAME)
             || typeInfo.getTypeName().contains(serdeConstants.CHAR_TYPE_NAME)) {
 
           // Do not support parameterized types.
           return null;
         }
         o = poi.getPrimitiveJavaObject(o);
+        if (typeInfo.getTypeName().contains(serdeConstants.DECIMAL_TYPE_NAME)) {
+          return new ExprNodeConstantDesc(typeInfo, o);
+        }
       } else if (PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(clz)) {
 
       } else {
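
With this change, varchar and char parameters still block folding, but a
decimal writable is now unwrapped to its Java value and returned as a typed
constant descriptor, so decimal expressions can be constant-folded. A hedged
sketch of constructing such a descriptor directly; the precision and scale
here are illustrative, not taken from the patch:

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    // A decimal(10,2) constant survives folding because the parameterized
    // TypeInfo is carried alongside the unwrapped Java value.
    TypeInfo decType = TypeInfoFactory.getDecimalTypeInfo(10, 2);
    ExprNodeConstantDesc folded =
        new ExprNodeConstantDesc(decType, HiveDecimal.create("1.00"));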

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/HiveCalciteUtil.java Wed Apr  1 01:15:50 2015
@@ -32,8 +32,16 @@ import org.apache.calcite.rel.core.RelFa
 import org.apache.calcite.rel.core.Sort;
 import org.apache.calcite.rel.type.RelDataTypeField;
 import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexCorrelVariable;
+import org.apache.calcite.rex.RexDynamicParam;
+import org.apache.calcite.rex.RexFieldAccess;
 import org.apache.calcite.rex.RexInputRef;
+import org.apache.calcite.rex.RexLiteral;
+import org.apache.calcite.rex.RexLocalRef;
 import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.rex.RexOver;
+import org.apache.calcite.rex.RexRangeRef;
 import org.apache.calcite.rex.RexVisitor;
 import org.apache.calcite.rex.RexVisitorImpl;
 import org.apache.calcite.sql.SqlKind;
@@ -535,6 +543,7 @@ public class HiveCalciteUtil {
     boolean deterministic = true;
 
     RexVisitor<Void> visitor = new RexVisitorImpl<Void>(true) {
+      @Override
       public Void visitCall(org.apache.calcite.rex.RexCall call) {
         if (!call.getOperator().isDeterministic()) {
           throw new Util.FoundOne(call);
@@ -551,4 +560,59 @@ public class HiveCalciteUtil {
 
     return deterministic;
   }
+
+  /**
+   * Walks over an expression and determines whether it is constant.
+   */
+  public static class ConstantFinder implements RexVisitor<Boolean> {
+
+    @Override
+    public Boolean visitLiteral(RexLiteral literal) {
+      return true;
+    }
+
+    @Override
+    public Boolean visitInputRef(RexInputRef inputRef) {
+      return false;
+    }
+
+    @Override
+    public Boolean visitLocalRef(RexLocalRef localRef) {
+      throw new RuntimeException("Not expected to be called.");
+    }
+
+    @Override
+    public Boolean visitOver(RexOver over) {
+      return false;
+    }
+
+    @Override
+    public Boolean visitCorrelVariable(RexCorrelVariable correlVariable) {
+      return false;
+    }
+
+    @Override
+    public Boolean visitDynamicParam(RexDynamicParam dynamicParam) {
+      return false;
+    }
+
+    @Override
+    public Boolean visitCall(RexCall call) {
+      // Constant if operator is deterministic and all operands are
+      // constant.
+      return call.getOperator().isDeterministic()
+          && RexVisitorImpl.visitArrayAnd(this, call.getOperands());
+    }
+
+    @Override
+    public Boolean visitRangeRef(RexRangeRef rangeRef) {
+      return false;
+    }
+
+    @Override
+    public Boolean visitFieldAccess(RexFieldAccess fieldAccess) {
+      // "<expr>.FIELD" is constant iff "<expr>" is constant.
+      return fieldAccess.getReferenceExpr().accept(this);
+    }
+  }
 }
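
Because ConstantFinder implements RexVisitor directly rather than extending
RexVisitorImpl, every node kind must answer explicitly, and calls recurse
through visitArrayAnd over their operands. A brief usage sketch, with 'expr'
standing for any RexNode already built for the plan:

    // Literals and deterministic calls over literals report true;
    // an input reference anywhere in the tree reports false.
    boolean isConstant = expr.accept(new HiveCalciteUtil.ConstantFinder());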

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierForASTConv.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierForASTConv.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierForASTConv.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/PlanModifierForASTConv.java Wed Apr  1 01:15:50 2015
@@ -38,6 +38,9 @@ import org.apache.calcite.rel.core.Sort;
 import org.apache.calcite.rel.rules.MultiJoin;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexInputRef;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.sql.SqlAggFunction;
 import org.apache.calcite.sql.SqlKind;
@@ -168,7 +171,27 @@ public class PlanModifierForASTConv {
     ImmutableMap.Builder<Integer, RexNode> inputRefToCallMapBldr = ImmutableMap.builder();
     for (int i = resultSchema.size(); i < rt.getFieldCount(); i++) {
       if (collationInputRefs.contains(i)) {
-        inputRefToCallMapBldr.put(i, obChild.getChildExps().get(i));
+        RexNode obyExpr = obChild.getChildExps().get(i);
+        if (obyExpr instanceof RexCall) {
+          int a = -1;
+          List<RexNode> operands = new ArrayList<>();
+          for (int k = 0; k < ((RexCall) obyExpr).operands.size(); k++) {
+            RexNode rn = ((RexCall) obyExpr).operands.get(k);
+            // Look for a projected child expression identical to this operand.
+            for (int j = 0; j < resultSchema.size(); j++) {
+              if (obChild.getChildExps().get(j).toString().equals(rn.toString())) {
+                a = j;
+                break;
+              }
+            }
+            if (a != -1) {
+              operands.add(new RexInputRef(a, rn.getType()));
+            } else {
+              operands.add(rn);
+            }
+            a = -1;
+          }
+          obyExpr = obChild.getCluster().getRexBuilder().makeCall(((RexCall)obyExpr).getOperator(), operands);
+        }
+        inputRefToCallMapBldr.put(i, obyExpr);
       }
     }
     ImmutableMap<Integer, RexNode> inputRefToCallMap = inputRefToCallMapBldr.build();
@@ -266,7 +289,7 @@ public class PlanModifierForASTConv {
     RelNode select = introduceDerivedTable(rel);
 
     parent.replaceInput(pos, select);
-    
+
     return select;
   }
 
@@ -352,7 +375,7 @@ public class PlanModifierForASTConv {
 
     return validChild;
   }
-  
+
   private static boolean isEmptyGrpAggr(RelNode gbNode) {
     // Verify that both the group set and the aggregate functions are empty
     Aggregate aggrnode = (Aggregate) gbNode;
@@ -361,12 +384,12 @@ public class PlanModifierForASTConv {
     }
     return false;
   }
-  
+
   private static void replaceEmptyGroupAggr(final RelNode rel, RelNode parent) {
     // If this function is called, the parent should only include constant
     List<RexNode> exps = parent.getChildExps();
     for (RexNode rexNode : exps) {
-      if (rexNode.getKind() != SqlKind.LITERAL) {
+      if (!rexNode.accept(new HiveCalciteUtil.ConstantFinder())) {
         throw new RuntimeException("We expect " + parent.toString()
             + " to contain only constants. However, " + rexNode.toString() + " is "
             + rexNode.getKind());
@@ -377,7 +400,7 @@ public class PlanModifierForASTConv {
     RelDataType longType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, typeFactory);
     RelDataType intType = TypeConverter.convert(TypeInfoFactory.intTypeInfo, typeFactory);
     // Create the dummy aggregation.
-    SqlAggFunction countFn = (SqlAggFunction) SqlFunctionConverter.getCalciteAggFn("count",
+    SqlAggFunction countFn = SqlFunctionConverter.getCalciteAggFn("count",
         ImmutableList.of(intType), longType);
     // TODO: Using 0 might be wrong; might need to walk down to find the
     // proper index of a dummy.
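
The new ORDER BY handling above replaces any call operand that textually
matches an already-projected child expression with a positional reference, so
the AST converter emits a column reference instead of recomputing the
expression. A reduced sketch of that substitution with hypothetical names
(projected expressions 'exprs', an ORDER BY call 'obyCall', and a
'rexBuilder' assumed in scope):

    List<RexNode> newOperands = new ArrayList<>();
    for (RexNode operand : obyCall.getOperands()) {
      int pos = -1;
      for (int j = 0; j < exprs.size(); j++) {
        // Textual comparison, as in the patch, stands in for structural
        // equality between the operand and the projected expression.
        if (exprs.get(j).toString().equals(operand.toString())) {
          pos = j;
          break;
        }
      }
      newOperands.add(pos != -1 ? new RexInputRef(pos, operand.getType()) : operand);
    }
    RexNode rewritten = rexBuilder.makeCall(obyCall.getOperator(), newOperands);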

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java?rev=1670534&r1=1670533&r2=1670534&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java Wed Apr  1 01:15:50 2015
@@ -69,6 +69,7 @@ import org.apache.calcite.rel.rules.Filt
 import org.apache.calcite.rel.rules.JoinPushTransitivePredicatesRule;
 import org.apache.calcite.rel.rules.JoinToMultiJoinRule;
 import org.apache.calcite.rel.rules.LoptOptimizeJoinRule;
+import org.apache.calcite.rel.rules.ProjectMergeRule;
 import org.apache.calcite.rel.rules.ProjectRemoveRule;
 import org.apache.calcite.rel.rules.ReduceExpressionsRule;
 import org.apache.calcite.rel.rules.SemiJoinFilterTransposeRule;
@@ -721,6 +722,7 @@ public class CalcitePlanner extends Sema
       hepPgmBldr.addRuleInstance(ReduceExpressionsRule.PROJECT_INSTANCE);
       hepPgmBldr.addRuleInstance(ProjectRemoveRule.INSTANCE);
       hepPgmBldr.addRuleInstance(UnionMergeRule.INSTANCE);
+      hepPgmBldr.addRuleInstance(new ProjectMergeRule(false, HiveProject.DEFAULT_PROJECT_FACTORY));
 
       hepPgm = hepPgmBldr.build();
       HepPlanner hepPlanner = new HepPlanner(hepPgm);
@@ -785,8 +787,6 @@ public class CalcitePlanner extends Sema
       // 3. Transitive inference & Partition Pruning
       basePlan = hepPlan(basePlan, false, mdProvider, new JoinPushTransitivePredicatesRule(
           Join.class, HiveFilter.DEFAULT_FILTER_FACTORY),
-      // TODO: Enable it after CALCITE-407 is fixed
-      // RemoveTrivialProjectRule.INSTANCE,
           new HivePartitionPruneRule(conf));
 
       // 4. Projection Pruning


