hive-commits mailing list archives

From: gunt...@apache.org
Subject: svn commit: r1551769 [1/6] - in /hive/branches/tez: ant/src/org/apache/hadoop/hive/ant/ common/src/java/org/apache/hadoop/hive/conf/ data/conf/tez/ itests/qtest/ itests/util/src/main/java/org/apache/hadoop/hive/ql/ ql/src/test/results/clientpositive/ q...
Date: Tue, 17 Dec 2013 23:55:00 GMT
Author: gunther
Date: Tue Dec 17 23:54:59 2013
New Revision: 1551769

URL: http://svn.apache.org/r1551769
Log:
HIVE-5065 (part 2): Create proper (i.e.: non .q file based) junit tests for DagUtils and TezTask (Gunther Hagleitner)

Added:
    hive/branches/tez/data/conf/tez/
    hive/branches/tez/data/conf/tez/hive-site.xml   (with props)
    hive/branches/tez/ql/src/test/results/clientpositive/tez/
    hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join0.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket2.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket3.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket4.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/count.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/create_merge_compressed.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/cross_join.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/ctas.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/custom_input_output_format.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/join0.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/join1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/mrr.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/tez_dml.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/tez_insert_overwrite_local_directory_1.q.out   (with props)
    hive/branches/tez/ql/src/test/results/clientpositive/tez/tez_join_tests.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez/tez_joins_explain.q.out
Removed:
    hive/branches/tez/ql/src/test/results/clientpositive/mrr.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez_dml.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez_insert_overwrite_local_directory_1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez_join_tests.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/tez_joins_explain.q.out
Modified:
    hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
    hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/tez/itests/qtest/pom.xml
    hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/branches/tez/ql/src/test/templates/TestCliDriver.vm

Modified: hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java?rev=1551769&r1=1551768&r2=1551769&view=diff
==============================================================================
--- hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java (original)
+++ hive/branches/tez/ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java Tue Dec 17 23:54:59 2013
@@ -135,6 +135,8 @@ public class QTestGenTask extends Task {
 
   private String clusterMode;
 
+  private String hiveConfDir;
+
   private String runDisabled;
   
   private String hadoopVersion;
@@ -146,6 +148,14 @@ public class QTestGenTask extends Task {
   public String getHadoopVersion() {
     return hadoopVersion;
   }
+
+  public void setHiveConfDir(String hiveConfDir) {
+    this.hiveConfDir = hiveConfDir;
+  }
+
+  public String getHiveConfDir() {
+    return hiveConfDir;
+  }
   
   public void setClusterMode(String clusterMode) {
     this.clusterMode = clusterMode;
@@ -414,6 +424,9 @@ public class QTestGenTask extends Task {
       if (hadoopVersion == null) {
         hadoopVersion = "";
       }
+      if (hiveConfDir == null) {
+        hiveConfDir = "";
+      }
 
       // For each of the qFiles generate the test
       VelocityContext ctx = new VelocityContext();
@@ -425,6 +438,7 @@ public class QTestGenTask extends Task {
       ctx.put("resultsDir", relativePath(hiveRootDir, resultsDir));
       ctx.put("logDir", relativePath(hiveRootDir, logDir));
       ctx.put("clusterMode", clusterMode);
+      ctx.put("hiveConfDir", hiveConfDir);
       ctx.put("hadoopVersion", hadoopVersion);
 
       File outFile = new File(outDir, className + ".java");
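
The new hiveConfDir attribute simply rides the Velocity context into the generated drivers; the TestCliDriver.vm template (modified in this commit, diff in a later part) can then hand the value to QTestUtil. A hedged sketch of the flow when the task is driven programmatically instead of from the qtest pom below; the values are illustrative, not taken from the build:

    // Illustrative sketch only: configuring QTestGenTask the way the
    // task invocation in itests/qtest/pom.xml does in XML.
    // setHiveConfDir is the setter added above; an empty string
    // (the default) leaves hive-site.xml resolution untouched.
    QTestGenTask task = new QTestGenTask();
    task.setClusterMode("tez");
    task.setHiveConfDir("data/conf/tez");
    // ... remaining attributes (templatePath, queryDirectory,
    // resultsDirectory, className, ...) as in the pom invocation
    task.execute();  // per the hunk above, this puts "hiveConfDir"
                     // into the Velocity context for the template
                     // (requires a real Ant Project; shown only to
                     // trace how the property travels)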

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1551769&r1=1551768&r2=1551769&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue Dec 17 23:54:59 2013
@@ -1337,7 +1337,11 @@ public class HiveConf extends Configurat
     return hiveDefaultURL;
   }
 
-  public URL getHiveSiteLocation() {
+  public static void setHiveSiteLocation(URL location) {
+    hiveSiteURL = location;
+  }
+
+  public static URL getHiveSiteLocation() {
     return hiveSiteURL;
   }
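
With getHiveSiteLocation now static and paired with a static setter, a test harness can repoint hive-site.xml for the whole JVM, provided the override happens before the first HiveConf instance is built; QTestUtil below follows exactly that order. A minimal sketch, with an assumed path:

    import java.net.URL;
    import org.apache.hadoop.hive.conf.HiveConf;

    public class CustomHiveSite {
      public static void main(String[] args) throws Exception {
        // Override first: the URL is consulted when a HiveConf is constructed.
        HiveConf.setHiveSiteLocation(new URL("file:///some/conf/dir/hive-site.xml"));
        HiveConf conf = new HiveConf();  // initialized from the custom hive-site.xml
        System.out.println("Using hive-site: " + HiveConf.getHiveSiteLocation());
      }
    }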
 

Added: hive/branches/tez/data/conf/tez/hive-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/data/conf/tez/hive-site.xml?rev=1551769&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hive/branches/tez/data/conf/tez/hive-site.xml
------------------------------------------------------------------------------
    svn:mime-type = application/xml

Modified: hive/branches/tez/itests/qtest/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/qtest/pom.xml?rev=1551769&r1=1551768&r2=1551769&view=diff
==============================================================================
--- hive/branches/tez/itests/qtest/pom.xml (original)
+++ hive/branches/tez/itests/qtest/pom.xml Tue Dec 17 23:54:59 2013
@@ -39,6 +39,7 @@
     <minimr.query.files>stats_counter_partitioned.q,list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q,stats_counter.q</minimr.query.files>
     <minimr.query.negative.files>cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q</minimr.query.negative.files>
     <minitez.query.files>tez_join_tests.q,tez_joins_explain.q,mrr.q,tez_dml.q,tez_insert_overwrite_local_directory_1.q</minitez.query.files>
+    <minitez.query.files.shared>join0.q,join1.q,auto_join0.q,auto_join1.q,bucket2.q,bucket3.q,bucket4.q,count.q,create_merge_compressed.q,cross_join.q,ctas.q,custom_input_output_format.q</minitez.query.files.shared>
     <beeline.positive.exclude>add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rename.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_overwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q</beeline.positive.exclude>
   </properties>
 
@@ -444,11 +445,12 @@
                               templatePath="${basedir}/${hive.path.to.root}/ql/src/test/templates/" template="TestCliDriver.vm"
                               queryDirectory="${basedir}/${hive.path.to.root}/ql/src/test/queries/clientpositive/"
                               queryFile="${qfile}"
-                              includeQueryFile="${minitez.query.files}"
+                              includeQueryFile="${minitez.query.files},${minitez.query.files.shared}"
                               queryFileRegex="${qfile_regex}"
                               clusterMode="tez"
                               runDisabled="${run_disabled}"
-                              resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/" 
+                              hiveConfDir="${basedir}/${hive.path.to.root}/data/conf/tez"
+                              resultsDirectory="${basedir}/${hive.path.to.root}/ql/src/test/results/clientpositive/tez" 
                               className="TestMiniTezCliDriver"
                               logFile="${project.build.directory}/testminitezclidrivergen.log"
                               logDirectory="${project.build.directory}/qfile-results/clientpositive/"

Modified: hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1551769&r1=1551768&r2=1551769&view=diff
==============================================================================
--- hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Tue Dec 17 23:54:59 2013
@@ -35,6 +35,7 @@ import java.io.PrintStream;
 import java.io.Serializable;
 import java.io.StringWriter;
 import java.io.UnsupportedEncodingException;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -207,7 +208,7 @@ public class QTestUtil {
   }
 
   public QTestUtil(String outDir, String logDir) throws Exception {
-    this(outDir, logDir, MiniClusterType.none, "0.20");
+    this(outDir, logDir, MiniClusterType.none, null, "0.20");
   }
 
   public String getOutputDirectory() {
@@ -315,10 +316,20 @@ public class QTestUtil {
     }
   }
 
-  public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, String hadoopVer)
+  public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, String hadoopVer) 
+    throws Exception {
+    this(outDir, logDir, clusterType, null, hadoopVer);
+  }
+
+  public QTestUtil(String outDir, String logDir, MiniClusterType clusterType, 
+      String confDir, String hadoopVer)
     throws Exception {
     this.outDir = outDir;
     this.logDir = logDir;
+    if (confDir != null && !confDir.isEmpty()) {
+      HiveConf.setHiveSiteLocation(new URL("file://"+confDir+"/hive-site.xml"));
+      System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
+    }
     conf = new HiveConf(Driver.class);
     this.miniMr = (clusterType == MiniClusterType.mr);
     this.hadoopVer = getHadoopMainVersion(hadoopVer);
@@ -328,14 +339,18 @@ public class QTestUtil {
     this.clusterType = clusterType;
 
     HadoopShims shims = ShimLoader.getHadoopShims();
+    int numberOfDataNodes = 4;
 
     // can run tez tests only on hadoop 2
     if (clusterType == MiniClusterType.tez) {
       Assume.assumeTrue(ShimLoader.getMajorVersion().equals("0.23"));
+      // this is necessary temporarily - there's a problem with multiple datanodes on MiniTezCluster
+      // will be fixed in 0.3
+      numberOfDataNodes = 1;
     }
 
     if (clusterType != MiniClusterType.none) {
-      dfs = shims.getMiniDfs(conf, 4, true, null);
+      dfs = shims.getMiniDfs(conf, numberOfDataNodes, true, null);
       FileSystem fs = dfs.getFileSystem();
       if (clusterType == MiniClusterType.tez) {
         if (!(shims instanceof Hadoop23Shims)) {
@@ -1480,7 +1495,7 @@ public class QTestUtil {
   {
     QTestUtil[] qt = new QTestUtil[qfiles.length];
     for (int i = 0; i < qfiles.length; i++) {
-      qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, "0.20");
+      qt[i] = new QTestUtil(resDir, logDir, MiniClusterType.none, null, "0.20");
       qt[i].addFile(qfiles[i]);
       qt[i].clearTestSideEffects();
     }
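
With that plumbing in place, selecting an alternate configuration is a one-argument change at the call site. A hedged sketch of direct use; the directory strings are illustrative, and a null or empty confDir (as in the callers updated above) keeps the stock hive-site.xml:

    // Sketch: the two flavors of the widened constructor
    // (inside a test method declared to throw Exception).
    String resDir = "ql/src/test/results/clientpositive/tez";  // illustrative
    String logDir = "target/qfile-results/clientpositive";     // illustrative

    // null confDir == old behavior, default hive-site.xml:
    QTestUtil plain = new QTestUtil(resDir, logDir, QTestUtil.MiniClusterType.none, null, "0.20");

    // A non-empty confDir makes QTestUtil point HiveConf at
    // file://<confDir>/hive-site.xml before it builds its conf:
    QTestUtil tez = new QTestUtil(resDir, logDir, QTestUtil.MiniClusterType.tez, "data/conf/tez", "0.23");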

Added: hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join0.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join0.q.out?rev=1551769&view=auto
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join0.q.out (added)
+++ hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join0.q.out Tue Dec 17 23:54:59 2013
@@ -0,0 +1,188 @@
+PREHOOK: query: explain 
+select sum(hash(a.k1,a.v1,a.k2, a.v2))
+from (
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+) a
+PREHOOK: type: QUERY
+POSTHOOK: query: explain 
+select sum(hash(a.k1,a.v1,a.k2, a.v2))
+from (
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+) a
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 10)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 10)))) src2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) k1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) k2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) v2)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL k2)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL v2))))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION sum (TOK_FUNCTION hash (. (TOK_TABLE_OR_COL a) k1) (. (TOK_TABLE_OR_COL a) v1) (. (TOK_TABLE_OR_COL a) k2) (. (TOK_TABLE_OR_COL a) v2)))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Filter Operator
+              predicate:
+                  expr: (key < 10)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  sort order: 
+                  tag: 1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Filter Operator
+              predicate:
+                  expr: (key < 10)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  condition expressions:
+                    0 {_col0} {_col1}
+                    1 {_col0} {_col1}
+                  handleSkewJoin: false
+                  keys:
+                    0 []
+                    1 []
+                  outputColumnNames: _col0, _col1, _col2, _col3
+                  Position of Big Table: 0
+                  Select Operator
+                    expressions:
+                          expr: _col0
+                          type: string
+                          expr: _col1
+                          type: string
+                          expr: _col2
+                          type: string
+                          expr: _col3
+                          type: string
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Reduce Output Operator
+                      key expressions:
+                            expr: _col0
+                            type: string
+                            expr: _col1
+                            type: string
+                            expr: _col2
+                            type: string
+                            expr: _col3
+                            type: string
+                      sort order: ++++
+                      tag: -1
+                      value expressions:
+                            expr: _col0
+                            type: string
+                            expr: _col1
+                            type: string
+                            expr: _col2
+                            type: string
+                            expr: _col3
+                            type: string
+      Reduce Operator Tree:
+        Extract
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: string
+                  expr: _col2
+                  type: string
+                  expr: _col3
+                  type: string
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Group By Operator
+              aggregations:
+                    expr: sum(hash(_col0,_col1,_col2,_col3))
+              bucketGroup: false
+              mode: hash
+              outputColumnNames: _col0
+              Reduce Output Operator
+                sort order: 
+                tag: -1
+                value expressions:
+                      expr: _col0
+                      type: bigint
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: sum(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: bigint
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select sum(hash(a.k1,a.v1,a.k2, a.v2))
+from (
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(hash(a.k1,a.v1,a.k2, a.v2))
+from (
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+34441656720

Added: hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join1.q.out?rev=1551769&view=auto
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join1.q.out (added)
+++ hive/branches/tez/ql/src/test/results/clientpositive/tez/auto_join1.q.out Tue Dec 17 23:54:59 2013
@@ -0,0 +1,160 @@
+PREHOOK: query: CREATE TABLE dest_j1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest_j1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest_j1
+PREHOOK: query: explain
+FROM src src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+FROM src src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) src1) (TOK_TABREF (TOK_TABNAME src) src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest_j1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-8 depends on stages: Stage-1 , consists of Stage-5, Stage-4, Stage-6
+  Stage-5
+  Stage-2 depends on stages: Stage-5, Stage-4, Stage-7
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+  Stage-4
+  Stage-6
+  Stage-7 depends on stages: Stage-6
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Alias -> Map Operator Tree:
+        src2 
+          TableScan
+            alias: src2
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: value
+                    type: string
+      Alias -> Map Operator Tree:
+        src1 
+          TableScan
+            alias: src1
+            Map Join Operator
+              condition map:
+                   Inner Join 0 to 1
+              condition expressions:
+                0 {key}
+                1 {value}
+              handleSkewJoin: false
+              keys:
+                0 [Column[key]]
+                1 [Column[key]]
+              outputColumnNames: _col0, _col5
+              Position of Big Table: 0
+              Select Operator
+                expressions:
+                      expr: UDFToInteger(_col0)
+                      type: int
+                      expr: _col5
+                      type: string
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.dest_j1
+
+  Stage: Stage-8
+    Conditional Operator
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest_j1
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+  Stage: Stage-4
+    Tez
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          TableScan
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.dest_j1
+
+  Stage: Stage-6
+    Tez
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+          TableScan
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.dest_j1
+
+  Stage: Stage-7
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest_j1
+POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest_j1
+POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT sum(hash(dest_j1.key,dest_j1.value)) FROM dest_j1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest_j1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT sum(hash(dest_j1.key,dest_j1.value)) FROM dest_j1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest_j1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value SIMPLE [(src)src2.FieldSchema(name:value, type:string, comment:default), ]
+101861029915

Added: hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket2.q.out?rev=1551769&view=auto
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket2.q.out (added)
+++ hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket2.q.out Tue Dec 17 23:54:59 2013
@@ -0,0 +1,499 @@
+PREHOOK: query: CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@bucket2_1
+PREHOOK: query: explain extended
+insert overwrite table bucket2_1
+select * from src
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+insert overwrite table bucket2_1
+select * from src
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME bucket2_1))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Statistics:
+                numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              Statistics:
+                  numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+              Reduce Output Operator
+                sort order: 
+                Map-reduce partition columns:
+                      expr: UDFToInteger(_col0)
+                      type: int
+                Statistics:
+                    numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+                tag: -1
+                value expressions:
+                      expr: _col0
+                      type: string
+                      expr: _col1
+                      type: string
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE true
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Extract
+          Statistics:
+              numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+          Select Operator
+            expressions:
+                  expr: UDFToInteger(_col0)
+                  type: int
+                  expr: _col1
+                  type: string
+            outputColumnNames: _col0, _col1
+            Statistics:
+                numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 2
+              Statistics:
+                  numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types int:string
+#### A masked pattern was here ####
+                    name default.bucket2_1
+                    serialization.ddl struct bucket2_1 { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.bucket2_1
+              TotalFiles: 2
+              GatherStats: true
+              MultiFileSpray: true
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.bucket2_1
+                serialization.ddl struct bucket2_1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.bucket2_1
+#### A masked pattern was here ####
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table bucket2_1
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bucket2_1
+POSTHOOK: query: insert overwrite table bucket2_1
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bucket2_1
+POSTHOOK: Lineage: bucket2_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket2_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain
+select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: bucket2_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket2_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME bucket2_1) (TOK_TABLEBUCKETSAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Alias -> Map Operator Tree:
+        s 
+          TableScan
+            alias: s
+            Filter Operator
+              predicate:
+                  expr: (((hash(key) & 2147483647) % 2) = 0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: int
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: int
+                  sort order: +
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: int
+                        expr: _col1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bucket2_1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bucket2_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: bucket2_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket2_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+0	val_0
+0	val_0
+0	val_0
+2	val_2
+4	val_4
+8	val_8
+10	val_10
+12	val_12
+12	val_12
+18	val_18
+18	val_18
+20	val_20
+24	val_24
+24	val_24
+26	val_26
+26	val_26
+28	val_28
+30	val_30
+34	val_34
+42	val_42
+42	val_42
+44	val_44
+54	val_54
+58	val_58
+58	val_58
+64	val_64
+66	val_66
+70	val_70
+70	val_70
+70	val_70
+72	val_72
+72	val_72
+74	val_74
+76	val_76
+76	val_76
+78	val_78
+80	val_80
+82	val_82
+84	val_84
+84	val_84
+86	val_86
+90	val_90
+90	val_90
+90	val_90
+92	val_92
+96	val_96
+98	val_98
+98	val_98
+100	val_100
+100	val_100
+104	val_104
+104	val_104
+114	val_114
+116	val_116
+118	val_118
+118	val_118
+120	val_120
+120	val_120
+126	val_126
+128	val_128
+128	val_128
+128	val_128
+134	val_134
+134	val_134
+136	val_136
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+146	val_146
+146	val_146
+150	val_150
+152	val_152
+152	val_152
+156	val_156
+158	val_158
+160	val_160
+162	val_162
+164	val_164
+164	val_164
+166	val_166
+168	val_168
+170	val_170
+172	val_172
+172	val_172
+174	val_174
+174	val_174
+176	val_176
+176	val_176
+178	val_178
+180	val_180
+186	val_186
+190	val_190
+192	val_192
+194	val_194
+196	val_196
+200	val_200
+200	val_200
+202	val_202
+208	val_208
+208	val_208
+208	val_208
+214	val_214
+216	val_216
+216	val_216
+218	val_218
+222	val_222
+224	val_224
+224	val_224
+226	val_226
+228	val_228
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+238	val_238
+238	val_238
+242	val_242
+242	val_242
+244	val_244
+248	val_248
+252	val_252
+256	val_256
+256	val_256
+258	val_258
+260	val_260
+262	val_262
+266	val_266
+272	val_272
+272	val_272
+274	val_274
+278	val_278
+278	val_278
+280	val_280
+280	val_280
+282	val_282
+282	val_282
+284	val_284
+286	val_286
+288	val_288
+288	val_288
+292	val_292
+296	val_296
+298	val_298
+298	val_298
+298	val_298
+302	val_302
+306	val_306
+308	val_308
+310	val_310
+316	val_316
+316	val_316
+316	val_316
+318	val_318
+318	val_318
+318	val_318
+322	val_322
+322	val_322
+332	val_332
+336	val_336
+338	val_338
+342	val_342
+342	val_342
+344	val_344
+344	val_344
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+356	val_356
+360	val_360
+362	val_362
+364	val_364
+366	val_366
+368	val_368
+374	val_374
+378	val_378
+382	val_382
+382	val_382
+384	val_384
+384	val_384
+384	val_384
+386	val_386
+392	val_392
+394	val_394
+396	val_396
+396	val_396
+396	val_396
+400	val_400
+402	val_402
+404	val_404
+404	val_404
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+414	val_414
+414	val_414
+418	val_418
+424	val_424
+424	val_424
+430	val_430
+430	val_430
+430	val_430
+432	val_432
+436	val_436
+438	val_438
+438	val_438
+438	val_438
+444	val_444
+446	val_446
+448	val_448
+452	val_452
+454	val_454
+454	val_454
+454	val_454
+458	val_458
+458	val_458
+460	val_460
+462	val_462
+462	val_462
+466	val_466
+466	val_466
+466	val_466
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+470	val_470
+472	val_472
+478	val_478
+478	val_478
+480	val_480
+480	val_480
+480	val_480
+482	val_482
+484	val_484
+490	val_490
+492	val_492
+492	val_492
+494	val_494
+496	val_496
+498	val_498
+498	val_498
+498	val_498

Added: hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket3.q.out?rev=1551769&view=auto
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket3.q.out (added)
+++ hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket3.q.out Tue Dec 17 23:54:59 2013
@@ -0,0 +1,528 @@
+PREHOOK: query: CREATE TABLE bucket3_1(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE bucket3_1(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@bucket3_1
+PREHOOK: query: explain extended
+insert overwrite table bucket3_1 partition (ds='1')
+select * from src
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+insert overwrite table bucket3_1 partition (ds='1')
+select * from src
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME bucket3_1) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Statistics:
+                numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              Statistics:
+                  numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+              Reduce Output Operator
+                sort order: 
+                Map-reduce partition columns:
+                      expr: UDFToInteger(_col0)
+                      type: int
+                Statistics:
+                    numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+                tag: -1
+                value expressions:
+                      expr: _col0
+                      type: string
+                      expr: _col1
+                      type: string
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE true
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Extract
+          Statistics:
+              numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+          Select Operator
+            expressions:
+                  expr: UDFToInteger(_col0)
+                  type: int
+                  expr: _col1
+                  type: string
+            outputColumnNames: _col0, _col1
+            Statistics:
+                numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 2
+              Static Partition Specification: ds=1/
+              Statistics:
+                  numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types int:string
+#### A masked pattern was here ####
+                    name default.bucket3_1
+                    partition_columns ds
+                    serialization.ddl struct bucket3_1 { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.bucket3_1
+              TotalFiles: 2
+              GatherStats: true
+              MultiFileSpray: true
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 1
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.bucket3_1
+                partition_columns ds
+                serialization.ddl struct bucket3_1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.bucket3_1
+#### A masked pattern was here ####
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table bucket3_1 partition (ds='1')
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bucket3_1@ds=1
+POSTHOOK: query: insert overwrite table bucket3_1 partition (ds='1')
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bucket3_1@ds=1
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table bucket3_1 partition (ds='2')
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bucket3_1@ds=2
+POSTHOOK: query: insert overwrite table bucket3_1 partition (ds='2')
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bucket3_1@ds=2
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain
+select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME bucket3_1) (TOK_TABLEBUCKETSAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Alias -> Map Operator Tree:
+        s 
+          TableScan
+            alias: s
+            Filter Operator
+              predicate:
+                  expr: (((hash(key) & 2147483647) % 2) = 0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: int
+                      expr: value
+                      type: string
+                      expr: ds
+                      type: string
+                outputColumnNames: _col0, _col1, _col2
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: int
+                  sort order: +
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: int
+                        expr: _col1
+                        type: string
+                        expr: _col2
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bucket3_1
+PREHOOK: Input: default@bucket3_1@ds=1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bucket3_1
+POSTHOOK: Input: default@bucket3_1@ds=1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket3_1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+0	val_0	1
+0	val_0	1
+0	val_0	1
+2	val_2	1
+4	val_4	1
+8	val_8	1
+10	val_10	1
+12	val_12	1
+12	val_12	1
+18	val_18	1
+18	val_18	1
+20	val_20	1
+24	val_24	1
+24	val_24	1
+26	val_26	1
+26	val_26	1
+28	val_28	1
+30	val_30	1
+34	val_34	1
+42	val_42	1
+42	val_42	1
+44	val_44	1
+54	val_54	1
+58	val_58	1
+58	val_58	1
+64	val_64	1
+66	val_66	1
+70	val_70	1
+70	val_70	1
+70	val_70	1
+72	val_72	1
+72	val_72	1
+74	val_74	1
+76	val_76	1
+76	val_76	1
+78	val_78	1
+80	val_80	1
+82	val_82	1
+84	val_84	1
+84	val_84	1
+86	val_86	1
+90	val_90	1
+90	val_90	1
+90	val_90	1
+92	val_92	1
+96	val_96	1
+98	val_98	1
+98	val_98	1
+100	val_100	1
+100	val_100	1
+104	val_104	1
+104	val_104	1
+114	val_114	1
+116	val_116	1
+118	val_118	1
+118	val_118	1
+120	val_120	1
+120	val_120	1
+126	val_126	1
+128	val_128	1
+128	val_128	1
+128	val_128	1
+134	val_134	1
+134	val_134	1
+136	val_136	1
+138	val_138	1
+138	val_138	1
+138	val_138	1
+138	val_138	1
+146	val_146	1
+146	val_146	1
+150	val_150	1
+152	val_152	1
+152	val_152	1
+156	val_156	1
+158	val_158	1
+160	val_160	1
+162	val_162	1
+164	val_164	1
+164	val_164	1
+166	val_166	1
+168	val_168	1
+170	val_170	1
+172	val_172	1
+172	val_172	1
+174	val_174	1
+174	val_174	1
+176	val_176	1
+176	val_176	1
+178	val_178	1
+180	val_180	1
+186	val_186	1
+190	val_190	1
+192	val_192	1
+194	val_194	1
+196	val_196	1
+200	val_200	1
+200	val_200	1
+202	val_202	1
+208	val_208	1
+208	val_208	1
+208	val_208	1
+214	val_214	1
+216	val_216	1
+216	val_216	1
+218	val_218	1
+222	val_222	1
+224	val_224	1
+224	val_224	1
+226	val_226	1
+228	val_228	1
+230	val_230	1
+230	val_230	1
+230	val_230	1
+230	val_230	1
+230	val_230	1
+238	val_238	1
+238	val_238	1
+242	val_242	1
+242	val_242	1
+244	val_244	1
+248	val_248	1
+252	val_252	1
+256	val_256	1
+256	val_256	1
+258	val_258	1
+260	val_260	1
+262	val_262	1
+266	val_266	1
+272	val_272	1
+272	val_272	1
+274	val_274	1
+278	val_278	1
+278	val_278	1
+280	val_280	1
+280	val_280	1
+282	val_282	1
+282	val_282	1
+284	val_284	1
+286	val_286	1
+288	val_288	1
+288	val_288	1
+292	val_292	1
+296	val_296	1
+298	val_298	1
+298	val_298	1
+298	val_298	1
+302	val_302	1
+306	val_306	1
+308	val_308	1
+310	val_310	1
+316	val_316	1
+316	val_316	1
+316	val_316	1
+318	val_318	1
+318	val_318	1
+318	val_318	1
+322	val_322	1
+322	val_322	1
+332	val_332	1
+336	val_336	1
+338	val_338	1
+342	val_342	1
+342	val_342	1
+344	val_344	1
+344	val_344	1
+348	val_348	1
+348	val_348	1
+348	val_348	1
+348	val_348	1
+348	val_348	1
+356	val_356	1
+360	val_360	1
+362	val_362	1
+364	val_364	1
+366	val_366	1
+368	val_368	1
+374	val_374	1
+378	val_378	1
+382	val_382	1
+382	val_382	1
+384	val_384	1
+384	val_384	1
+384	val_384	1
+386	val_386	1
+392	val_392	1
+394	val_394	1
+396	val_396	1
+396	val_396	1
+396	val_396	1
+400	val_400	1
+402	val_402	1
+404	val_404	1
+404	val_404	1
+406	val_406	1
+406	val_406	1
+406	val_406	1
+406	val_406	1
+414	val_414	1
+414	val_414	1
+418	val_418	1
+424	val_424	1
+424	val_424	1
+430	val_430	1
+430	val_430	1
+430	val_430	1
+432	val_432	1
+436	val_436	1
+438	val_438	1
+438	val_438	1
+438	val_438	1
+444	val_444	1
+446	val_446	1
+448	val_448	1
+452	val_452	1
+454	val_454	1
+454	val_454	1
+454	val_454	1
+458	val_458	1
+458	val_458	1
+460	val_460	1
+462	val_462	1
+462	val_462	1
+466	val_466	1
+466	val_466	1
+466	val_466	1
+468	val_468	1
+468	val_468	1
+468	val_468	1
+468	val_468	1
+470	val_470	1
+472	val_472	1
+478	val_478	1
+478	val_478	1
+480	val_480	1
+480	val_480	1
+480	val_480	1
+482	val_482	1
+484	val_484	1
+490	val_490	1
+492	val_492	1
+492	val_492	1
+494	val_494	1
+496	val_496	1
+498	val_498	1
+498	val_498	1
+498	val_498	1

Added: hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket4.q.out?rev=1551769&view=auto
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket4.q.out (added)
+++ hive/branches/tez/ql/src/test/results/clientpositive/tez/bucket4.q.out Tue Dec 17 23:54:59 2013
@@ -0,0 +1,491 @@
+PREHOOK: query: CREATE TABLE bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@bucket4_1
+PREHOOK: query: explain extended
+insert overwrite table bucket4_1
+select * from src
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+insert overwrite table bucket4_1
+select * from src
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME bucket4_1))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Statistics:
+                numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              Statistics:
+                  numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+              Reduce Output Operator
+                key expressions:
+                      expr: UDFToInteger(_col0)
+                      type: int
+                sort order: +
+                Map-reduce partition columns:
+                      expr: UDFToInteger(_col0)
+                      type: int
+                Statistics:
+                    numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+                tag: -1
+                value expressions:
+                      expr: _col0
+                      type: string
+                      expr: _col1
+                      type: string
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE true
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Extract
+          Statistics:
+              numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+          Select Operator
+            expressions:
+                  expr: UDFToInteger(_col0)
+                  type: int
+                  expr: _col1
+                  type: string
+            outputColumnNames: _col0, _col1
+            Statistics:
+                numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 2
+              Statistics:
+                  numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    SORTBUCKETCOLSPREFIX TRUE
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types int:string
+#### A masked pattern was here ####
+                    name default.bucket4_1
+                    serialization.ddl struct bucket4_1 { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.bucket4_1
+              TotalFiles: 2
+              GatherStats: true
+              MultiFileSpray: true
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                SORTBUCKETCOLSPREFIX TRUE
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.bucket4_1
+                serialization.ddl struct bucket4_1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.bucket4_1
+#### A masked pattern was here ####
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table bucket4_1
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bucket4_1
+POSTHOOK: query: insert overwrite table bucket4_1
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bucket4_1
+POSTHOOK: Lineage: bucket4_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket4_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain
+select * from bucket4_1 tablesample (bucket 1 out of 2) s
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from bucket4_1 tablesample (bucket 1 out of 2) s
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: bucket4_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket4_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME bucket4_1) (TOK_TABLEBUCKETSAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Alias -> Map Operator Tree:
+        s 
+          TableScan
+            alias: s
+            Filter Operator
+              predicate:
+                  expr: (((hash(key) & 2147483647) % 2) = 0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: int
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bucket4_1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from bucket4_1 tablesample (bucket 1 out of 2) s
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bucket4_1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: bucket4_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket4_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+0	val_0
+0	val_0
+0	val_0
+2	val_2
+4	val_4
+8	val_8
+10	val_10
+12	val_12
+12	val_12
+18	val_18
+18	val_18
+20	val_20
+24	val_24
+24	val_24
+26	val_26
+26	val_26
+28	val_28
+30	val_30
+34	val_34
+42	val_42
+42	val_42
+44	val_44
+54	val_54
+58	val_58
+58	val_58
+64	val_64
+66	val_66
+70	val_70
+70	val_70
+70	val_70
+72	val_72
+72	val_72
+74	val_74
+76	val_76
+76	val_76
+78	val_78
+80	val_80
+82	val_82
+84	val_84
+84	val_84
+86	val_86
+90	val_90
+90	val_90
+90	val_90
+92	val_92
+96	val_96
+98	val_98
+98	val_98
+100	val_100
+100	val_100
+104	val_104
+104	val_104
+114	val_114
+116	val_116
+118	val_118
+118	val_118
+120	val_120
+120	val_120
+126	val_126
+128	val_128
+128	val_128
+128	val_128
+134	val_134
+134	val_134
+136	val_136
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+146	val_146
+146	val_146
+150	val_150
+152	val_152
+152	val_152
+156	val_156
+158	val_158
+160	val_160
+162	val_162
+164	val_164
+164	val_164
+166	val_166
+168	val_168
+170	val_170
+172	val_172
+172	val_172
+174	val_174
+174	val_174
+176	val_176
+176	val_176
+178	val_178
+180	val_180
+186	val_186
+190	val_190
+192	val_192
+194	val_194
+196	val_196
+200	val_200
+200	val_200
+202	val_202
+208	val_208
+208	val_208
+208	val_208
+214	val_214
+216	val_216
+216	val_216
+218	val_218
+222	val_222
+224	val_224
+224	val_224
+226	val_226
+228	val_228
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+238	val_238
+238	val_238
+242	val_242
+242	val_242
+244	val_244
+248	val_248
+252	val_252
+256	val_256
+256	val_256
+258	val_258
+260	val_260
+262	val_262
+266	val_266
+272	val_272
+272	val_272
+274	val_274
+278	val_278
+278	val_278
+280	val_280
+280	val_280
+282	val_282
+282	val_282
+284	val_284
+286	val_286
+288	val_288
+288	val_288
+292	val_292
+296	val_296
+298	val_298
+298	val_298
+298	val_298
+302	val_302
+306	val_306
+308	val_308
+310	val_310
+316	val_316
+316	val_316
+316	val_316
+318	val_318
+318	val_318
+318	val_318
+322	val_322
+322	val_322
+332	val_332
+336	val_336
+338	val_338
+342	val_342
+342	val_342
+344	val_344
+344	val_344
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+356	val_356
+360	val_360
+362	val_362
+364	val_364
+366	val_366
+368	val_368
+374	val_374
+378	val_378
+382	val_382
+382	val_382
+384	val_384
+384	val_384
+384	val_384
+386	val_386
+392	val_392
+394	val_394
+396	val_396
+396	val_396
+396	val_396
+400	val_400
+402	val_402
+404	val_404
+404	val_404
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+414	val_414
+414	val_414
+418	val_418
+424	val_424
+424	val_424
+430	val_430
+430	val_430
+430	val_430
+432	val_432
+436	val_436
+438	val_438
+438	val_438
+438	val_438
+444	val_444
+446	val_446
+448	val_448
+452	val_452
+454	val_454
+454	val_454
+454	val_454
+458	val_458
+458	val_458
+460	val_460
+462	val_462
+462	val_462
+466	val_466
+466	val_466
+466	val_466
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+470	val_470
+472	val_472
+478	val_478
+478	val_478
+480	val_480
+480	val_480
+480	val_480
+482	val_482
+484	val_484
+490	val_490
+492	val_492
+492	val_492
+494	val_494
+496	val_496
+498	val_498
+498	val_498
+498	val_498


