hive-commits mailing list archives

From: gunt...@apache.org
Subject: svn commit: r1517707 [12/17] - in /hive/branches/tez: ./ beeline/src/java/org/apache/hive/beeline/ bin/ bin/ext/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/apache/hadoop/hive/c...
Date: Mon, 26 Aug 2013 21:42:21 GMT
Modified: hive/branches/tez/ql/src/test/resources/orc-file-dump.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/resources/orc-file-dump.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/resources/orc-file-dump.out (original)
+++ hive/branches/tez/ql/src/test/resources/orc-file-dump.out Mon Aug 26 21:42:12 2013
@@ -11,73 +11,73 @@ Statistics:
   Column 3: count: 21000 min: Darkness, max: worst
 
 Stripes:
-  Stripe: offset: 3 data: 69605 rows: 5000 tail: 72 index: 119
+  Stripe: offset: 3 data: 63766 rows: 5000 tail: 74 index: 119
     Stream: column 0 section ROW_INDEX start: 3 length 10
     Stream: column 1 section ROW_INDEX start: 13 length 35
     Stream: column 2 section ROW_INDEX start: 48 length 39
     Stream: column 3 section ROW_INDEX start: 87 length 35
-    Stream: column 1 section DATA start: 122 length 22605
-    Stream: column 2 section DATA start: 22727 length 43426
-    Stream: column 3 section DATA start: 66153 length 3403
-    Stream: column 3 section LENGTH start: 69556 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 69594 length 133
+    Stream: column 1 section DATA start: 122 length 20029
+    Stream: column 2 section DATA start: 20151 length 40035
+    Stream: column 3 section DATA start: 60186 length 3544
+    Stream: column 3 section LENGTH start: 63730 length 25
+    Stream: column 3 section DICTIONARY_DATA start: 63755 length 133
     Encoding column 0: DIRECT
-    Encoding column 1: DIRECT
-    Encoding column 2: DIRECT
-    Encoding column 3: DICTIONARY[35]
-  Stripe: offset: 69799 data: 69584 rows: 5000 tail: 73 index: 118
-    Stream: column 0 section ROW_INDEX start: 69799 length 10
-    Stream: column 1 section ROW_INDEX start: 69809 length 34
-    Stream: column 2 section ROW_INDEX start: 69843 length 39
-    Stream: column 3 section ROW_INDEX start: 69882 length 35
-    Stream: column 1 section DATA start: 69917 length 22597
-    Stream: column 2 section DATA start: 92514 length 43439
-    Stream: column 3 section DATA start: 135953 length 3377
-    Stream: column 3 section LENGTH start: 139330 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 139368 length 133
+    Encoding column 1: DIRECT_V2
+    Encoding column 2: DIRECT_V2
+    Encoding column 3: DICTIONARY_V2
+  Stripe: offset: 63962 data: 63755 rows: 5000 tail: 76 index: 118
+    Stream: column 0 section ROW_INDEX start: 63962 length 10
+    Stream: column 1 section ROW_INDEX start: 63972 length 34
+    Stream: column 2 section ROW_INDEX start: 64006 length 39
+    Stream: column 3 section ROW_INDEX start: 64045 length 35
+    Stream: column 1 section DATA start: 64080 length 20029
+    Stream: column 2 section DATA start: 84109 length 40035
+    Stream: column 3 section DATA start: 124144 length 3533
+    Stream: column 3 section LENGTH start: 127677 length 25
+    Stream: column 3 section DICTIONARY_DATA start: 127702 length 133
     Encoding column 0: DIRECT
-    Encoding column 1: DIRECT
-    Encoding column 2: DIRECT
-    Encoding column 3: DICTIONARY[35]
-  Stripe: offset: 139574 data: 69570 rows: 5000 tail: 73 index: 120
-    Stream: column 0 section ROW_INDEX start: 139574 length 10
-    Stream: column 1 section ROW_INDEX start: 139584 length 36
-    Stream: column 2 section ROW_INDEX start: 139620 length 39
-    Stream: column 3 section ROW_INDEX start: 139659 length 35
-    Stream: column 1 section DATA start: 139694 length 22594
-    Stream: column 2 section DATA start: 162288 length 43415
-    Stream: column 3 section DATA start: 205703 length 3390
-    Stream: column 3 section LENGTH start: 209093 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 209131 length 133
+    Encoding column 1: DIRECT_V2
+    Encoding column 2: DIRECT_V2
+    Encoding column 3: DICTIONARY_V2
+  Stripe: offset: 127911 data: 63766 rows: 5000 tail: 76 index: 120
+    Stream: column 0 section ROW_INDEX start: 127911 length 10
+    Stream: column 1 section ROW_INDEX start: 127921 length 36
+    Stream: column 2 section ROW_INDEX start: 127957 length 39
+    Stream: column 3 section ROW_INDEX start: 127996 length 35
+    Stream: column 1 section DATA start: 128031 length 20029
+    Stream: column 2 section DATA start: 148060 length 40035
+    Stream: column 3 section DATA start: 188095 length 3544
+    Stream: column 3 section LENGTH start: 191639 length 25
+    Stream: column 3 section DICTIONARY_DATA start: 191664 length 133
     Encoding column 0: DIRECT
-    Encoding column 1: DIRECT
-    Encoding column 2: DIRECT
-    Encoding column 3: DICTIONARY[35]
-  Stripe: offset: 209337 data: 69551 rows: 5000 tail: 72 index: 119
-    Stream: column 0 section ROW_INDEX start: 209337 length 10
-    Stream: column 1 section ROW_INDEX start: 209347 length 35
-    Stream: column 2 section ROW_INDEX start: 209382 length 39
-    Stream: column 3 section ROW_INDEX start: 209421 length 35
-    Stream: column 1 section DATA start: 209456 length 22575
-    Stream: column 2 section DATA start: 232031 length 43426
-    Stream: column 3 section DATA start: 275457 length 3379
-    Stream: column 3 section LENGTH start: 278836 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 278874 length 133
+    Encoding column 1: DIRECT_V2
+    Encoding column 2: DIRECT_V2
+    Encoding column 3: DICTIONARY_V2
+  Stripe: offset: 191873 data: 63796 rows: 5000 tail: 74 index: 119
+    Stream: column 0 section ROW_INDEX start: 191873 length 10
+    Stream: column 1 section ROW_INDEX start: 191883 length 35
+    Stream: column 2 section ROW_INDEX start: 191918 length 39
+    Stream: column 3 section ROW_INDEX start: 191957 length 35
+    Stream: column 1 section DATA start: 191992 length 20029
+    Stream: column 2 section DATA start: 212021 length 40035
+    Stream: column 3 section DATA start: 252056 length 3574
+    Stream: column 3 section LENGTH start: 255630 length 25
+    Stream: column 3 section DICTIONARY_DATA start: 255655 length 133
     Encoding column 0: DIRECT
-    Encoding column 1: DIRECT
-    Encoding column 2: DIRECT
-    Encoding column 3: DICTIONARY[35]
-  Stripe: offset: 279079 data: 14096 rows: 1000 tail: 68 index: 120
-    Stream: column 0 section ROW_INDEX start: 279079 length 10
-    Stream: column 1 section ROW_INDEX start: 279089 length 36
-    Stream: column 2 section ROW_INDEX start: 279125 length 39
-    Stream: column 3 section ROW_INDEX start: 279164 length 35
-    Stream: column 1 section DATA start: 279199 length 4529
-    Stream: column 2 section DATA start: 283728 length 8690
-    Stream: column 3 section DATA start: 292418 length 706
-    Stream: column 3 section LENGTH start: 293124 length 38
-    Stream: column 3 section DICTIONARY_DATA start: 293162 length 133
+    Encoding column 1: DIRECT_V2
+    Encoding column 2: DIRECT_V2
+    Encoding column 3: DICTIONARY_V2
+  Stripe: offset: 255862 data: 12940 rows: 1000 tail: 71 index: 120
+    Stream: column 0 section ROW_INDEX start: 255862 length 10
+    Stream: column 1 section ROW_INDEX start: 255872 length 36
+    Stream: column 2 section ROW_INDEX start: 255908 length 39
+    Stream: column 3 section ROW_INDEX start: 255947 length 35
+    Stream: column 1 section DATA start: 255982 length 4007
+    Stream: column 2 section DATA start: 259989 length 8007
+    Stream: column 3 section DATA start: 267996 length 768
+    Stream: column 3 section LENGTH start: 268764 length 25
+    Stream: column 3 section DICTIONARY_DATA start: 268789 length 133
     Encoding column 0: DIRECT
-    Encoding column 1: DIRECT
-    Encoding column 2: DIRECT
-    Encoding column 3: DICTIONARY[35]
+    Encoding column 1: DIRECT_V2
+    Encoding column 2: DIRECT_V2
+    Encoding column 3: DICTIONARY_V2
\ No newline at end of file

Modified: hive/branches/tez/ql/src/test/results/clientnegative/fs_default_name1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/fs_default_name1.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/fs_default_name1.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/fs_default_name1.q.out Mon Aug 26 21:42:12 2013
@@ -1 +1 @@
-FAILED: IllegalArgumentException null
+FAILED: IllegalArgumentException Illegal character in scheme name at index 0: 'http://www.example.com

Modified: hive/branches/tez/ql/src/test/results/clientnegative/fs_default_name2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/fs_default_name2.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/fs_default_name2.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/fs_default_name2.q.out Mon Aug 26 21:42:12 2013
@@ -1 +1 @@
-FAILED: IllegalArgumentException null
+FAILED: IllegalArgumentException Illegal character in scheme name at index 0: 'http://www.example.com

Modified: hive/branches/tez/ql/src/test/results/clientnegative/lateral_view_join.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/lateral_view_join.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/lateral_view_join.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/lateral_view_join.q.out Mon Aug 26 21:42:12 2013
@@ -1,2 +1,2 @@
-FAILED: ParseException line 1:62 missing AS at 'myTable' near '<EOF>'
+FAILED: ParseException line 1:62 missing EOF at 'myTable' near 'AS'
 

Modified: hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_HavingLeadWithNoGBYNoWindowing.q.out Mon Aug 26 21:42:12 2013
@@ -27,10 +27,4 @@ POSTHOOK: query: CREATE TABLE part( 
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
-PREHOOK: type: LOAD
-PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
-POSTHOOK: type: LOAD
-POSTHOOK: Output: default@part
 FAILED: SemanticException HAVING specified without GROUP BY

Modified: hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_WhereWithRankCond.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_WhereWithRankCond.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_WhereWithRankCond.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/ptf_negative_WhereWithRankCond.q.out Mon Aug 26 21:42:12 2013
@@ -27,10 +27,4 @@ POSTHOOK: query: CREATE TABLE part( 
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
-PREHOOK: type: LOAD
-PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part
-POSTHOOK: type: LOAD
-POSTHOOK: Output: default@part
 FAILED: SemanticException [Error 10004]: Line 7:6 Invalid table alias or column reference 'r': (possible column names are: p_partkey, p_name, p_mfgr, p_brand, p_type, p_size, p_container, p_retailprice, p_comment)

Modified: hive/branches/tez/ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out Mon Aug 26 21:42:12 2013
@@ -34,4 +34,4 @@ POSTHOOK: Lineage: smb_bucket4_1.key EXP
 POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: smb_bucket4_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-FAILED: SemanticException [Error 10057]: MAPJOIN cannot be performed with OUTER JOIN
+FAILED: SemanticException [Error 10246]: b table chosen for streaming is not valid

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_join14.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_join14.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_join14.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_join14.q.out Mon Aug 26 21:42:12 2013
@@ -37,7 +37,7 @@ STAGE PLANS:
             alias: src
             Filter Operator
               predicate:
-                  expr: (key > 100.0)
+                  expr: (key > 100)
                   type: boolean
               HashTable Sink Operator
                 condition expressions:
@@ -57,7 +57,7 @@ STAGE PLANS:
             alias: srcpart
             Filter Operator
               predicate:
-                  expr: (key > 100.0)
+                  expr: (key > 100)
                   type: boolean
               Map Join Operator
                 condition map:

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out Mon Aug 26 21:42:12 2013
@@ -503,6 +503,53 @@ STAGE PLANS:
         b 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 114
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 2
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 114
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
       Alias -> Map Local Operator Tree:
         b 
           TableScan
@@ -738,6 +785,101 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 5812
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 8
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 11624
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
+                Partition
+                  base file name: ds=2008-04-09
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-09
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 5812
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 8
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 11624
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
       Alias -> Map Local Operator Tree:
         a 
           TableScan

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out Mon Aug 26 21:42:12 2013
@@ -94,6 +94,53 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 114
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 2
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 114
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
       Alias -> Map Local Operator Tree:
         a 
           TableScan
@@ -370,6 +417,53 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 114
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 2
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 114
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
       Alias -> Map Local Operator Tree:
         a 
           TableScan
@@ -640,6 +734,52 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 114
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 2
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 114
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
       Alias -> Map Local Operator Tree:
         a 
           TableScan

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out Mon Aug 26 21:42:12 2013
@@ -122,12 +122,156 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 114
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 2
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 114
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
         b 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 3
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_medium
+                    numFiles 3
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_medium { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 170
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 3
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_medium
+                      numFiles 3
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_medium { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 170
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_medium
+                  name: default.bucket_medium
         d 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 3
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_medium
+                    numFiles 3
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_medium { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 170
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 3
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_medium
+                      numFiles 3
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_medium { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 170
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_medium
+                  name: default.bucket_medium
       Alias -> Map Local Operator Tree:
         a 
           TableScan

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out Mon Aug 26 21:42:12 2013
@@ -290,6 +290,53 @@ STAGE PLANS:
         b 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 226
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 4
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 226
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
       Alias -> Map Local Operator Tree:
         b 
           TableScan
@@ -525,6 +572,101 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 2750
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 4
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 5500
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
+                Partition
+                  base file name: ds=2008-04-09
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-09
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 2750
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 4
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 5500
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
       Alias -> Map Local Operator Tree:
         a 
           TableScan

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out Mon Aug 26 21:42:12 2013
@@ -389,6 +389,99 @@ STAGE PLANS:
         b 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 114
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 4
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 228
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 114
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 4
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 228
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
       Alias -> Map Local Operator Tree:
         b 
           TableScan
@@ -622,6 +715,54 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 5812
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 4
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 5812
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
       Alias -> Map Local Operator Tree:
         a 
           TableScan

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out Mon Aug 26 21:42:12 2013
@@ -401,6 +401,99 @@ STAGE PLANS:
         b 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 226
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 8
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 452
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 226
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 8
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 452
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
       Alias -> Map Local Operator Tree:
         b 
           TableScan
@@ -634,6 +727,54 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 2750
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 2
+                      numPartitions 1
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 2750
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
       Alias -> Map Local Operator Tree:
         a 
           TableScan

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out Mon Aug 26 21:42:12 2013
@@ -516,6 +516,99 @@ STAGE PLANS:
         b 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 226
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 8
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 452
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 226
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 8
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 452
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
       Alias -> Map Local Operator Tree:
         b 
           TableScan
@@ -798,6 +891,101 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 2750
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 4
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 5500
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
+                Partition
+                  base file name: ds=2008-04-09
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-09
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 2750
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 4
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 5500
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
       Alias -> Map Local Operator Tree:
         a 
           TableScan

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out?rev=1517707&r1=1517706&r2=1517707&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out Mon Aug 26 21:42:12 2013
@@ -518,6 +518,99 @@ STAGE PLANS:
         b 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 114
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 4
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 228
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
+                Partition
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_small
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_small { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 114
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 2
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_small
+                      numFiles 4
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_small { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 228
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_small
+                  name: default.bucket_small
       Alias -> Map Local Operator Tree:
         b 
           TableScan
@@ -800,6 +893,101 @@ STAGE PLANS:
         a 
           Fetch Operator
             limit: -1
+            Partition Description:
+                Partition
+                  base file name: ds=2008-04-08
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 5812
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 8
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 11624
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
+                Partition
+                  base file name: ds=2008-04-09
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-09
+                  properties:
+                    bucket_count 4
+                    bucket_field_name key
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.bucket_big
+                    numFiles 4
+                    numRows 0
+                    partition_columns ds
+                    rawDataSize 0
+                    serialization.ddl struct bucket_big { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 5812
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      SORTBUCKETCOLSPREFIX TRUE
+                      bucket_count 4
+                      bucket_field_name key
+                      columns key,value
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.bucket_big
+                      numFiles 8
+                      numPartitions 2
+                      numRows 0
+                      partition_columns ds
+                      rawDataSize 0
+                      serialization.ddl struct bucket_big { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 11624
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.bucket_big
+                  name: default.bucket_big
       Alias -> Map Local Operator Tree:
         a 
           TableScan


