hive-commits mailing list archives

From: c..@apache.org
Subject: svn commit: r1409077 [3/3] - in /hive/trunk: ./ common/ eclipse-templates/ hbase-handler/ ivy/ ql/ ql/src/test/org/apache/hadoop/hive/ql/ ql/src/test/queries/clientpositive/ ql/src/test/resources/ ql/src/test/results/clientnegative/ ql/src/test/results...
Date: Wed, 14 Nov 2012 03:51:57 GMT
Added: hive/trunk/ql/src/test/results/clientpositive/input12_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/input12_hadoop20.q.out?rev=1409077&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/input12_hadoop20.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/input12_hadoop20.q.out Wed Nov 14 03:51:52 2012
@@ -0,0 +1,868 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest2
+PREHOOK: query: CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest3
+PREHOOK: query: EXPLAIN
+FROM src 
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src 
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 100) (< (. (TOK_TABLE_OR_COL src) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key))) (TOK_WHERE (>= (. (TOK_TABLE_OR_COL src) key) 200))))
+
+STAGE DEPENDENCIES:
+  Stage-3 is a root stage
+  Stage-9 depends on stages: Stage-3 , consists of Stage-6, Stage-5, Stage-7
+  Stage-6
+  Stage-0 depends on stages: Stage-6, Stage-5, Stage-8
+  Stage-4 depends on stages: Stage-0
+  Stage-5
+  Stage-7
+  Stage-8 depends on stages: Stage-7
+  Stage-15 depends on stages: Stage-3 , consists of Stage-12, Stage-11, Stage-13
+  Stage-12
+  Stage-1 depends on stages: Stage-12, Stage-11, Stage-14
+  Stage-10 depends on stages: Stage-1
+  Stage-11
+  Stage-13
+  Stage-14 depends on stages: Stage-13
+  Stage-21 depends on stages: Stage-3 , consists of Stage-18, Stage-17, Stage-19
+  Stage-18
+  Stage-2 depends on stages: Stage-18, Stage-17, Stage-20
+  Stage-16 depends on stages: Stage-2
+  Stage-17
+  Stage-19
+  Stage-20 depends on stages: Stage-19
+
+STAGE PLANS:
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Filter Operator
+              predicate:
+                  expr: (key < 100.0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                Select Operator
+                  expressions:
+                        expr: UDFToInteger(_col0)
+                        type: int
+                        expr: _col1
+                        type: string
+                  outputColumnNames: _col0, _col1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 1
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest1
+            Filter Operator
+              predicate:
+                  expr: ((key >= 100.0) and (key < 200.0))
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                outputColumnNames: _col0, _col1
+                Select Operator
+                  expressions:
+                        expr: UDFToInteger(_col0)
+                        type: int
+                        expr: _col1
+                        type: string
+                  outputColumnNames: _col0, _col1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 2
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest2
+            Filter Operator
+              predicate:
+                  expr: (key >= 200.0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                outputColumnNames: _col0
+                Select Operator
+                  expressions:
+                        expr: UDFToInteger(_col0)
+                        type: int
+                  outputColumnNames: _col0
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 3
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        name: default.dest3
+
+  Stage: Stage-9
+    Conditional Operator
+
+  Stage: Stage-6
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-4
+    Stats-Aggr Operator
+
+  Stage: Stage-5
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.dest1
+
+  Stage: Stage-7
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.dest1
+
+  Stage: Stage-8
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-15
+    Conditional Operator
+
+  Stage: Stage-12
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest2
+
+  Stage: Stage-10
+    Stats-Aggr Operator
+
+  Stage: Stage-11
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.dest2
+
+  Stage: Stage-13
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.dest2
+
+  Stage: Stage-14
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-21
+    Conditional Operator
+
+  Stage: Stage-18
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-2
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 12
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest3
+
+  Stage: Stage-16
+    Stats-Aggr Operator
+
+  Stage: Stage-17
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.dest3
+
+  Stage: Stage-19
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.dest3
+
+  Stage: Stage-20
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+
+PREHOOK: query: FROM src 
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+PREHOOK: Output: default@dest2
+PREHOOK: Output: default@dest3@ds=2008-04-08/hr=12
+POSTHOOK: query: FROM src 
+INSERT OVERWRITE TABLE dest1 SELECT src.* WHERE src.key < 100
+INSERT OVERWRITE TABLE dest2 SELECT src.key, src.value WHERE src.key >= 100 and src.key < 200
+INSERT OVERWRITE TABLE dest3 PARTITION(ds='2008-04-08', hr='12') SELECT src.key WHERE src.key >= 200
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Output: default@dest2
+POSTHOOK: Output: default@dest3@ds=2008-04-08/hr=12
+POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+86	val_86
+27	val_27
+98	val_98
+66	val_66
+37	val_37
+15	val_15
+82	val_82
+17	val_17
+0	val_0
+57	val_57
+20	val_20
+92	val_92
+47	val_47
+72	val_72
+4	val_4
+35	val_35
+54	val_54
+51	val_51
+65	val_65
+83	val_83
+12	val_12
+67	val_67
+84	val_84
+58	val_58
+8	val_8
+24	val_24
+42	val_42
+0	val_0
+96	val_96
+26	val_26
+51	val_51
+43	val_43
+95	val_95
+98	val_98
+85	val_85
+77	val_77
+0	val_0
+87	val_87
+15	val_15
+72	val_72
+90	val_90
+19	val_19
+10	val_10
+5	val_5
+58	val_58
+35	val_35
+95	val_95
+11	val_11
+34	val_34
+42	val_42
+78	val_78
+76	val_76
+41	val_41
+30	val_30
+64	val_64
+76	val_76
+74	val_74
+69	val_69
+33	val_33
+70	val_70
+5	val_5
+2	val_2
+35	val_35
+80	val_80
+44	val_44
+53	val_53
+90	val_90
+12	val_12
+5	val_5
+70	val_70
+24	val_24
+70	val_70
+83	val_83
+26	val_26
+67	val_67
+18	val_18
+9	val_9
+18	val_18
+97	val_97
+84	val_84
+28	val_28
+37	val_37
+90	val_90
+97	val_97
+PREHOOK: query: SELECT dest2.* FROM dest2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest2.* FROM dest2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+165	val_165
+193	val_193
+150	val_150
+128	val_128
+146	val_146
+152	val_152
+145	val_145
+166	val_166
+153	val_153
+193	val_193
+174	val_174
+199	val_199
+174	val_174
+162	val_162
+167	val_167
+195	val_195
+113	val_113
+155	val_155
+128	val_128
+149	val_149
+129	val_129
+170	val_170
+157	val_157
+111	val_111
+169	val_169
+125	val_125
+192	val_192
+187	val_187
+176	val_176
+138	val_138
+103	val_103
+176	val_176
+137	val_137
+180	val_180
+181	val_181
+138	val_138
+179	val_179
+172	val_172
+129	val_129
+158	val_158
+119	val_119
+197	val_197
+100	val_100
+199	val_199
+191	val_191
+165	val_165
+120	val_120
+131	val_131
+156	val_156
+196	val_196
+197	val_197
+187	val_187
+137	val_137
+169	val_169
+179	val_179
+118	val_118
+134	val_134
+138	val_138
+118	val_118
+177	val_177
+168	val_168
+143	val_143
+160	val_160
+195	val_195
+119	val_119
+149	val_149
+138	val_138
+103	val_103
+113	val_113
+167	val_167
+116	val_116
+191	val_191
+128	val_128
+193	val_193
+104	val_104
+175	val_175
+105	val_105
+190	val_190
+114	val_114
+164	val_164
+125	val_125
+164	val_164
+187	val_187
+104	val_104
+163	val_163
+119	val_119
+199	val_199
+120	val_120
+169	val_169
+178	val_178
+136	val_136
+172	val_172
+133	val_133
+175	val_175
+189	val_189
+134	val_134
+100	val_100
+146	val_146
+186	val_186
+167	val_167
+183	val_183
+152	val_152
+194	val_194
+126	val_126
+169	val_169
+PREHOOK: query: SELECT dest3.* FROM dest3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest3@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT dest3.* FROM dest3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest3@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+238	2008-04-08	12
+311	2008-04-08	12
+409	2008-04-08	12
+255	2008-04-08	12
+278	2008-04-08	12
+484	2008-04-08	12
+265	2008-04-08	12
+401	2008-04-08	12
+273	2008-04-08	12
+224	2008-04-08	12
+369	2008-04-08	12
+213	2008-04-08	12
+406	2008-04-08	12
+429	2008-04-08	12
+374	2008-04-08	12
+469	2008-04-08	12
+495	2008-04-08	12
+327	2008-04-08	12
+281	2008-04-08	12
+277	2008-04-08	12
+209	2008-04-08	12
+403	2008-04-08	12
+417	2008-04-08	12
+430	2008-04-08	12
+252	2008-04-08	12
+292	2008-04-08	12
+219	2008-04-08	12
+287	2008-04-08	12
+338	2008-04-08	12
+446	2008-04-08	12
+459	2008-04-08	12
+394	2008-04-08	12
+237	2008-04-08	12
+482	2008-04-08	12
+413	2008-04-08	12
+494	2008-04-08	12
+207	2008-04-08	12
+466	2008-04-08	12
+208	2008-04-08	12
+399	2008-04-08	12
+396	2008-04-08	12
+247	2008-04-08	12
+417	2008-04-08	12
+489	2008-04-08	12
+377	2008-04-08	12
+397	2008-04-08	12
+309	2008-04-08	12
+365	2008-04-08	12
+266	2008-04-08	12
+439	2008-04-08	12
+342	2008-04-08	12
+367	2008-04-08	12
+325	2008-04-08	12
+475	2008-04-08	12
+203	2008-04-08	12
+339	2008-04-08	12
+455	2008-04-08	12
+311	2008-04-08	12
+316	2008-04-08	12
+302	2008-04-08	12
+205	2008-04-08	12
+438	2008-04-08	12
+345	2008-04-08	12
+489	2008-04-08	12
+378	2008-04-08	12
+221	2008-04-08	12
+280	2008-04-08	12
+427	2008-04-08	12
+277	2008-04-08	12
+208	2008-04-08	12
+356	2008-04-08	12
+399	2008-04-08	12
+382	2008-04-08	12
+498	2008-04-08	12
+386	2008-04-08	12
+437	2008-04-08	12
+469	2008-04-08	12
+286	2008-04-08	12
+459	2008-04-08	12
+239	2008-04-08	12
+213	2008-04-08	12
+216	2008-04-08	12
+430	2008-04-08	12
+278	2008-04-08	12
+289	2008-04-08	12
+221	2008-04-08	12
+318	2008-04-08	12
+332	2008-04-08	12
+311	2008-04-08	12
+275	2008-04-08	12
+241	2008-04-08	12
+333	2008-04-08	12
+284	2008-04-08	12
+230	2008-04-08	12
+260	2008-04-08	12
+404	2008-04-08	12
+384	2008-04-08	12
+489	2008-04-08	12
+353	2008-04-08	12
+373	2008-04-08	12
+272	2008-04-08	12
+217	2008-04-08	12
+348	2008-04-08	12
+466	2008-04-08	12
+411	2008-04-08	12
+230	2008-04-08	12
+208	2008-04-08	12
+348	2008-04-08	12
+463	2008-04-08	12
+431	2008-04-08	12
+496	2008-04-08	12
+322	2008-04-08	12
+468	2008-04-08	12
+393	2008-04-08	12
+454	2008-04-08	12
+298	2008-04-08	12
+418	2008-04-08	12
+327	2008-04-08	12
+230	2008-04-08	12
+205	2008-04-08	12
+404	2008-04-08	12
+436	2008-04-08	12
+469	2008-04-08	12
+468	2008-04-08	12
+308	2008-04-08	12
+288	2008-04-08	12
+481	2008-04-08	12
+457	2008-04-08	12
+282	2008-04-08	12
+318	2008-04-08	12
+318	2008-04-08	12
+409	2008-04-08	12
+470	2008-04-08	12
+369	2008-04-08	12
+316	2008-04-08	12
+413	2008-04-08	12
+490	2008-04-08	12
+364	2008-04-08	12
+395	2008-04-08	12
+282	2008-04-08	12
+238	2008-04-08	12
+419	2008-04-08	12
+307	2008-04-08	12
+435	2008-04-08	12
+277	2008-04-08	12
+273	2008-04-08	12
+306	2008-04-08	12
+224	2008-04-08	12
+309	2008-04-08	12
+389	2008-04-08	12
+327	2008-04-08	12
+242	2008-04-08	12
+369	2008-04-08	12
+392	2008-04-08	12
+272	2008-04-08	12
+331	2008-04-08	12
+401	2008-04-08	12
+242	2008-04-08	12
+452	2008-04-08	12
+226	2008-04-08	12
+497	2008-04-08	12
+402	2008-04-08	12
+396	2008-04-08	12
+317	2008-04-08	12
+395	2008-04-08	12
+336	2008-04-08	12
+229	2008-04-08	12
+233	2008-04-08	12
+472	2008-04-08	12
+322	2008-04-08	12
+498	2008-04-08	12
+321	2008-04-08	12
+430	2008-04-08	12
+489	2008-04-08	12
+458	2008-04-08	12
+223	2008-04-08	12
+492	2008-04-08	12
+449	2008-04-08	12
+218	2008-04-08	12
+228	2008-04-08	12
+453	2008-04-08	12
+209	2008-04-08	12
+468	2008-04-08	12
+342	2008-04-08	12
+230	2008-04-08	12
+368	2008-04-08	12
+296	2008-04-08	12
+216	2008-04-08	12
+367	2008-04-08	12
+344	2008-04-08	12
+274	2008-04-08	12
+219	2008-04-08	12
+239	2008-04-08	12
+485	2008-04-08	12
+223	2008-04-08	12
+256	2008-04-08	12
+263	2008-04-08	12
+487	2008-04-08	12
+480	2008-04-08	12
+401	2008-04-08	12
+288	2008-04-08	12
+244	2008-04-08	12
+438	2008-04-08	12
+467	2008-04-08	12
+432	2008-04-08	12
+202	2008-04-08	12
+316	2008-04-08	12
+229	2008-04-08	12
+469	2008-04-08	12
+463	2008-04-08	12
+280	2008-04-08	12
+283	2008-04-08	12
+331	2008-04-08	12
+235	2008-04-08	12
+321	2008-04-08	12
+335	2008-04-08	12
+466	2008-04-08	12
+366	2008-04-08	12
+403	2008-04-08	12
+483	2008-04-08	12
+257	2008-04-08	12
+406	2008-04-08	12
+409	2008-04-08	12
+406	2008-04-08	12
+401	2008-04-08	12
+258	2008-04-08	12
+203	2008-04-08	12
+262	2008-04-08	12
+348	2008-04-08	12
+424	2008-04-08	12
+396	2008-04-08	12
+201	2008-04-08	12
+217	2008-04-08	12
+431	2008-04-08	12
+454	2008-04-08	12
+478	2008-04-08	12
+298	2008-04-08	12
+431	2008-04-08	12
+424	2008-04-08	12
+382	2008-04-08	12
+397	2008-04-08	12
+480	2008-04-08	12
+291	2008-04-08	12
+351	2008-04-08	12
+255	2008-04-08	12
+438	2008-04-08	12
+414	2008-04-08	12
+200	2008-04-08	12
+491	2008-04-08	12
+237	2008-04-08	12
+439	2008-04-08	12
+360	2008-04-08	12
+248	2008-04-08	12
+479	2008-04-08	12
+305	2008-04-08	12
+417	2008-04-08	12
+444	2008-04-08	12
+429	2008-04-08	12
+443	2008-04-08	12
+323	2008-04-08	12
+325	2008-04-08	12
+277	2008-04-08	12
+230	2008-04-08	12
+478	2008-04-08	12
+468	2008-04-08	12
+310	2008-04-08	12
+317	2008-04-08	12
+333	2008-04-08	12
+493	2008-04-08	12
+460	2008-04-08	12
+207	2008-04-08	12
+249	2008-04-08	12
+265	2008-04-08	12
+480	2008-04-08	12
+353	2008-04-08	12
+214	2008-04-08	12
+462	2008-04-08	12
+233	2008-04-08	12
+406	2008-04-08	12
+454	2008-04-08	12
+375	2008-04-08	12
+401	2008-04-08	12
+421	2008-04-08	12
+407	2008-04-08	12
+384	2008-04-08	12
+256	2008-04-08	12
+384	2008-04-08	12
+379	2008-04-08	12
+462	2008-04-08	12
+492	2008-04-08	12
+298	2008-04-08	12
+341	2008-04-08	12
+498	2008-04-08	12
+458	2008-04-08	12
+362	2008-04-08	12
+285	2008-04-08	12
+348	2008-04-08	12
+273	2008-04-08	12
+281	2008-04-08	12
+344	2008-04-08	12
+469	2008-04-08	12
+315	2008-04-08	12
+448	2008-04-08	12
+348	2008-04-08	12
+307	2008-04-08	12
+414	2008-04-08	12
+477	2008-04-08	12
+222	2008-04-08	12
+403	2008-04-08	12
+400	2008-04-08	12
+200	2008-04-08	12

Modified: hive/trunk/ql/src/test/results/clientpositive/input39.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/input39.q.out?rev=1409077&r1=1409076&r2=1409077&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/input39.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/input39.q.out Wed Nov 14 03:51:52 2012
@@ -1,6 +1,12 @@
-PREHOOK: query: create table t1(key string, value string) partitioned by (ds string)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+
+create table t1(key string, value string) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: create table t1(key string, value string) partitioned by (ds string)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+
+create table t1(key string, value string) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@t1
 PREHOOK: query: create table t2(key string, value string) partitioned by (ds string)
@@ -186,4 +192,5 @@ POSTHOOK: Lineage: t1 PARTITION(ds=2).va
 POSTHOOK: Lineage: t2 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: t2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 18
-mapred.job.tracker=does.notexist.com:666
+mapreduce.framework.name=yarn
+mapreduce.jobtracker.address=local

Added: hive/trunk/ql/src/test/results/clientpositive/input39_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/input39_hadoop20.q.out?rev=1409077&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/input39_hadoop20.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/input39_hadoop20.q.out Wed Nov 14 03:51:52 2012
@@ -0,0 +1,195 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+
+create table t1(key string, value string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+
+create table t1(key string, value string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: create table t2(key string, value string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table t2(key string, value string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t2
+PREHOOK: query: insert overwrite table t1 partition (ds='1')
+select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t1@ds=1
+POSTHOOK: query: insert overwrite table t1 partition (ds='1')
+select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t1@ds=1
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table t1 partition (ds='2')
+select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t1@ds=2
+POSTHOOK: query: insert overwrite table t1 partition (ds='2')
+select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t1@ds=2
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table t2 partition (ds='1')
+select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t2@ds=1
+POSTHOOK: query: insert overwrite table t2 partition (ds='1')
+select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t2@ds=1
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME t1)) (TOK_TABREF (TOK_TABNAME t2)) (= (. (TOK_TABLE_OR_COL t1) key) (. (TOK_TABLE_OR_COL t2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL t1) ds) '1') (= (. (TOK_TABLE_OR_COL t2) ds) '1')))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        t1 
+          TableScan
+            alias: t1
+            Filter Operator
+              predicate:
+                  expr: (((hash(rand(460476415)) & 2147483647) % 32) = 0)
+                  type: boolean
+              Reduce Output Operator
+                key expressions:
+                      expr: key
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: key
+                      type: string
+                tag: 0
+                value expressions:
+                      expr: ds
+                      type: string
+        t2 
+          TableScan
+            alias: t2
+            Filter Operator
+              predicate:
+                  expr: (((hash(rand(460476415)) & 2147483647) % 32) = 0)
+                  type: boolean
+              Reduce Output Operator
+                key expressions:
+                      expr: key
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: key
+                      type: string
+                tag: 1
+                value expressions:
+                      expr: ds
+                      type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col2}
+            1 {VALUE._col2}
+          handleSkewJoin: false
+          outputColumnNames: _col2, _col7
+          Select Operator
+            Group By Operator
+              aggregations:
+                    expr: count(1)
+              bucketGroup: false
+              mode: hash
+              outputColumnNames: _col0
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+            Reduce Output Operator
+              sort order: 
+              tag: -1
+              value expressions:
+                    expr: _col0
+                    type: bigint
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: count(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: bigint
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1@ds=1
+PREHOOK: Input: default@t2@ds=1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1@ds=1
+POSTHOOK: Input: default@t2@ds=1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: t1 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: t2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+18
+mapred.job.tracker=localhost:58

Modified: hive/trunk/ql/src/test/results/clientpositive/join14.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join14.q.out?rev=1409077&r1=1409076&r2=1409077&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/join14.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/join14.q.out Wed Nov 14 03:51:52 2012
@@ -1,6 +1,10 @@
-PREHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
 PREHOOK: query: EXPLAIN

Added: hive/trunk/ql/src/test/results/clientpositive/join14_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join14_hadoop20.q.out?rev=1409077&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/join14_hadoop20.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/join14_hadoop20.q.out Wed Nov 14 03:51:52 2012
@@ -0,0 +1,1891 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src)) (TOK_TABREF (TOK_TABNAME srcpart)) (and (AND (= (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL srcpart) key)) (= (. (TOK_TABLE_OR_COL srcpart) ds) '2008-04-08')) (> (. (TOK_TABLE_OR_COL src) key) 100)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) value)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Filter Operator
+              predicate:
+                  expr: (key > 100.0)
+                  type: boolean
+              Reduce Output Operator
+                key expressions:
+                      expr: key
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: key
+                      type: string
+                tag: 0
+                value expressions:
+                      expr: key
+                      type: string
+        srcpart 
+          TableScan
+            alias: srcpart
+            Filter Operator
+              predicate:
+                  expr: (key > 100.0)
+                  type: boolean
+              Reduce Output Operator
+                key expressions:
+                      expr: key
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: key
+                      type: string
+                tag: 1
+                value expressions:
+                      expr: value
+                      type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0}
+            1 {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col5
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col5
+                  type: string
+            outputColumnNames: _col0, _col1
+            Select Operator
+              expressions:
+                    expr: UDFToInteger(_col0)
+                    type: int
+                    expr: _col1
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.dest1
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dest1
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+
+PREHOOK: query: FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
+INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select dest1.* from dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: select dest1.* from dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: dest1.c1 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest1.c2 SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+103	val_103
+103	val_103
+103	val_103
+103	val_103
+103	val_103
+103	val_103
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+104	val_104
+104	val_104
+104	val_104
+104	val_104
+104	val_104
+104	val_104
+105	val_105
+105	val_105
+111	val_111
+111	val_111
+113	val_113
+113	val_113
+113	val_113
+113	val_113
+113	val_113
+113	val_113
+113	val_113
+113	val_113
+114	val_114
+114	val_114
+116	val_116
+116	val_116
+118	val_118
+118	val_118
+118	val_118
+118	val_118
+118	val_118
+118	val_118
+118	val_118
+118	val_118
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+119	val_119
+120	val_120
+120	val_120
+120	val_120
+120	val_120
+120	val_120
+120	val_120
+120	val_120
+120	val_120
+125	val_125
+125	val_125
+125	val_125
+125	val_125
+125	val_125
+125	val_125
+125	val_125
+125	val_125
+126	val_126
+126	val_126
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+128	val_128
+129	val_129
+129	val_129
+129	val_129
+129	val_129
+129	val_129
+129	val_129
+129	val_129
+129	val_129
+131	val_131
+131	val_131
+133	val_133
+133	val_133
+134	val_134
+134	val_134
+134	val_134
+134	val_134
+134	val_134
+134	val_134
+134	val_134
+134	val_134
+136	val_136
+136	val_136
+137	val_137
+137	val_137
+137	val_137
+137	val_137
+137	val_137
+137	val_137
+137	val_137
+137	val_137
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+143	val_143
+143	val_143
+145	val_145
+145	val_145
+146	val_146
+146	val_146
+146	val_146
+146	val_146
+146	val_146
+146	val_146
+146	val_146
+146	val_146
+149	val_149
+149	val_149
+149	val_149
+149	val_149
+149	val_149
+149	val_149
+149	val_149
+149	val_149
+150	val_150
+150	val_150
+152	val_152
+152	val_152
+152	val_152
+152	val_152
+152	val_152
+152	val_152
+152	val_152
+152	val_152
+153	val_153
+153	val_153
+155	val_155
+155	val_155
+156	val_156
+156	val_156
+157	val_157
+157	val_157
+158	val_158
+158	val_158
+160	val_160
+160	val_160
+162	val_162
+162	val_162
+163	val_163
+163	val_163
+164	val_164
+164	val_164
+164	val_164
+164	val_164
+164	val_164
+164	val_164
+164	val_164
+164	val_164
+165	val_165
+165	val_165
+165	val_165
+165	val_165
+165	val_165
+165	val_165
+165	val_165
+165	val_165
+166	val_166
+166	val_166
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+167	val_167
+168	val_168
+168	val_168
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+170	val_170
+170	val_170
+172	val_172
+172	val_172
+172	val_172
+172	val_172
+172	val_172
+172	val_172
+172	val_172
+172	val_172
+174	val_174
+174	val_174
+174	val_174
+174	val_174
+174	val_174
+174	val_174
+174	val_174
+174	val_174
+175	val_175
+175	val_175
+175	val_175
+175	val_175
+175	val_175
+175	val_175
+175	val_175
+175	val_175
+176	val_176
+176	val_176
+176	val_176
+176	val_176
+176	val_176
+176	val_176
+176	val_176
+176	val_176
+177	val_177
+177	val_177
+178	val_178
+178	val_178
+179	val_179
+179	val_179
+179	val_179
+179	val_179
+179	val_179
+179	val_179
+179	val_179
+179	val_179
+180	val_180
+180	val_180
+181	val_181
+181	val_181
+183	val_183
+183	val_183
+186	val_186
+186	val_186
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+187	val_187
+189	val_189
+189	val_189
+190	val_190
+190	val_190
+191	val_191
+191	val_191
+191	val_191
+191	val_191
+191	val_191
+191	val_191
+191	val_191
+191	val_191
+192	val_192
+192	val_192
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+193	val_193
+194	val_194
+194	val_194
+195	val_195
+195	val_195
+195	val_195
+195	val_195
+195	val_195
+195	val_195
+195	val_195
+195	val_195
+196	val_196
+196	val_196
+197	val_197
+197	val_197
+197	val_197
+197	val_197
+197	val_197
+197	val_197
+197	val_197
+197	val_197
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+199	val_199
+200	val_200
+200	val_200
+200	val_200
+200	val_200
+200	val_200
+200	val_200
+200	val_200
+200	val_200
+201	val_201
+201	val_201
+202	val_202
+202	val_202
+203	val_203
+203	val_203
+203	val_203
+203	val_203
+203	val_203
+203	val_203
+203	val_203
+203	val_203
+205	val_205
+205	val_205
+205	val_205
+205	val_205
+205	val_205
+205	val_205
+205	val_205
+205	val_205
+207	val_207
+207	val_207
+207	val_207
+207	val_207
+207	val_207
+207	val_207
+207	val_207
+207	val_207
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+208	val_208
+209	val_209
+209	val_209
+209	val_209
+209	val_209
+209	val_209
+209	val_209
+209	val_209
+209	val_209
+213	val_213
+213	val_213
+213	val_213
+213	val_213
+213	val_213
+213	val_213
+213	val_213
+213	val_213
+214	val_214
+214	val_214
+216	val_216
+216	val_216
+216	val_216
+216	val_216
+216	val_216
+216	val_216
+216	val_216
+216	val_216
+217	val_217
+217	val_217
+217	val_217
+217	val_217
+217	val_217
+217	val_217
+217	val_217
+217	val_217
+218	val_218
+218	val_218
+219	val_219
+219	val_219
+219	val_219
+219	val_219
+219	val_219
+219	val_219
+219	val_219
+219	val_219
+221	val_221
+221	val_221
+221	val_221
+221	val_221
+221	val_221
+221	val_221
+221	val_221
+221	val_221
+222	val_222
+222	val_222
+223	val_223
+223	val_223
+223	val_223
+223	val_223
+223	val_223
+223	val_223
+223	val_223
+223	val_223
+224	val_224
+224	val_224
+224	val_224
+224	val_224
+224	val_224
+224	val_224
+224	val_224
+224	val_224
+226	val_226
+226	val_226
+228	val_228
+228	val_228
+229	val_229
+229	val_229
+229	val_229
+229	val_229
+229	val_229
+229	val_229
+229	val_229
+229	val_229
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+230	val_230
+233	val_233
+233	val_233
+233	val_233
+233	val_233
+233	val_233
+233	val_233
+233	val_233
+233	val_233
+235	val_235
+235	val_235
+237	val_237
+237	val_237
+237	val_237
+237	val_237
+237	val_237
+237	val_237
+237	val_237
+237	val_237
+238	val_238
+238	val_238
+238	val_238
+238	val_238
+238	val_238
+238	val_238
+238	val_238
+238	val_238
+239	val_239
+239	val_239
+239	val_239
+239	val_239
+239	val_239
+239	val_239
+239	val_239
+239	val_239
+241	val_241
+241	val_241
+242	val_242
+242	val_242
+242	val_242
+242	val_242
+242	val_242
+242	val_242
+242	val_242
+242	val_242
+244	val_244
+244	val_244
+247	val_247
+247	val_247
+248	val_248
+248	val_248
+249	val_249
+249	val_249
+252	val_252
+252	val_252
+255	val_255
+255	val_255
+255	val_255
+255	val_255
+255	val_255
+255	val_255
+255	val_255
+255	val_255
+256	val_256
+256	val_256
+256	val_256
+256	val_256
+256	val_256
+256	val_256
+256	val_256
+256	val_256
+257	val_257
+257	val_257
+258	val_258
+258	val_258
+260	val_260
+260	val_260
+262	val_262
+262	val_262
+263	val_263
+263	val_263
+265	val_265
+265	val_265
+265	val_265
+265	val_265
+265	val_265
+265	val_265
+265	val_265
+265	val_265
+266	val_266
+266	val_266
+272	val_272
+272	val_272
+272	val_272
+272	val_272
+272	val_272
+272	val_272
+272	val_272
+272	val_272
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+273	val_273
+274	val_274
+274	val_274
+275	val_275
+275	val_275
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+277	val_277
+278	val_278
+278	val_278
+278	val_278
+278	val_278
+278	val_278
+278	val_278
+278	val_278
+278	val_278
+280	val_280
+280	val_280
+280	val_280
+280	val_280
+280	val_280
+280	val_280
+280	val_280
+280	val_280
+281	val_281
+281	val_281
+281	val_281
+281	val_281
+281	val_281
+281	val_281
+281	val_281
+281	val_281
+282	val_282
+282	val_282
+282	val_282
+282	val_282
+282	val_282
+282	val_282
+282	val_282
+282	val_282
+283	val_283
+283	val_283
+284	val_284
+284	val_284
+285	val_285
+285	val_285
+286	val_286
+286	val_286
+287	val_287
+287	val_287
+288	val_288
+288	val_288
+288	val_288
+288	val_288
+288	val_288
+288	val_288
+288	val_288
+288	val_288
+289	val_289
+289	val_289
+291	val_291
+291	val_291
+292	val_292
+292	val_292
+296	val_296
+296	val_296
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+298	val_298
+302	val_302
+302	val_302
+305	val_305
+305	val_305
+306	val_306
+306	val_306
+307	val_307
+307	val_307
+307	val_307
+307	val_307
+307	val_307
+307	val_307
+307	val_307
+307	val_307
+308	val_308
+308	val_308
+309	val_309
+309	val_309
+309	val_309
+309	val_309
+309	val_309
+309	val_309
+309	val_309
+309	val_309
+310	val_310
+310	val_310
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+311	val_311
+315	val_315
+315	val_315
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+316	val_316
+317	val_317
+317	val_317
+317	val_317
+317	val_317
+317	val_317
+317	val_317
+317	val_317
+317	val_317
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+318	val_318
+321	val_321
+321	val_321
+321	val_321
+321	val_321
+321	val_321
+321	val_321
+321	val_321
+321	val_321
+322	val_322
+322	val_322
+322	val_322
+322	val_322
+322	val_322
+322	val_322
+322	val_322
+322	val_322
+323	val_323
+323	val_323
+325	val_325
+325	val_325
+325	val_325
+325	val_325
+325	val_325
+325	val_325
+325	val_325
+325	val_325
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+327	val_327
+331	val_331
+331	val_331
+331	val_331
+331	val_331
+331	val_331
+331	val_331
+331	val_331
+331	val_331
+332	val_332
+332	val_332
+333	val_333
+333	val_333
+333	val_333
+333	val_333
+333	val_333
+333	val_333
+333	val_333
+333	val_333
+335	val_335
+335	val_335
+336	val_336
+336	val_336
+338	val_338
+338	val_338
+339	val_339
+339	val_339
+341	val_341
+341	val_341
+342	val_342
+342	val_342
+342	val_342
+342	val_342
+342	val_342
+342	val_342
+342	val_342
+342	val_342
+344	val_344
+344	val_344
+344	val_344
+344	val_344
+344	val_344
+344	val_344
+344	val_344
+344	val_344
+345	val_345
+345	val_345
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+348	val_348
+351	val_351
+351	val_351
+353	val_353
+353	val_353
+353	val_353
+353	val_353
+353	val_353
+353	val_353
+353	val_353
+353	val_353
+356	val_356
+356	val_356
+360	val_360
+360	val_360
+362	val_362
+362	val_362
+364	val_364
+364	val_364
+365	val_365
+365	val_365
+366	val_366
+366	val_366
+367	val_367
+367	val_367
+367	val_367
+367	val_367
+367	val_367
+367	val_367
+367	val_367
+367	val_367
+368	val_368
+368	val_368
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+369	val_369
+373	val_373
+373	val_373
+374	val_374
+374	val_374
+375	val_375
+375	val_375
+377	val_377
+377	val_377
+378	val_378
+378	val_378
+379	val_379
+379	val_379
+382	val_382
+382	val_382
+382	val_382
+382	val_382
+382	val_382
+382	val_382
+382	val_382
+382	val_382
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+384	val_384
+386	val_386
+386	val_386
+389	val_389
+389	val_389
+392	val_392
+392	val_392
+393	val_393
+393	val_393
+394	val_394
+394	val_394
+395	val_395
+395	val_395
+395	val_395
+395	val_395
+395	val_395
+395	val_395
+395	val_395
+395	val_395
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+396	val_396
+397	val_397
+397	val_397
+397	val_397
+397	val_397
+397	val_397
+397	val_397
+397	val_397
+397	val_397
+399	val_399
+399	val_399
+399	val_399
+399	val_399
+399	val_399
+399	val_399
+399	val_399
+399	val_399
+400	val_400
+400	val_400
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+401	val_401
+402	val_402
+402	val_402
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+403	val_403
+404	val_404
+404	val_404
+404	val_404
+404	val_404
+404	val_404
+404	val_404
+404	val_404
+404	val_404
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+406	val_406
+407	val_407
+407	val_407
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+409	val_409
+411	val_411
+411	val_411
+413	val_413
+413	val_413
+413	val_413
+413	val_413
+413	val_413
+413	val_413
+413	val_413
+413	val_413
+414	val_414
+414	val_414
+414	val_414
+414	val_414
+414	val_414
+414	val_414
+414	val_414
+414	val_414
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+417	val_417
+418	val_418
+418	val_418
+419	val_419
+419	val_419
+421	val_421
+421	val_421
+424	val_424
+424	val_424
+424	val_424
+424	val_424
+424	val_424
+424	val_424
+424	val_424
+424	val_424
+427	val_427
+427	val_427
+429	val_429
+429	val_429
+429	val_429
+429	val_429
+429	val_429
+429	val_429
+429	val_429
+429	val_429
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+430	val_430
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+431	val_431
+432	val_432
+432	val_432
+435	val_435
+435	val_435
+436	val_436
+436	val_436
+437	val_437
+437	val_437
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+438	val_438
+439	val_439
+439	val_439
+439	val_439
+439	val_439
+439	val_439
+439	val_439
+439	val_439
+439	val_439
+443	val_443
+443	val_443
+444	val_444
+444	val_444
+446	val_446
+446	val_446
+448	val_448
+448	val_448
+449	val_449
+449	val_449
+452	val_452
+452	val_452
+453	val_453
+453	val_453
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+454	val_454
+455	val_455
+455	val_455
+457	val_457
+457	val_457
+458	val_458
+458	val_458
+458	val_458
+458	val_458
+458	val_458
+458	val_458
+458	val_458
+458	val_458
+459	val_459
+459	val_459
+459	val_459
+459	val_459
+459	val_459
+459	val_459
+459	val_459
+459	val_459
+460	val_460
+460	val_460
+462	val_462
+462	val_462
+462	val_462
+462	val_462
+462	val_462
+462	val_462
+462	val_462
+462	val_462
+463	val_463
+463	val_463
+463	val_463
+463	val_463
+463	val_463
+463	val_463
+463	val_463
+463	val_463
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+466	val_466
+467	val_467
+467	val_467
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+468	val_468
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+469	val_469
+470	val_470
+470	val_470
+472	val_472
+472	val_472
+475	val_475
+475	val_475
+477	val_477
+477	val_477
+478	val_478
+478	val_478
+478	val_478
+478	val_478
+478	val_478
+478	val_478
+478	val_478
+478	val_478
+479	val_479
+479	val_479
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+480	val_480
+481	val_481
+481	val_481
+482	val_482
+482	val_482
+483	val_483
+483	val_483
+484	val_484
+484	val_484
+485	val_485
+485	val_485
+487	val_487
+487	val_487
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+489	val_489
+490	val_490
+490	val_490
+491	val_491
+491	val_491
+492	val_492
+492	val_492
+492	val_492
+492	val_492
+492	val_492
+492	val_492
+492	val_492
+492	val_492
+493	val_493
+493	val_493
+494	val_494
+494	val_494
+495	val_495
+495	val_495
+496	val_496
+496	val_496
+497	val_497
+497	val_497
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498
+498	val_498

Modified: hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out?rev=1409077&r1=1409076&r2=1409077&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out Wed Nov 14 03:51:52 2012
@@ -1,11 +1,11 @@
-PREHOOK: query: USE default
-PREHOOK: type: SWITCHDATABASE
-POSTHOOK: query: USE default
-POSTHOOK: type: SWITCHDATABASE
-PREHOOK: query: -- create file inputs
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+-- create file inputs
 create table sih_i_part (key int, value string) partitioned by (p string)
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: -- create file inputs
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+
+-- create file inputs
 create table sih_i_part (key int, value string) partitioned by (p string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@sih_i_part
@@ -75,29 +75,28 @@ POSTHOOK: Lineage: sih_i_part PARTITION(
 POSTHOOK: Lineage: sih_i_part PARTITION(p=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: sih_i_part PARTITION(p=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: sih_i_part PARTITION(p=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.22)
--- This test sets mapred.max.split.size=300 and hive.merge.smallfiles.avgsize=1
--- in an attempt to force the generation of multiple splits and multiple output files.
--- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
--- when using CombineFileInputFormat, so only one split is generated. This has a
--- significant impact on the results of the TABLESAMPLE(x PERCENT). This issue was
--- fixed in MAPREDUCE-2046 which is included in 0.22.
--- Sample split, running locally limited by num tasks
+PREHOOK: query: -- Relaxing hive.exec.mode.local.auto.input.files.max=1.
+-- Hadoop20 will not generate more splits than there are files (one).
+-- Hadoop23 generate splits correctly (four), hence the max needs to be adjusted to ensure running in local mode.
+-- Default value is hive.exec.mode.local.auto.input.files.max=4 which produces expected behavior on Hadoop23.
+-- hive.sample.seednumber is required because Hadoop23 generates multiple splits and tablesample is non-repeatable without it.
+
+-- sample split, running locally limited by num tasks
 select count(1) from sih_src tablesample(1 percent)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@sih_src
 #### A masked pattern was here ####
-1500
+25
 PREHOOK: query: -- sample two tables
-select count(1) from sih_src tablesample(1 percent)a join sih_src2 tablesample(1 percent)b on a.key = b.key
+select count(1) from sih_src tablesample(1 percent) a join sih_src2 tablesample(1 percent) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@sih_src
 PREHOOK: Input: default@sih_src2
 #### A masked pattern was here ####
-3084
+49
 PREHOOK: query: -- sample split, running locally limited by max bytes
 select count(1) from sih_src tablesample(1 percent)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@sih_src
 #### A masked pattern was here ####
-1500
+25
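
Aside (not part of the commit): the revised comment above relies on a handful of session settings. A minimal sketch of how they fit together, with illustrative values rather than the ones actually set in sample_islocalmode_hook.q:

  -- Let Hive consider local mode and pin the sample seed so TABLESAMPLE stays
  -- repeatable across the multiple splits that Hadoop23 generates.
  set hive.exec.mode.local.auto=true;
  set hive.exec.mode.local.auto.input.files.max=4;
  set hive.sample.seednumber=0;
  -- Force small splits and skip small-file merging, as the test intends.
  set mapred.max.split.size=300;
  set hive.merge.smallfiles.avgsize=1;
  select count(1) from sih_src tablesample(1 percent);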

Added: hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook_hadoop20.q.out?rev=1409077&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook_hadoop20.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook_hadoop20.q.out Wed Nov 14 03:51:52 2012
@@ -0,0 +1,112 @@
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+-- This test sets mapred.max.split.size=300 and hive.merge.smallfiles.avgsize=1
+-- in an attempt to force the generation of multiple splits and multiple output files.
+-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
+-- when using CombineFileInputFormat, so only one split is generated. This has a
+-- significant impact on the results of the TABLESAMPLE(x PERCENT). This issue was
+-- fixed in MAPREDUCE-2046 which is included in 0.22.
+
+-- create file inputs
+create table sih_i_part (key int, value string) partitioned by (p string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+-- This test sets mapred.max.split.size=300 and hive.merge.smallfiles.avgsize=1
+-- in an attempt to force the generation of multiple splits and multiple output files.
+-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
+-- when using CombineFileInputFormat, so only one split is generated. This has a
+-- significant impact on the results of the TABLESAMPLE(x PERCENT). This issue was
+-- fixed in MAPREDUCE-2046 which is included in 0.22.
+
+-- create file inputs
+create table sih_i_part (key int, value string) partitioned by (p string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@sih_i_part
+PREHOOK: query: insert overwrite table sih_i_part partition (p='1') select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@sih_i_part@p=1
+POSTHOOK: query: insert overwrite table sih_i_part partition (p='1') select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@sih_i_part@p=1
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table sih_i_part partition (p='2') select key+10000, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@sih_i_part@p=2
+POSTHOOK: query: insert overwrite table sih_i_part partition (p='2') select key+10000, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@sih_i_part@p=2
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table sih_i_part partition (p='3') select key+20000, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@sih_i_part@p=3
+POSTHOOK: query: insert overwrite table sih_i_part partition (p='3') select key+20000, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@sih_i_part@p=3
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: create table sih_src as select key, value from sih_i_part order by key, value
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@sih_i_part@p=1
+PREHOOK: Input: default@sih_i_part@p=2
+PREHOOK: Input: default@sih_i_part@p=3
+POSTHOOK: query: create table sih_src as select key, value from sih_i_part order by key, value
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@sih_i_part@p=1
+POSTHOOK: Input: default@sih_i_part@p=2
+POSTHOOK: Input: default@sih_i_part@p=3
+POSTHOOK: Output: default@sih_src
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: create table sih_src2 as select key, value from sih_src order by key, value
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@sih_src
+POSTHOOK: query: create table sih_src2 as select key, value from sih_src order by key, value
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@sih_src
+POSTHOOK: Output: default@sih_src2
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: sih_i_part PARTITION(p=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- Sample split, running locally limited by num tasks
+select count(1) from sih_src tablesample(1 percent)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sih_src
+#### A masked pattern was here ####
+1500
+PREHOOK: query: -- sample two tables
+select count(1) from sih_src tablesample(1 percent)a join sih_src2 tablesample(1 percent)b on a.key = b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sih_src
+PREHOOK: Input: default@sih_src2
+#### A masked pattern was here ####
+3084
+PREHOOK: query: -- sample split, running locally limited by max bytes
+select count(1) from sih_src tablesample(1 percent)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sih_src
+#### A masked pattern was here ####
+1500
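
Aside (not part of the commit): the full-table counts in this Hadoop20 baseline follow from the comment at the top of the file. TABLESAMPLE(x PERCENT) operates at split granularity, and since 0.20's CombineFileInputFormat cannot produce splits smaller than the block size, the whole table lands in a single split; the 1 percent sample therefore still scans all 1500 rows, and the sampled join of sih_src with sih_src2 sees all 3084 matching pairs. On Hadoop23 the same queries read only the sampled split, which is where the 25 and 49 in the modified sample_islocalmode_hook.q.out come from. For reference, the join form used by the test, with the aliases written after the sample clause as in the reformatted query above:

  select count(1)
  from sih_src tablesample(1 percent) a
  join sih_src2 tablesample(1 percent) b
    on a.key = b.key;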

Modified: hive/trunk/ql/src/test/results/clientpositive/split_sample.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/split_sample.q.out?rev=1409077&r1=1409076&r2=1409077&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/split_sample.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/split_sample.q.out Wed Nov 14 03:51:52 2012
@@ -2,10 +2,26 @@ PREHOOK: query: USE default
 PREHOOK: type: SWITCHDATABASE
 POSTHOOK: query: USE default
 POSTHOOK: type: SWITCHDATABASE
-PREHOOK: query: -- create multiple file inputs (two enable multiple splits)
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+-- This test sets mapred.max.split.size=300 and hive.merge.smallfiles.avgsize=1
+-- in an attempt to force the generation of multiple splits and multiple output files.
+-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
+-- when using CombineFileInputFormat, so only one split is generated. This has a
+-- significant impact on the results of the TABLESAMPLE(x PERCENT). This issue was
+-- fixed in MAPREDUCE-2046 which is included in 0.22.
+
+-- create multiple file inputs (two enable multiple splits)
 create table ss_i_part (key int, value string) partitioned by (p string)
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: -- create multiple file inputs (two enable multiple splits)
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
+-- This test sets mapred.max.split.size=300 and hive.merge.smallfiles.avgsize=1
+-- in an attempt to force the generation of multiple splits and multiple output files.
+-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
+-- when using CombineFileInputFormat, so only one split is generated. This has a
+-- significant impact on the results of the TABLESAMPLE(x PERCENT). This issue was
+-- fixed in MAPREDUCE-2046 which is included in 0.22.
+
+-- create multiple file inputs (two enable multiple splits)
 create table ss_i_part (key int, value string) partitioned by (p string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@ss_i_part
@@ -62,27 +78,11 @@ POSTHOOK: Lineage: ss_i_part PARTITION(p
 POSTHOOK: Lineage: ss_i_part PARTITION(p=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: ss_i_part PARTITION(p=3).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: ss_i_part PARTITION(p=3).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.22)
--- This test sets mapred.max.split.size=300 and hive.merge.smallfiles.avgsize=1
--- in an attempt to force the generation of multiple splits and multiple output files.
--- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
--- when using CombineFileInputFormat, so only one split is generated. This has a
--- significant impact on the results of the TABLESAMPLE(x PERCENT). This issue was
--- fixed in MAPREDUCE-2046 which is included in 0.22.
-
-select count(1) from ss_src2 tablesample(1 percent)
+PREHOOK: query: select count(1) from ss_src2 tablesample(1 percent)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@ss_src2
 #### A masked pattern was here ####
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.22)
--- This test sets mapred.max.split.size=300 and hive.merge.smallfiles.avgsize=1
--- in an attempt to force the generation of multiple splits and multiple output files.
--- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
--- when using CombineFileInputFormat, so only one split is generated. This has a
--- significant impact on the results of the TABLESAMPLE(x PERCENT). This issue was
--- fixed in MAPREDUCE-2046 which is included in 0.22.
-
-select count(1) from ss_src2 tablesample(1 percent)
+POSTHOOK: query: select count(1) from ss_src2 tablesample(1 percent)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@ss_src2
 #### A masked pattern was here ####

Modified: hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServerSessions.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServerSessions.java?rev=1409077&r1=1409076&r2=1409077&view=diff
==============================================================================
--- hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServerSessions.java (original)
+++ hive/trunk/service/src/test/org/apache/hadoop/hive/service/TestHiveServerSessions.java Wed Nov 14 03:51:52 2012
@@ -52,7 +52,7 @@ public class TestHiveServerSessions exte
       }
     });
     server.start();
-    Thread.sleep(1000);
+    Thread.sleep(5000);
 
     for (int i = 0; i < transports.length ; i++) {
       TSocket transport = new TSocket("localhost", port);

Modified: hive/trunk/shims/ivy.xml
URL: http://svn.apache.org/viewvc/hive/trunk/shims/ivy.xml?rev=1409077&r1=1409076&r2=1409077&view=diff
==============================================================================
--- hive/trunk/shims/ivy.xml (original)
+++ hive/trunk/shims/ivy.xml Wed Nov 14 03:51:52 2012
@@ -38,12 +38,12 @@
                 transitive="false"/>
     <dependency org="commons-logging" name="commons-logging-api" rev="${commons-logging-api.version}"
                 transitive="false"/>
-    <dependency org="com.google.guava" name="guava" rev="${guava.version}"
-      transitive="false"/>
     <dependency org="org.codehaus.jackson" name="jackson-core-asl" rev="${jackson.version}"/>
     <dependency org="org.codehaus.jackson" name="jackson-mapper-asl" rev="${jackson.version}"/>
 
     <!-- Hadoop 0.23 dependencies. Used both for shims and for building against Hadoop 0.23. -->
+    <dependency org="com.google.guava" name="guava" rev="${guava-hadoop23.version}"
+                conf="hadoop0.23.shim->default" transitive="false"/>
     <dependency org="org.apache.hadoop" name="hadoop-common"
                 rev="${hadoop-0.23.version}"
                 conf="hadoop0.23.shim->default">
@@ -90,8 +90,9 @@
       <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
     </dependency>
 
-
     <!-- Hadoop 0.20 shim dependencies. Used for building 0.20 shims. -->
+    <dependency org="com.google.guava" name="guava" rev="${guava-hadoop20.version}"
+                conf="hadoop0.20.shim->default" transitive="false"/>
     <dependency org="org.apache.hadoop" name="hadoop-core"
                 rev="${hadoop-0.20.version}"
                 conf="hadoop0.20.shim->default">
@@ -115,6 +116,8 @@
     </dependency>
 
     <!-- Hadoop 0.20S (or 1.0.0) shim dependencies. Used for building 0.20S shims. -->
+    <dependency org="com.google.guava" name="guava" rev="${guava-hadoop20.version}"
+                conf="hadoop0.20S.shim->default" transitive="false"/>
     <dependency org="org.apache.hadoop" name="hadoop-core"
                 rev="${hadoop-0.20S.version}"
                 conf="hadoop0.20S.shim->default">


