hadoop-hive-commits mailing list archives

From: rmur...@apache.org
Subject: svn commit: r819792 [13/24] - in /hadoop/hive/trunk: ./ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ data/conf/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apa...
Date: Tue, 29 Sep 2009 01:25:30 GMT
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out Tue Sep 29 01:25:15 2009
@@ -1,6 +1,13 @@
-query: drop table dest_j1
-query: CREATE TABLE dest_j1(key STRING, value STRING, val2 INT) STORED AS TEXTFILE
-query: EXPLAIN EXTENDED
+PREHOOK: query: drop table dest_j1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest_j1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE dest_j1(key STRING, value STRING, val2 INT) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest_j1(key STRING, value STRING, val2 INT) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest_j1
+PREHOOK: query: EXPLAIN EXTENDED
 INSERT OVERWRITE TABLE dest_j1
 SELECT /*+ MAPJOIN(x) */ x.key, x.value, subq1.cnt
 FROM 
@@ -9,6 +16,17 @@
   SELECT x1.key as key, count(1) as cnt from src x1 where x1.key > 100 group by x1.key
 ) subq1
 JOIN src1 x ON (x.key = subq1.key)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest_j1
+SELECT /*+ MAPJOIN(x) */ x.key, x.value, subq1.cnt
+FROM 
+( SELECT x.key as key, count(1) as cnt from src x where x.key < 20 group by x.key
+     UNION ALL
+  SELECT x1.key as key, count(1) as cnt from src x1 where x1.key > 100 group by x1.key
+) subq1
+JOIN src1 x ON (x.key = subq1.key)
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF src x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key) key) (TOK_SELEXPR (TOK_FUNCTION count 1) cnt)) (TOK_WHERE (< (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL x) key)))) (TOK_QUERY (TOK_FROM (TOK_TABREF src x1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x1) key) key) (TOK_SELEXPR (TOK_FUNCTION count 1) cnt)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL x1) key) 100)) (TOK_GROUPBY (. (TOK_TABLE_OR_COL x1) key))))) subq1) (TOK_TABREF src1 x) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL subq1) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_j1)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST x))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL subq1) cnt))))
 )
 
@@ -66,9 +84,9 @@
                             type: bigint
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x]
       Path -> Partition:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -83,7 +101,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
       Reduce Operator Tree:
@@ -105,7 +123,7 @@
             File Output Operator
               compressed: false
               GlobalTableId: 0
-              directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10002
+              directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10002
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -117,7 +135,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10002 
           Union
             Common Join Operator
               condition map:
@@ -160,7 +178,7 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 1
-                      directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10003
+                      directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10003
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -174,10 +192,10 @@
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             file.inputformat org.apache.hadoop.mapred.TextInputFormat
                             file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                            location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest_j1
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10004 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10004 
           Union
             Common Join Operator
               condition map:
@@ -220,7 +238,7 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 1
-                      directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10003
+                      directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10003
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -234,7 +252,7 @@
                             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             file.inputformat org.apache.hadoop.mapred.TextInputFormat
                             file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                            location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                           name: dest_j1
       Local Work:
@@ -288,7 +306,7 @@
                         File Output Operator
                           compressed: false
                           GlobalTableId: 1
-                          directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10003
+                          directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10003
                           table:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -302,15 +320,15 @@
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: dest_j1
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10002 [file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10002]
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10004 [file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10004]
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10002 [file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10002]
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10004 [file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10004]
       Path -> Partition:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10002 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10002 
           Partition
           
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -319,7 +337,7 @@
                 columns _col0,_col1
                 columns.types string,bigint
                 escape.delim \
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10004 
+        file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10004 
           Partition
           
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -335,11 +353,11 @@
           Move Operator
             files:
                 hdfs directory: true
-                source: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10003
-                destination: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/429216624/10000
+                source: file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10003
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/2094683634/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10003 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10003 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -355,9 +373,9 @@
                           type: int
             Needs Tagging: false
             Path -> Alias:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10003 [file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10003]
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10003 [file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10003]
             Path -> Partition:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10003 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10003 
                 Partition
                 
                     input format: org.apache.hadoop.mapred.TextInputFormat
@@ -372,7 +390,7 @@
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       file.inputformat org.apache.hadoop.mapred.TextInputFormat
                       file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                      location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest_j1
             Reduce Operator Tree:
@@ -380,7 +398,7 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/429216624/10000
+                  directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/2094683634/10000
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -393,7 +411,7 @@
                         bucket_count -1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                        location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                        location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                         file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest_j1
@@ -402,7 +420,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/429216624/10000
+          source: file:/data/users/njain/hive5/hive5/build/ql/tmp/2094683634/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -416,10 +434,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest_j1
-          tmp directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/429216624/10001
+          tmp directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/2094683634/10001
 
   Stage: Stage-6
     Map Reduce
@@ -464,9 +482,9 @@
                             type: bigint
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1]
       Path -> Partition:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -481,7 +499,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
       Reduce Operator Tree:
@@ -503,7 +521,7 @@
             File Output Operator
               compressed: false
               GlobalTableId: 0
-              directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/795699336/10004
+              directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1041790676/10004
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -513,7 +531,19 @@
                     escape.delim \
 
 
-query: INSERT OVERWRITE TABLE dest_j1
+PREHOOK: query: INSERT OVERWRITE TABLE dest_j1
+SELECT /*+ MAPJOIN(x) */ x.key, x.value, subq1.cnt
+FROM 
+( SELECT x.key as key, count(1) as cnt from src x where x.key < 20 group by x.key
+     UNION ALL
+  SELECT x1.key as key, count(1) as cnt from src x1 where x1.key > 100 group by x1.key
+) subq1
+JOIN src1 x ON (x.key = subq1.key)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@dest_j1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1
 SELECT /*+ MAPJOIN(x) */ x.key, x.value, subq1.cnt
 FROM 
 ( SELECT x.key as key, count(1) as cnt from src x where x.key < 20 group by x.key
@@ -521,12 +551,18 @@
   SELECT x1.key as key, count(1) as cnt from src x1 where x1.key > 100 group by x1.key
 ) subq1
 JOIN src1 x ON (x.key = subq1.key)
-Input: default/src
-Input: default/src1
-Output: default/dest_j1
-query: select * from dest_j1 x order by x.key
-Input: default/dest_j1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/392273807/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@dest_j1
+PREHOOK: query: select * from dest_j1 x order by x.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest_j1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2131943789/10000
+POSTHOOK: query: select * from dest_j1 x order by x.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest_j1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2131943789/10000
 128		3
 146	val_146	2
 150	val_150	1
@@ -540,4 +576,8 @@
 369		3
 401	val_401	5
 406	val_406	4
-query: drop table dest_j1
+PREHOOK: query: drop table dest_j1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest_j1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@dest_j1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join36.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join36.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join36.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join36.q.out Tue Sep 29 01:25:15 2009
@@ -1,21 +1,60 @@
-query: drop table dest_j1
-query: drop table tmp1
-query: drop table tmp2
-query: CREATE TABLE tmp1(key INT, cnt INT)
-query: CREATE TABLE tmp2(key INT, cnt INT)
-query: CREATE TABLE dest_j1(key INT, value INT, val2 INT)
-query: INSERT OVERWRITE TABLE tmp1
+PREHOOK: query: drop table dest_j1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest_j1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table tmp1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tmp1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table tmp2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tmp2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE tmp1(key INT, cnt INT)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp1(key INT, cnt INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp1
+PREHOOK: query: CREATE TABLE tmp2(key INT, cnt INT)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp2(key INT, cnt INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp2
+PREHOOK: query: CREATE TABLE dest_j1(key INT, value INT, val2 INT)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest_j1(key INT, value INT, val2 INT)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest_j1
+PREHOOK: query: INSERT OVERWRITE TABLE tmp1
 SELECT key, count(1) from src group by key
-Input: default/src
-Output: default/tmp1
-query: INSERT OVERWRITE TABLE tmp2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tmp1
+POSTHOOK: query: INSERT OVERWRITE TABLE tmp1
 SELECT key, count(1) from src group by key
-Input: default/src
-Output: default/tmp2
-query: EXPLAIN
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tmp1
+PREHOOK: query: INSERT OVERWRITE TABLE tmp2
+SELECT key, count(1) from src group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@tmp2
+POSTHOOK: query: INSERT OVERWRITE TABLE tmp2
+SELECT key, count(1) from src group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@tmp2
+PREHOOK: query: EXPLAIN
 INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt
 FROM tmp1 x JOIN tmp2 y ON (x.key = y.key)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+INSERT OVERWRITE TABLE dest_j1 
+SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt
+FROM tmp1 x JOIN tmp2 y ON (x.key = y.key)
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF tmp1 x) (TOK_TABREF tmp2 y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_j1)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST x))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) cnt)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) cnt)))))
 
@@ -122,10 +161,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/1012006682/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/288849428/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/697544308/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/343369089/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -161,15 +200,28 @@
               name: dest_j1
 
 
-query: INSERT OVERWRITE TABLE dest_j1 
+PREHOOK: query: INSERT OVERWRITE TABLE dest_j1 
+SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt
+FROM tmp1 x JOIN tmp2 y ON (x.key = y.key)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tmp2
+PREHOOK: Input: default@tmp1
+PREHOOK: Output: default@dest_j1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt
 FROM tmp1 x JOIN tmp2 y ON (x.key = y.key)
-Input: default/tmp2
-Input: default/tmp1
-Output: default/dest_j1
-query: select * from dest_j1 x order by x.key
-Input: default/dest_j1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/1124582882/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tmp2
+POSTHOOK: Input: default@tmp1
+POSTHOOK: Output: default@dest_j1
+PREHOOK: query: select * from dest_j1 x order by x.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest_j1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1835102638/10000
+POSTHOOK: query: select * from dest_j1 x order by x.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest_j1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1835102638/10000
 0	3	3
 2	1	1
 4	1	1
@@ -479,4 +531,8 @@
 496	1	1
 497	1	1
 498	3	3
-query: drop table dest_j1
+PREHOOK: query: drop table dest_j1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest_j1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@dest_j1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join37.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join37.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join37.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join37.q.out Tue Sep 29 01:25:15 2009
@@ -1,9 +1,22 @@
-query: drop table dest_j1
-query: CREATE TABLE dest_j1(key INT, value STRING, val2 STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: drop table dest_j1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest_j1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE dest_j1(key INT, value STRING, val2 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest_j1(key INT, value STRING, val2 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest_j1
+PREHOOK: query: EXPLAIN
 INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(X) */ x.key, x.value, y.value
 FROM src1 x JOIN src y ON (x.key = y.key)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+INSERT OVERWRITE TABLE dest_j1 
+SELECT /*+ MAPJOIN(X) */ x.key, x.value, y.value
+FROM src1 x JOIN src y ON (x.key = y.key)
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src1 x) (TOK_TABREF src y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_j1)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST X))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) value)))))
 
@@ -128,10 +141,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive1/hive1/build/ql/tmp/468428349/10000
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/1084434064/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/hive1/hive1/build/ql/tmp/2029373808/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/811526799/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -167,15 +180,28 @@
               name: dest_j1
 
 
-query: INSERT OVERWRITE TABLE dest_j1 
+PREHOOK: query: INSERT OVERWRITE TABLE dest_j1 
+SELECT /*+ MAPJOIN(X) */ x.key, x.value, y.value
+FROM src1 x JOIN src y ON (x.key = y.key)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@dest_j1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(X) */ x.key, x.value, y.value
 FROM src1 x JOIN src y ON (x.key = y.key)
-Input: default/src
-Input: default/src1
-Output: default/dest_j1
-query: select * from dest_j1 x order by x.key
-Input: default/dest_j1
-Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1043653472/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@dest_j1
+PREHOOK: query: select * from dest_j1 x order by x.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest_j1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2091374759/10000
+POSTHOOK: query: select * from dest_j1 x order by x.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest_j1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/2091374759/10000
 66	val_66	val_66
 98	val_98	val_98
 98	val_98	val_98
@@ -213,4 +239,8 @@
 406	val_406	val_406
 406	val_406	val_406
 406	val_406	val_406
-query: drop table dest_j1
+PREHOOK: query: drop table dest_j1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest_j1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@dest_j1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join4.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join4.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM (
  FROM 
   (
@@ -13,6 +17,22 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ LEFT OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) 
 (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4)))))
 
@@ -149,7 +169,23 @@
               name: dest1
 
 
-query: FROM (
+PREHOOK: query: FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ LEFT OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM (
  FROM 
   (
   FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
@@ -162,11 +198,17 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/2044738575/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/936247839/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/936247839/10000
 11	val_11	NULL	NULL
 12	val_12	NULL	NULL
 12	val_12	NULL	NULL

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join5.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join5.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM (
  FROM 
   (
@@ -13,6 +17,22 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ RIGHT OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3)
  (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4)))))
 
@@ -149,7 +169,23 @@
               name: dest1
 
 
-query: FROM (
+PREHOOK: query: FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ RIGHT OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM (
  FROM 
   (
   FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
@@ -162,11 +198,17 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/1525120145/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/310187664/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/310187664/10000
 17	val_17	17	val_17
 18	val_18	18	val_18
 18	val_18	18	val_18

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join6.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join6.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join6.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join6.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM (
  FROM 
   (
@@ -13,6 +17,22 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ FULL OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) 
 (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4)))))
 
@@ -149,7 +169,23 @@
               name: dest1
 
 
-query: FROM (
+PREHOOK: query: FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ FULL OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM (
  FROM 
   (
   FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
@@ -162,11 +198,17 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_2/build/ql/tmp/35651409/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/990359387/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/990359387/10000
 11	val_11	NULL	NULL
 12	val_12	NULL	NULL
 12	val_12	NULL	NULL

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join7.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join7.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join7.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join7.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING, c5 INT, c6 STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING, c5 INT, c6 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING, c5 INT, c6 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM (
  FROM 
   (
@@ -18,6 +22,27 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4, c.c5 AS c5, c.c6 AS c6
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ FULL OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ LEFT OUTER JOIN 
+ (
+  FROM src src3 SELECT src3.key AS c5, src3.value AS c6 WHERE src3.key > 20 and src3.key < 25
+ ) c
+ ON (a.c1 = c.c5)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4, c.c5 AS c5, c.c6 AS c6
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_FULLOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src3)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src3) key) c5) (TOK_SELEX
 PR (. (TOK_TABLE_OR_COL src3) value) c6)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src3) key) 20) (< (. (TOK_TABLE_OR_COL src3) key) 25))))) c) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL c) c5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c5) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c6) c6)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c5)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c6)))))
 
@@ -200,7 +225,28 @@
               name: dest1
 
 
-query: FROM (
+PREHOOK: query: FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ FULL OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ LEFT OUTER JOIN 
+ (
+  FROM src src3 SELECT src3.key AS c5, src3.value AS c6 WHERE src3.key > 20 and src3.key < 25
+ ) c
+ ON (a.c1 = c.c5)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4, c.c5 AS c5, c.c6 AS c6
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM (
  FROM 
   (
   FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
@@ -218,11 +264,17 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4, c.c5 AS c5, c.c6 AS c6
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4, c.c5, c.c6
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_0/build/ql/tmp/350263108/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1362605307/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1362605307/10000
 11	val_11	NULL	NULL	NULL	NULL
 12	val_12	NULL	NULL	NULL	NULL
 12	val_12	NULL	NULL	NULL	NULL

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join8.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join8.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join8.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join8.q.out Tue Sep 29 01:25:15 2009
@@ -1,5 +1,9 @@
-query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
-query: EXPLAIN
+PREHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(c1 INT, c2 STRING, c3 INT, c4 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
 FROM (
  FROM 
   (
@@ -13,6 +17,22 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ LEFT OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) value) c2)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) key) 10) (< (. (TOK_TABLE_OR_COL src1) key) 20))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src src2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value) c4)) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src2) key) 15) (< (. (TOK_TABLE_OR_COL src2) key) 25))))) b) (= (. (TOK_TABLE_OR_COL a) c1) (. (TOK_TABLE_OR_COL b) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c1) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) c2) c2) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c3) c3) 
 (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) c4) c4)))) c)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c2)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c3)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) c4))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL c) c3)) (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL c) c1))))))
 
@@ -153,7 +173,23 @@
               name: dest1
 
 
-query: FROM (
+PREHOOK: query: FROM (
+ FROM 
+  (
+  FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
+  ) a
+ LEFT OUTER JOIN 
+ (
+  FROM src src2 SELECT src2.key AS c3, src2.value AS c4 WHERE src2.key > 15 and src2.key < 25
+ ) b 
+ ON (a.c1 = b.c3)
+ SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
+) c
+INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM (
  FROM 
   (
   FROM src src1 SELECT src1.key AS c1, src1.value AS c2 WHERE src1.key > 10 and src1.key < 20
@@ -166,11 +202,17 @@
  SELECT a.c1 AS c1, a.c2 AS c2, b.c3 AS c3, b.c4 AS c4
 ) c
 INSERT OVERWRITE TABLE dest1 SELECT c.c1, c.c2, c.c3, c.c4 where c.c3 IS NULL AND c.c1 IS NOT NULL
-Input: default/src
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/2034601866/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1940291969/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1940291969/10000
 11	val_11	NULL	NULL
 12	val_12	NULL	NULL
 12	val_12	NULL	NULL

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out Tue Sep 29 01:25:15 2009
@@ -1,7 +1,16 @@
-query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
-query: EXPLAIN EXTENDED
+PREHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN EXTENDED
 FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
 INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12'
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12'
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcpart src1) (TOK_TABREF src src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL src1) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL src1) hr) '12')))))
 
@@ -54,10 +63,10 @@
                       type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src 
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src [src2]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [src1]
       Path -> Partition:
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -72,10 +81,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/src
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -94,7 +103,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
       Reduce Operator Tree:
@@ -127,7 +136,7 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 1
-                  directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/863266158/10000
+                  directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/408629641/10000
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -141,7 +150,7 @@
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         file.inputformat org.apache.hadoop.mapred.TextInputFormat
                         file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
+                        location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest1
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest1
 
@@ -149,7 +158,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/863266158/10000
+          source: file:/data/users/njain/hive5/hive5/build/ql/tmp/408629641/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -163,20 +172,32 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest1
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest1
-          tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/863266158/10001
+          tmp directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/408629641/10001
 
 
-query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
+PREHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
+INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@dest1
+POSTHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key)
 INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12'
-Input: default/src
-Input: default/srcpart/ds=2008-04-08/hr=12
-Output: default/dest1
-query: SELECT dest1.* FROM dest1
-Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/367697399/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@dest1
+PREHOOK: query: SELECT dest1.* FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/731400018/10000
+POSTHOOK: query: SELECT dest1.* FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/731400018/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join_hive_626.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join_hive_626.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join_hive_626.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join_hive_626.q.out Tue Sep 29 01:25:15 2009
@@ -1,20 +1,63 @@
-query: drop table hive_foo
-query: drop table hive_bar
-query: drop table hive_count
-query: create table hive_foo (foo_id int, foo_name string, foo_a string, foo_b string, 
+PREHOOK: query: drop table hive_foo
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_foo
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table hive_bar
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_bar
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table hive_count
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_count
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table hive_foo (foo_id int, foo_name string, foo_a string, foo_b string, 
 foo_c string, foo_d string) row format delimited fields terminated by ','
 stored as textfile
-query: create table hive_bar (bar_id int, bar_0 int, foo_id int, bar_1 int, bar_name
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table hive_foo (foo_id int, foo_name string, foo_a string, foo_b string, 
+foo_c string, foo_d string) row format delimited fields terminated by ','
+stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@hive_foo
+PREHOOK: query: create table hive_bar (bar_id int, bar_0 int, foo_id int, bar_1 int, bar_name
+string, bar_a string, bar_b string, bar_c string, bar_d string) row format 
+delimited fields terminated by ',' stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table hive_bar (bar_id int, bar_0 int, foo_id int, bar_1 int, bar_name
 string, bar_a string, bar_b string, bar_c string, bar_d string) row format 
 delimited fields terminated by ',' stored as textfile
-query: create table hive_count (bar_id int, n int) row format delimited fields 
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@hive_bar
+PREHOOK: query: create table hive_count (bar_id int, n int) row format delimited fields 
 terminated by ',' stored as textfile
-query: load data local inpath '../data/files/hive_626_foo.txt' overwrite into table hive_foo
-query: load data local inpath '../data/files/hive_626_bar.txt' overwrite into table hive_bar
-query: load data local inpath '../data/files/hive_626_count.txt' overwrite into table hive_count
-query: explain
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table hive_count (bar_id int, n int) row format delimited fields 
+terminated by ',' stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@hive_count
+PREHOOK: query: load data local inpath '../data/files/hive_626_foo.txt' overwrite into table hive_foo
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/hive_626_foo.txt' overwrite into table hive_foo
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@hive_foo
+PREHOOK: query: load data local inpath '../data/files/hive_626_bar.txt' overwrite into table hive_bar
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/hive_626_bar.txt' overwrite into table hive_bar
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@hive_bar
+PREHOOK: query: load data local inpath '../data/files/hive_626_count.txt' overwrite into table hive_count
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/hive_626_count.txt' overwrite into table hive_count
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@hive_count
+PREHOOK: query: explain
+select hive_foo.foo_name, hive_bar.bar_name, n from hive_foo join hive_bar on hive_foo.foo_id =
+hive_bar.foo_id join hive_count on hive_count.bar_id = hive_bar.bar_id
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
 select hive_foo.foo_name, hive_bar.bar_name, n from hive_foo join hive_bar on hive_foo.foo_id =
 hive_bar.foo_id join hive_count on hive_count.bar_id = hive_bar.bar_id
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF hive_foo) (TOK_TABREF hive_bar) (= (. (TOK_TABLE_OR_COL hive_foo) foo_id) (. (TOK_TABLE_OR_COL hive_bar) foo_id))) (TOK_TABREF hive_count) (= (. (TOK_TABLE_OR_COL hive_count) bar_id) (. (TOK_TABLE_OR_COL hive_bar) bar_id)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL hive_foo) foo_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL hive_bar) bar_name)) (TOK_SELEXPR (TOK_TABLE_OR_COL n)))))
 
@@ -136,13 +179,33 @@
       limit: -1
 
 
-query: select hive_foo.foo_name, hive_bar.bar_name, n from hive_foo join hive_bar on hive_foo.foo_id =
+PREHOOK: query: select hive_foo.foo_name, hive_bar.bar_name, n from hive_foo join hive_bar on hive_foo.foo_id =
+hive_bar.foo_id join hive_count on hive_count.bar_id = hive_bar.bar_id
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hive_foo
+PREHOOK: Input: default@hive_count
+PREHOOK: Input: default@hive_bar
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1875895607/10000
+POSTHOOK: query: select hive_foo.foo_name, hive_bar.bar_name, n from hive_foo join hive_bar on hive_foo.foo_id =
 hive_bar.foo_id join hive_count on hive_count.bar_id = hive_bar.bar_id
-Input: default/hive_foo
-Input: default/hive_count
-Input: default/hive_bar
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/168017101/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hive_foo
+POSTHOOK: Input: default@hive_count
+POSTHOOK: Input: default@hive_bar
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/1875895607/10000
 foo1	bar10	2
-query: drop table hive_foo
-query: drop table hive_bar
-query: drop table hive_count
+PREHOOK: query: drop table hive_foo
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_foo
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@hive_foo
+PREHOOK: query: drop table hive_bar
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_bar
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@hive_bar
+PREHOOK: query: drop table hive_count
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table hive_count
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@hive_count

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join_map_ppr.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join_map_ppr.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join_map_ppr.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join_map_ppr.q.out Tue Sep 29 01:25:15 2009
@@ -1,10 +1,22 @@
-query: CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE
-query: EXPLAIN EXTENDED
+PREHOOK: query: CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest_j1
+PREHOOK: query: EXPLAIN EXTENDED
 INSERT OVERWRITE TABLE dest_j1
 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value
 FROM src1 x JOIN src y ON (x.key = y.key) 
 JOIN srcpart z ON (x.key = z.key)
 WHERE z.ds='2008-04-08' and z.hr=11
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+INSERT OVERWRITE TABLE dest_j1
+SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value
+FROM src1 x JOIN src y ON (x.key = y.key) 
+JOIN srcpart z ON (x.key = z.key)
+WHERE z.ds='2008-04-08' and z.hr=11
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF src1 x) (TOK_TABREF src y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key))) (TOK_TABREF srcpart z) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL z) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest_j1)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST x y))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL z) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) value))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL z) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL z) hr) 11)))))
 
@@ -69,7 +81,7 @@
                       File Output Operator
                         compressed: false
                         GlobalTableId: 1
-                        directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/523022753/10002
+                        directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1154503161/10002
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -83,7 +95,7 @@
                               serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               file.inputformat org.apache.hadoop.mapred.TextInputFormat
                               file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                              location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                              location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                             name: dest_j1
       Local Work:
@@ -143,7 +155,7 @@
                         File Output Operator
                           compressed: false
                           GlobalTableId: 1
-                          directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/523022753/10002
+                          directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1154503161/10002
                           table:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -157,7 +169,7 @@
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: dest_j1
             x 
@@ -207,7 +219,7 @@
                         File Output Operator
                           compressed: false
                           GlobalTableId: 1
-                          directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/523022753/10002
+                          directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1154503161/10002
                           table:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -221,14 +233,14 @@
                                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                               name: dest_j1
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [z]
       Path -> Partition:
-        file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -247,7 +259,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
@@ -257,11 +269,11 @@
           Move Operator
             files:
                 hdfs directory: true
-                source: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/523022753/10002
-                destination: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/627855363/10000
+                source: file:/data/users/njain/hive5/hive5/build/ql/tmp/1154503161/10002
+                destination: file:/data/users/njain/hive5/hive5/build/ql/tmp/1394964157/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/523022753/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/1154503161/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -277,9 +289,9 @@
                           type: string
             Needs Tagging: false
             Path -> Alias:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/523022753/10002 [file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/523022753/10002]
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/1154503161/10002 [file:/data/users/njain/hive5/hive5/build/ql/tmp/1154503161/10002]
             Path -> Partition:
-              file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/523022753/10002 
+              file:/data/users/njain/hive5/hive5/build/ql/tmp/1154503161/10002 
                 Partition
                 
                     input format: org.apache.hadoop.mapred.TextInputFormat
@@ -294,7 +306,7 @@
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       file.inputformat org.apache.hadoop.mapred.TextInputFormat
                       file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                      location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest_j1
             Reduce Operator Tree:
@@ -302,7 +314,7 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/627855363/10000
+                  directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1394964157/10000
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -315,7 +327,7 @@
                         bucket_count -1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                        location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                        location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
                         file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest_j1
@@ -324,7 +336,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/627855363/10000
+          source: file:/data/users/njain/hive5/hive5/build/ql/tmp/1394964157/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -338,24 +350,40 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/dest_j1
+                location file:/data/users/njain/hive5/hive5/build/ql/test/data/warehouse/dest_j1
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest_j1
-          tmp directory: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/627855363/10001
+          tmp directory: file:/data/users/njain/hive5/hive5/build/ql/tmp/1394964157/10001
 
 
-query: INSERT OVERWRITE TABLE dest_j1
+PREHOOK: query: INSERT OVERWRITE TABLE dest_j1
+SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value
+FROM src1 x JOIN src y ON (x.key = y.key) 
+JOIN srcpart z ON (x.key = z.key)
+WHERE z.ds='2008-04-08' and z.hr=11
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@src
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@dest_j1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1
 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value
 FROM src1 x JOIN src y ON (x.key = y.key) 
 JOIN srcpart z ON (x.key = z.key)
 WHERE z.ds='2008-04-08' and z.hr=11
-Input: default/srcpart/ds=2008-04-08/hr=11
-Input: default/src
-Input: default/src1
-Output: default/dest_j1
-query: select * from dest_j1 x order by x.key
-Input: default/dest_j1
-Output: file:/data/users/zshao/tools/699-trunk-apache-hive/.ptest_1/build/ql/tmp/1625644055/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@dest_j1
+PREHOOK: query: select * from dest_j1 x order by x.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest_j1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/797537756/10000
+POSTHOOK: query: select * from dest_j1 x order by x.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest_j1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/797537756/10000
 128	val_128	val_128
 128	val_128	val_128
 128	val_128	val_128
@@ -463,4 +491,8 @@
 98	val_98	val_98
 98	val_98	val_98
 98	val_98	val_98
-query: drop table dest_j1
+PREHOOK: query: drop table dest_j1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table dest_j1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@dest_j1

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join_rc.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join_rc.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join_rc.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join_rc.q.out Tue Sep 29 01:25:15 2009
@@ -1,16 +1,45 @@
-query: drop table join_rc1
-query: drop table join_rc2
-query: create table join_rc1(key string, value string) stored as RCFile
-query: create table join_rc2(key string, value string) stored as RCFile
-query: insert overwrite table join_rc1 select * from src
-Input: default/src
-Output: default/join_rc1
-query: insert overwrite table join_rc2 select * from src
-Input: default/src
-Output: default/join_rc2
-query: explain
+PREHOOK: query: drop table join_rc1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table join_rc1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table join_rc2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table join_rc2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table join_rc1(key string, value string) stored as RCFile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table join_rc1(key string, value string) stored as RCFile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@join_rc1
+PREHOOK: query: create table join_rc2(key string, value string) stored as RCFile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table join_rc2(key string, value string) stored as RCFile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@join_rc2
+PREHOOK: query: insert overwrite table join_rc1 select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@join_rc1
+POSTHOOK: query: insert overwrite table join_rc1 select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@join_rc1
+PREHOOK: query: insert overwrite table join_rc2 select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@join_rc2
+POSTHOOK: query: insert overwrite table join_rc2 select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@join_rc2
+PREHOOK: query: explain
 select join_rc1.key, join_rc2.value
 FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select join_rc1.key, join_rc2.value
+FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF join_rc1) (TOK_TABREF join_rc2) (= (. (TOK_TABLE_OR_COL join_rc1) key) (. (TOK_TABLE_OR_COL join_rc2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL join_rc1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL join_rc2) value)))))
 
@@ -79,11 +108,18 @@
       limit: -1
 
 
-query: select join_rc1.key, join_rc2.value
+PREHOOK: query: select join_rc1.key, join_rc2.value
+FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@join_rc2
+PREHOOK: Input: default@join_rc1
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/476047237/10000
+POSTHOOK: query: select join_rc1.key, join_rc2.value
 FROM join_rc1 JOIN join_rc2 ON join_rc1.key = join_rc2.key
-Input: default/join_rc2
-Input: default/join_rc1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/206616711/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@join_rc2
+POSTHOOK: Input: default@join_rc1
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/476047237/10000
 0	val_0
 0	val_0
 0	val_0
@@ -1112,5 +1148,13 @@
 98	val_98
 98	val_98
 98	val_98
-query: drop table join_rc1
-query: drop table join_rc2
+PREHOOK: query: drop table join_rc1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table join_rc1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@join_rc1
+PREHOOK: query: drop table join_rc2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table join_rc2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@join_rc2

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join_thrift.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join_thrift.q.out?rev=819792&r1=819791&r2=819792&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join_thrift.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join_thrift.q.out Tue Sep 29 01:25:15 2009
@@ -1,15 +1,25 @@
-query: DESCRIBE src_thrift
+PREHOOK: query: DESCRIBE src_thrift
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE src_thrift
+POSTHOOK: type: DESCTABLE
 aint	int	from deserializer
 astring	string	from deserializer
 lint	array<int>	from deserializer
 lstring	array<string>	from deserializer
 lintstring	array<org.apache.hadoop.hive.serde2.thrift.test.IntString>	from deserializer
 mstringstring	map<string,string>	from deserializer
-query: EXPLAIN
+PREHOOK: query: EXPLAIN
 SELECT s1.aint, s2.lintstring
 FROM src_thrift s1
 JOIN src_thrift s2
 ON s1.aint = s2.aint
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT s1.aint, s2.lintstring
+FROM src_thrift s1
+JOIN src_thrift s2
+ON s1.aint = s2.aint
+POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF src_thrift s1) (TOK_TABREF src_thrift s2) (= (. (TOK_TABLE_OR_COL s1) aint) (. (TOK_TABLE_OR_COL s2) aint)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL s1) aint)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL s2) lintstring)))))
 
@@ -78,12 +88,20 @@
       limit: -1
 
 
-query: SELECT s1.aint, s2.lintstring
+PREHOOK: query: SELECT s1.aint, s2.lintstring
+FROM src_thrift s1
+JOIN src_thrift s2
+ON s1.aint = s2.aint
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_thrift
+PREHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/895447236/10000
+POSTHOOK: query: SELECT s1.aint, s2.lintstring
 FROM src_thrift s1
 JOIN src_thrift s2
 ON s1.aint = s2.aint
-Input: default/src_thrift
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/589957555/10000
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_thrift
+POSTHOOK: Output: file:/data/users/njain/hive5/hive5/build/ql/tmp/895447236/10000
 -1952710710	[{"myint":25,"mystring":"125","underscore_int":5}]
 -1461153973	[{"myint":49,"mystring":"343","underscore_int":7}]
 -751827638	[{"myint":4,"mystring":"8","underscore_int":2}]


