hive-commits mailing list archives

From: sd...@apache.org
Subject: svn commit: r1145411 [2/2] - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/parse/ test/queries/clientnegative/ test/queries/clientpositive/ test/results/clientnegative/ test/results/clientpositive/ test/resu...
Date: Tue, 12 Jul 2011 00:46:54 GMT
Added: hive/trunk/ql/src/test/results/clientpositive/insert_into4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/insert_into4.q.out?rev=1145411&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/insert_into4.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/insert_into4.q.out Tue Jul 12 00:46:53 2011
@@ -0,0 +1,353 @@
+PREHOOK: query: DROP TABLE insert_into4a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into4a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE insert_into4b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into4b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE insert_into4a (key int, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE insert_into4a (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@insert_into4a
+PREHOOK: query: CREATE TABLE insert_into4b (key int, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE insert_into4b (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@insert_into4b
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into4a))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              Limit
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          Limit
+            Select Operator
+              expressions:
+                    expr: UDFToInteger(_col0)
+                    type: int
+                    expr: _col1
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.insert_into4a
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into4a
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+
+PREHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into4a
+POSTHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into4a
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into4a
+PREHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-43-53_842_4827314529931121961/-mr-10000
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into4a
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-43-53_842_4827314529931121961/-mr-10000
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+-826625916
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into4a))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              Limit
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          Limit
+            Select Operator
+              expressions:
+                    expr: UDFToInteger(_col0)
+                    type: int
+                    expr: _col1
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.insert_into4a
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into4a
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+
+PREHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into4a
+POSTHOOK: query: INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into4a
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into4a
+PREHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-01_693_9165512113663581328/-mr-10000
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into4a
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-01_693_9165512113663581328/-mr-10000
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+-1653251832
+PREHOOK: query: --At this point insert_into4a has 2 files (if INSERT INTO merges isn't fixed)
+
+EXPLAIN INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
+PREHOOK: type: QUERY
+POSTHOOK: query: --At this point insert_into4a has 2 files (if INSERT INTO merges isn't fixed)
+
+EXPLAIN INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert_into4a))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into4b))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        insert_into4a 
+          TableScan
+            alias: insert_into4a
+            Select Operator
+              expressions:
+                    expr: key
+                    type: int
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.insert_into4b
+
+  Stage: Stage-5
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+          destination: pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-04_977_134122812108754380/-ext-10000
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into4b
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-04_977_134122812108754380/-ext-10002 
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.insert_into4b
+
+
+PREHOOK: query: INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into4a
+PREHOOK: Output: default@insert_into4b
+POSTHOOK: query: INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into4a
+POSTHOOK: Output: default@insert_into4b
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4b.key SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into4b.value SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into4b
+PREHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-09_414_913152519785640895/-mr-10000
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into4b
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-09_414_913152519785640895/-mr-10000
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4b.key SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into4b.value SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:value, type:string, comment:null), ]
+-1653251832
+PREHOOK: query: DROP TABLE insert_into4a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into4a
+PREHOOK: Output: default@insert_into4a
+POSTHOOK: query: DROP TABLE insert_into4a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into4a
+POSTHOOK: Output: default@insert_into4a
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4b.key SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into4b.value SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: DROP TABLE insert_into4b
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into4b
+PREHOOK: Output: default@insert_into4b
+POSTHOOK: query: DROP TABLE insert_into4b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into4b
+POSTHOOK: Output: default@insert_into4b
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4a.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into4b.key SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into4b.value SIMPLE [(insert_into4a)insert_into4a.FieldSchema(name:value, type:string, comment:null), ]
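For readers skimming the golden output above, the insert_into4 test boils down to the following HiveQL sequence (reconstructed from the PREHOOK/POSTHOOK queries; this summary is not part of the committed file): append twice into the same unpartitioned table, verify the row checksum doubles, then copy into a second table.

    CREATE TABLE insert_into4a (key int, value string);
    CREATE TABLE insert_into4b (key int, value string);
    INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;   -- first append: 10 rows (checksum -826625916)
    INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;   -- INSERT INTO appends, so 20 rows now (checksum -1653251832)
    INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a;  -- copy reproduces the same checksum as insert_into4a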

Added: hive/trunk/ql/src/test/results/clientpositive/insert_into5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/insert_into5.q.out?rev=1145411&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/insert_into5.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/insert_into5.q.out Tue Jul 12 00:46:53 2011
@@ -0,0 +1,463 @@
+PREHOOK: query: DROP TABLE insert_into5a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into5a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE insert_into5b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into5b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE insert_into5a (key int, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE insert_into5a (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@insert_into5a
+PREHOOK: query: CREATE TABLE insert_into5b (key int, value string) PARTITIONED BY (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE insert_into5b (key int, value string) PARTITIONED BY (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@insert_into5b
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into5a))) (TOK_SELECT (TOK_SELEXPR 1) (TOK_SELEXPR 'one')) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: 1
+                    type: int
+                    expr: 'one'
+                    type: string
+              outputColumnNames: _col0, _col1
+              Limit
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: int
+                        expr: _col1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          Limit
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.insert_into5a
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into5a
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+
+PREHOOK: query: INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into5a
+POSTHOOK: query: INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into5a
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-17_923_6176118754536072048/-mr-10000
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-17_923_6176118754536072048/-mr-10000
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+481928560
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert_into5a))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into5a))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        insert_into5a 
+          TableScan
+            alias: insert_into5a
+            Select Operator
+              expressions:
+                    expr: key
+                    type: int
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.insert_into5a
+
+  Stage: Stage-5
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+          destination: pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-21_154_3370686062288658221/-ext-10000
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into5a
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-21_154_3370686062288658221/-ext-10002 
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.insert_into5a
+
+
+PREHOOK: query: INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: default@insert_into5a
+POSTHOOK: query: INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: default@insert_into5a
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-25_647_5025300298938986801/-mr-10000
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-25_647_5025300298938986801/-mr-10000
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+963857120
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
+  SELECT * FROM insert_into5a
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
+  SELECT * FROM insert_into5a
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert_into5a))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into5b) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        insert_into5a 
+          TableScan
+            alias: insert_into5a
+            Select Operator
+              expressions:
+                    expr: key
+                    type: int
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.insert_into5b
+
+  Stage: Stage-5
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+          destination: pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-29_142_2886364868937075209/-ext-10000
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 1
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into5b
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-29_142_2886364868937075209/-ext-10002 
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.insert_into5b
+
+
+PREHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') SELECT * FROM insert_into5a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') SELECT * FROM insert_into5a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5b@ds=1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-34_795_7519132910183003017/-mr-10000
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5b@ds=1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-34_795_7519132910183003017/-mr-10000
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+-18626052920
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1')
+  SELECT key, value FROM insert_into5b
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into5b PARTITION (ds='1')
+  SELECT key, value FROM insert_into5b
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert_into5b))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into5b) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        insert_into5b 
+          TableScan
+            alias: insert_into5b
+            Select Operator
+              expressions:
+                    expr: key
+                    type: int
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.insert_into5b
+
+  Stage: Stage-5
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+          destination: pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-38_473_3874374994199534320/-ext-10000
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 1
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into5b
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-38_473_3874374994199534320/-ext-10002 
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.insert_into5b
+
+
+PREHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
+  SELECT key, value FROM insert_into5b
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5b@ds=1
+PREHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: query: INSERT INTO TABLE insert_into5b PARTITION (ds='1') 
+  SELECT key, value FROM insert_into5b
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5b@ds=1
+POSTHOOK: Output: default@insert_into5b@ds=1
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into5b@ds=1
+PREHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-43_118_6250546739034036776/-mr-10000
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into5b@ds=1
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-43_118_6250546739034036776/-mr-10000
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+-37252105840
+PREHOOK: query: DROP TABLE insert_into5a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into5a
+PREHOOK: Output: default@insert_into5a
+POSTHOOK: query: DROP TABLE insert_into5a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into5a
+POSTHOOK: Output: default@insert_into5a
+POSTHOOK: Lineage: insert_into5a.key SIMPLE []
+POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5a.value SIMPLE []
+POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ]
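Similarly, the insert_into5 test exercises INSERT INTO against the source table itself and against a static partition; the sequence behind the output above is roughly (again reconstructed from the queries, not part of the committed file):

    CREATE TABLE insert_into5a (key int, value string);
    CREATE TABLE insert_into5b (key int, value string) PARTITIONED BY (ds string);
    INSERT INTO TABLE insert_into5a SELECT 1, 'one' FROM src LIMIT 10;               -- 10 constant rows (checksum 481928560)
    INSERT INTO TABLE insert_into5a SELECT * FROM insert_into5a;                     -- self-append doubles the table (963857120)
    INSERT INTO TABLE insert_into5b PARTITION (ds='1') SELECT * FROM insert_into5a;  -- populate partition ds=1
    INSERT INTO TABLE insert_into5b PARTITION (ds='1')                               -- second append into the same partition
      SELECT key, value FROM insert_into5b;                                          -- checksum doubles again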

Added: hive/trunk/ql/src/test/results/clientpositive/insert_into6.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/insert_into6.q.out?rev=1145411&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/insert_into6.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/insert_into6.q.out Tue Jul 12 00:46:53 2011
@@ -0,0 +1,303 @@
+PREHOOK: query: DROP TABLE insert_into6a
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into6a
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE insert_into6b
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert_into6b
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE insert_into6a (key int, value string) PARTITIONED BY (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE insert_into6a (key int, value string) PARTITIONED BY (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@insert_into6a
+PREHOOK: query: CREATE TABLE insert_into6b (key int, value string) PARTITIONED BY (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE insert_into6b (key int, value string) PARTITIONED BY (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@insert_into6b
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6a PARTITION (ds='1') 
+    SELECT * FROM src LIMIT 150
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6a PARTITION (ds='1') 
+    SELECT * FROM src LIMIT 150
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into6a) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 150)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: _col0, _col1
+              Limit
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          Limit
+            Select Operator
+              expressions:
+                    expr: UDFToInteger(_col0)
+                    type: int
+                    expr: _col1
+                    type: string
+              outputColumnNames: _col0, _col1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.insert_into6a
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 1
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into6a
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+
+PREHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into6a@ds=1
+POSTHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into6a@ds=1
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='2') SELECT * FROM src LIMIT 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert_into6a@ds=2
+POSTHOOK: query: INSERT INTO TABLE insert_into6a PARTITION (ds='2') SELECT * FROM src LIMIT 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert_into6a@ds=2
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6a
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into6a@ds=1
+PREHOOK: Input: default@insert_into6a@ds=2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-56_119_7127779350945311846/-mr-10000
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6a
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into6a@ds=1
+POSTHOOK: Input: default@insert_into6a@ds=2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-44-56_119_7127779350945311846/-mr-10000
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+-35226404960
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6b PARTITION (ds) 
+    SELECT * FROM insert_into6a
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into6b PARTITION (ds) 
+    SELECT * FROM insert_into6a
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert_into6a))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into6b) (TOK_PARTSPEC (TOK_PARTVAL ds)))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        insert_into6a 
+          TableScan
+            alias: insert_into6a
+            Select Operator
+              expressions:
+                    expr: key
+                    type: int
+                    expr: value
+                    type: string
+                    expr: ds
+                    type: string
+              outputColumnNames: _col0, _col1, _col2
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.insert_into6b
+
+  Stage: Stage-5
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+          destination: pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-59_517_7330492427149801802/-ext-10000
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.insert_into6b
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        pfile:/data/users/franklin/hive-insert-into/build/ql/scratchdir/hive_2011-06-06_17-44-59_517_7330492427149801802/-ext-10002 
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.insert_into6b
+
+
+PREHOOK: query: INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into6a@ds=1
+PREHOOK: Input: default@insert_into6a@ds=2
+PREHOOK: Output: default@insert_into6b
+POSTHOOK: query: INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into6a@ds=1
+POSTHOOK: Input: default@insert_into6a@ds=2
+POSTHOOK: Output: default@insert_into6b@ds=1
+POSTHOOK: Output: default@insert_into6b@ds=2
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6b
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert_into6b@ds=1
+PREHOOK: Input: default@insert_into6b@ds=2
+PREHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-45-06_245_73434261146167363/-mr-10000
+POSTHOOK: query: SELECT SUM(HASH(c)) FROM (
+    SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6b
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert_into6b@ds=1
+POSTHOOK: Input: default@insert_into6b@ds=2
+POSTHOOK: Output: file:/tmp/franklin/hive_2011-06-06_17-45-06_245_73434261146167363/-mr-10000
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+-35226404960
+PREHOOK: query: SHOW PARTITIONS insert_into6b
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: SHOW PARTITIONS insert_into6b
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+ds=1
+ds=2
+PREHOOK: query: DROP TABLE insert_into6a
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into6a
+PREHOOK: Output: default@insert_into6a
+POSTHOOK: query: DROP TABLE insert_into6a
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into6a
+POSTHOOK: Output: default@insert_into6a
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: DROP TABLE insert_into6b
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert_into6b
+PREHOOK: Output: default@insert_into6b
+POSTHOOK: query: DROP TABLE insert_into6b
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert_into6b
+POSTHOOK: Output: default@insert_into6b
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=1).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).key SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:key, type:int, comment:null), ]
+POSTHOOK: Lineage: insert_into6b PARTITION(ds=2).value SIMPLE [(insert_into6a)insert_into6a.FieldSchema(name:value, type:string, comment:null), ]
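Finally, insert_into6 covers appending through dynamic partitions. The flow, reconstructed from the queries above (the committed .q file presumably also enables dynamic partitioning, e.g. hive.exec.dynamic.partition.mode=nonstrict, which this diff does not show), is:

    CREATE TABLE insert_into6a (key int, value string) PARTITIONED BY (ds string);
    CREATE TABLE insert_into6b (key int, value string) PARTITIONED BY (ds string);
    INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150;
    INSERT INTO TABLE insert_into6a PARTITION (ds='2') SELECT * FROM src LIMIT 100;
    -- dynamic-partition append: ds is taken from the last column of the SELECT
    INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a;
    SHOW PARTITIONS insert_into6b;   -- ds=1 and ds=2; checksum matches insert_into6a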

Modified: hive/trunk/ql/src/test/results/compiler/errors/missing_overwrite.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/errors/missing_overwrite.q.out?rev=1145411&r1=1145410&r2=1145411&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/errors/missing_overwrite.q.out (original)
+++ hive/trunk/ql/src/test/results/compiler/errors/missing_overwrite.q.out Tue Jul 12 00:46:53 2011
@@ -1 +1 @@
-Parse Error: line 2:7 mismatched input 'TABLE' expecting OVERWRITE near 'INSERT' in insert clause
+Parse Error: line 2:0 cannot recognize input near 'INSERT' 'TABLE' 'dest1' in insert clause
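The last hunk updates the expected message for the existing negative test: now that the grammar accepts INSERT INTO, a statement that supplies neither OVERWRITE nor INTO fails with a generic recognition error at the INSERT keyword rather than "expecting OVERWRITE". A hypothetical statement of the shape the test exercises (the actual missing_overwrite.q contents are not shown in this diff):

    -- rejected both before and after this change, but with different parse errors
    INSERT TABLE dest1
    SELECT * FROM src;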


