hive-commits mailing list archives

From hashut...@apache.org
Subject svn commit: r1552857 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/parse/ test/queries/clientnegative/ test/queries/clientpositive/ test/results/clientnegative/ test/results/clientpositive/
Date Sat, 21 Dec 2013 01:26:59 GMT
Author: hashutosh
Date: Sat Dec 21 01:26:59 2013
New Revision: 1552857

URL: http://svn.apache.org/r1552857
Log:
HIVE-5558 : Support alternate join syntax (Harish Butani via Ashutosh Chauhan)
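
In short: a comma between table sources in the FROM clause now parses as an inner join (the COMMA token is rewritten to TOK_JOIN), while an ON clause is still only accepted after an explicit join keyword, never after a comma. As a quick illustration, adapted from the test queries added in this commit (the second statement is the new clientnegative case and fails with a ParseException):

    -- accepted: comma-separated sources produce a plain TOK_JOIN,
    -- with the join condition supplied in the WHERE clause
    explain select p1.p_name, p2.p_name
    from part p1, part p2
    where p1.p_name = p2.p_name;

    -- rejected: ON directly after a comma join is a parse error
    explain select *
    from src s1, src s2 on s1.key = s2.key;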

Added:
    hive/trunk/ql/src/test/queries/clientnegative/join_alt_syntax_comma_on.q
    hive/trunk/ql/src/test/queries/clientpositive/join_alt_syntax.q
    hive/trunk/ql/src/test/results/clientnegative/join_alt_syntax_comma_on.q.out
    hive/trunk/ql/src/test/results/clientpositive/join_alt_syntax.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g?rev=1552857&r1=1552856&r2=1552857&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g Sat Dec 21 01:26:59 2013
@@ -86,7 +86,7 @@ fromClause
 joinSource
 @init { gParent.msgs.push("join source"); }
 @after { gParent.msgs.pop(); }
-    : fromSource ( joinToken^ fromSource (KW_ON! expression)? )*
+    : fromSource ( joinToken^ fromSource ( KW_ON! expression {$joinToken.start.getType() != COMMA}? )? )*
     | uniqueJoinToken^ uniqueJoinSource (COMMA! uniqueJoinSource)+
     ;
 
@@ -114,6 +114,7 @@ joinToken
     :
       KW_JOIN                      -> TOK_JOIN
     | KW_INNER KW_JOIN             -> TOK_JOIN
+    | COMMA                        -> TOK_JOIN
     | KW_CROSS KW_JOIN             -> TOK_CROSSJOIN
     | KW_LEFT  (KW_OUTER)? KW_JOIN -> TOK_LEFTOUTERJOIN
     | KW_RIGHT (KW_OUTER)? KW_JOIN -> TOK_RIGHTOUTERJOIN
@@ -125,10 +126,10 @@ lateralView
 @init {gParent.msgs.push("lateral view"); }
 @after {gParent.msgs.pop(); }
 	:
-	KW_LATERAL KW_VIEW KW_OUTER function tableAlias (KW_AS identifier (COMMA identifier)*)?
+	KW_LATERAL KW_VIEW KW_OUTER function tableAlias (KW_AS identifier ((COMMA)=> COMMA identifier)*)?
 	-> ^(TOK_LATERAL_VIEW_OUTER ^(TOK_SELECT ^(TOK_SELEXPR function identifier* tableAlias)))
 	|
-	KW_LATERAL KW_VIEW function tableAlias (KW_AS identifier (COMMA identifier)*)?
+	KW_LATERAL KW_VIEW function tableAlias (KW_AS identifier ((COMMA)=> COMMA identifier)*)?
 	-> ^(TOK_LATERAL_VIEW ^(TOK_SELECT ^(TOK_SELEXPR function identifier* tableAlias)))
 	;
 
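Taken together, the grammar edits above do three things: COMMA is added as a joinToken alternative rewritten to TOK_JOIN; the semantic predicate {$joinToken.start.getType() != COMMA}? makes the KW_ON branch fail when the preceding join token was a comma (which is what surfaces as the ParseException in the new clientnegative test); and the (COMMA)=> syntactic predicates in lateralView appear to be there to resolve the nondeterminism that COMMA-as-a-join-token introduces after the KW_AS identifier list, committing to the greedy alias-list reading. A small sketch of that last point, using hypothetical table and column names:

    -- With COMMA now also a join token, a comma after "as c1" could either
    -- continue the alias list or start a new comma-joined source; the
    -- (COMMA)=> predicate commits to the alias-list reading, so c1 and c2
    -- below are both column aliases for the exploded map.
    select t.c1, t.c2
    from src lateral view explode(kv_map) t as c1, c2;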

Added: hive/trunk/ql/src/test/queries/clientnegative/join_alt_syntax_comma_on.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/join_alt_syntax_comma_on.q?rev=1552857&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/join_alt_syntax_comma_on.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/join_alt_syntax_comma_on.q Sat Dec 21 01:26:59 2013
@@ -0,0 +1,3 @@
+explain select *
+from src s1 , 
+src s2 on s1.key = s2.key;
\ No newline at end of file

Added: hive/trunk/ql/src/test/queries/clientpositive/join_alt_syntax.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/join_alt_syntax.q?rev=1552857&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/join_alt_syntax.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/join_alt_syntax.q Sat Dec 21 01:26:59 2013
@@ -0,0 +1,41 @@
+DROP TABLE part;
+
+-- data setup
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part;
+
+explain select p1.p_name, p2.p_name
+from part p1 , part p2;
+
+explain select p1.p_name, p2.p_name, p3.p_name
+from part p1 ,part p2 ,part p3 
+where p1.p_name = p2.p_name and p2.p_name = p3.p_name;
+
+explain select p1.p_name, p2.p_name, p3.p_name
+from part p1 , (select p_name from part) p2 ,part p3 
+where p1.p_name = p2.p_name and p2.p_name = p3.p_name;
+
+explain select p1.p_name, p2.p_name, p3.p_name
+from part p1 , part p2 , part p3 
+where p2.p_partkey + p1.p_partkey = p1.p_partkey and p3.p_name = p2.p_name;
+
+explain select p1.p_name, p2.p_name, p3.p_name, p4.p_name
+from part p1 , part p2 join part p3 on p2.p_name = p1.p_name join part p4 
+where p2.p_name = p3.p_name and p1.p_partkey = p4.p_partkey 
+            and p1.p_partkey = p2.p_partkey;
+            
+explain select p1.p_name, p2.p_name, p3.p_name, p4.p_name
+from part p1 join part p2 on p2.p_name = p1.p_name , part p3  , part p4 
+where p2.p_name = p3.p_name and p1.p_partkey = p4.p_partkey 
+            and p1.p_partkey = p2.p_partkey;
\ No newline at end of file

Added: hive/trunk/ql/src/test/results/clientnegative/join_alt_syntax_comma_on.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/join_alt_syntax_comma_on.q.out?rev=1552857&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/join_alt_syntax_comma_on.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/join_alt_syntax_comma_on.q.out Sat Dec 21 01:26:59 2013
@@ -0,0 +1 @@
+FAILED: ParseException line 3:25 Failed to recognize predicate '<EOF>'. Failed rule: 'joinSource' in join source

Added: hive/trunk/ql/src/test/results/clientpositive/join_alt_syntax.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join_alt_syntax.q.out?rev=1552857&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/join_alt_syntax.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/join_alt_syntax.q.out Sat Dec 21 01:26:59 2013
@@ -0,0 +1,870 @@
+PREHOOK: query: DROP TABLE part
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE part
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: -- data setup
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- data setup
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
+PREHOOK: type: LOAD
+PREHOOK: Output: default@part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@part
+PREHOOK: query: explain select p1.p_name, p2.p_name
+from part p1 , part p2
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select p1.p_name, p2.p_name
+from part p1 , part p2
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME part) p1) (TOK_TABREF (TOK_TABNAME part) p2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL p1) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p2) p_name)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        p1 
+          TableScan
+            alias: p1
+            Reduce Output Operator
+              sort order: 
+              tag: 0
+              value expressions:
+                    expr: p_name
+                    type: string
+        p2 
+          TableScan
+            alias: p2
+            Reduce Output Operator
+              sort order: 
+              tag: 1
+              value expressions:
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col1}
+            1 {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col1, _col12
+          Select Operator
+            expressions:
+                  expr: _col1
+                  type: string
+                  expr: _col12
+                  type: string
+            outputColumnNames: _col0, _col1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name
+from part p1 ,part p2 ,part p3 
+where p1.p_name = p2.p_name and p2.p_name = p3.p_name
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name
+from part p1 ,part p2 ,part p3 
+where p1.p_name = p2.p_name and p2.p_name = p3.p_name
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME part) p1) (TOK_TABREF (TOK_TABNAME part) p2)) (TOK_TABREF (TOK_TABNAME part) p3))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL p1) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p2) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p3) p_name))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL p1) p_name) (. (TOK_TABLE_OR_COL p2) p_name)) (= (. (TOK_TABLE_OR_COL p2) p_name) (. (TOK_TABLE_OR_COL p3) p_name))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        p1 
+          TableScan
+            alias: p1
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: p_name
+                    type: string
+        p2 
+          TableScan
+            alias: p2
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: p_name
+                    type: string
+        p3 
+          TableScan
+            alias: p3
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+              tag: 2
+              value expressions:
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+               Inner Join 1 to 2
+          condition expressions:
+            0 {VALUE._col1}
+            1 {VALUE._col1}
+            2 {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col1, _col12, _col23
+          Filter Operator
+            predicate:
+                expr: ((_col1 = _col12) and (_col12 = _col23))
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: _col1
+                    type: string
+                    expr: _col12
+                    type: string
+                    expr: _col23
+                    type: string
+              outputColumnNames: _col0, _col1, _col2
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name
+from part p1 , (select p_name from part) p2 ,part p3 
+where p1.p_name = p2.p_name and p2.p_name = p3.p_name
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name
+from part p1 , (select p_name from part) p2 ,part p3 
+where p1.p_name = p2.p_name and p2.p_name = p3.p_name
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME part) p1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME part))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL p_name))))) p2)) (TOK_TABREF (TOK_TABNAME part) p3))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL p1) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p2) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p3) p_name))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL p1) p_name) (. (TOK_TABLE_OR_COL p2) p_name)) (= (. (TOK_TABLE_OR_COL p2) p_name) (. (TOK_TABLE_OR_COL p3) p_name))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        p1 
+          TableScan
+            alias: p1
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: p_name
+                    type: string
+        p2:part 
+          TableScan
+            alias: part
+            Select Operator
+              expressions:
+                    expr: p_name
+                    type: string
+              outputColumnNames: _col0
+              Reduce Output Operator
+                key expressions:
+                      expr: _col0
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: _col0
+                      type: string
+                tag: 1
+                value expressions:
+                      expr: _col0
+                      type: string
+        p3 
+          TableScan
+            alias: p3
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+              tag: 2
+              value expressions:
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+               Inner Join 1 to 2
+          condition expressions:
+            0 {VALUE._col1}
+            1 {VALUE._col0}
+            2 {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col1, _col11, _col13
+          Filter Operator
+            predicate:
+                expr: ((_col1 = _col11) and (_col11 = _col13))
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: _col1
+                    type: string
+                    expr: _col11
+                    type: string
+                    expr: _col13
+                    type: string
+              outputColumnNames: _col0, _col1, _col2
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name
+from part p1 , part p2 , part p3 
+where p2.p_partkey + p1.p_partkey = p1.p_partkey and p3.p_name = p2.p_name
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name
+from part p1 , part p2 , part p3 
+where p2.p_partkey + p1.p_partkey = p1.p_partkey and p3.p_name = p2.p_name
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME part) p1) (TOK_TABREF (TOK_TABNAME part) p2)) (TOK_TABREF (TOK_TABNAME part) p3))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL p1) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p2) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p3) p_name))) (TOK_WHERE (and (= (+ (. (TOK_TABLE_OR_COL p2) p_partkey) (. (TOK_TABLE_OR_COL p1) p_partkey)) (. (TOK_TABLE_OR_COL p1) p_partkey)) (= (. (TOK_TABLE_OR_COL p3) p_name) (. (TOK_TABLE_OR_COL p2) p_name))))))
+
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        p1 
+          TableScan
+            alias: p1
+            Reduce Output Operator
+              sort order: 
+              tag: 0
+              value expressions:
+                    expr: p_partkey
+                    type: int
+                    expr: p_name
+                    type: string
+        p2 
+          TableScan
+            alias: p2
+            Reduce Output Operator
+              sort order: 
+              tag: 1
+              value expressions:
+                    expr: p_partkey
+                    type: int
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0} {VALUE._col1}
+            1 {VALUE._col0} {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col1, _col11, _col12
+          Filter Operator
+            predicate:
+                expr: ((_col11 + _col0) = _col0)
+                type: boolean
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+          TableScan
+            Reduce Output Operator
+              key expressions:
+                    expr: _col12
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col12
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: _col11
+                    type: int
+                    expr: _col12
+                    type: string
+                    expr: _col0
+                    type: int
+                    expr: _col1
+                    type: string
+        p3 
+          TableScan
+            alias: p3
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0} {VALUE._col1} {VALUE._col11} {VALUE._col12}
+            1 {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col1, _col11, _col12, _col23
+          Filter Operator
+            predicate:
+                expr: (((_col0 + _col11) = _col11) and (_col23 = _col1))
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: _col12
+                    type: string
+                    expr: _col1
+                    type: string
+                    expr: _col23
+                    type: string
+              outputColumnNames: _col0, _col1, _col2
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name, p4.p_name
+from part p1 , part p2 join part p3 on p2.p_name = p1.p_name join part p4 
+where p2.p_name = p3.p_name and p1.p_partkey = p4.p_partkey 
+            and p1.p_partkey = p2.p_partkey
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name, p4.p_name
+from part p1 , part p2 join part p3 on p2.p_name = p1.p_name join part p4 
+where p2.p_name = p3.p_name and p1.p_partkey = p4.p_partkey 
+            and p1.p_partkey = p2.p_partkey
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME part) p1) (TOK_TABREF (TOK_TABNAME part) p2)) (TOK_TABREF (TOK_TABNAME part) p3) (= (. (TOK_TABLE_OR_COL p2) p_name) (. (TOK_TABLE_OR_COL p1) p_name))) (TOK_TABREF (TOK_TABNAME part) p4))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL p1) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p2) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p3) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p4) p_name))) (TOK_WHERE (and (and (= (. (TOK_TABLE_OR_COL p2) p_name) (. (TOK_TABLE_OR_COL p3) p_name)) (= (. (TOK_TABLE_OR_COL p1) p_partkey) (. (TOK_TABLE_OR_COL p4) p_partkey))) (= (. (TOK_TABLE_OR_COL p1) p_partkey) (. (TOK_TABLE_OR_COL p2) p_partkey))))))
+
+STAGE DEPENDENCIES:
+  Stage-3 is a root stage
+  Stage-2 depends on stages: Stage-3
+  Stage-1 depends on stages: Stage-2
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        p1 
+          TableScan
+            alias: p1
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+                    expr: p_partkey
+                    type: int
+              sort order: ++
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+                    expr: p_partkey
+                    type: int
+              tag: 0
+              value expressions:
+                    expr: p_partkey
+                    type: int
+                    expr: p_name
+                    type: string
+        p2 
+          TableScan
+            alias: p2
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+                    expr: p_partkey
+                    type: int
+              sort order: ++
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+                    expr: p_partkey
+                    type: int
+              tag: 1
+              value expressions:
+                    expr: p_partkey
+                    type: int
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0} {VALUE._col1}
+            1 {VALUE._col0} {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col1, _col11, _col12
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+          TableScan
+            Reduce Output Operator
+              key expressions:
+                    expr: _col12
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col12
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: _col11
+                    type: int
+                    expr: _col12
+                    type: string
+                    expr: _col0
+                    type: int
+                    expr: _col1
+                    type: string
+        p3 
+          TableScan
+            alias: p3
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0} {VALUE._col1} {VALUE._col11} {VALUE._col12}
+            1 {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col1, _col11, _col12, _col23
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+          TableScan
+            Reduce Output Operator
+              key expressions:
+                    expr: _col11
+                    type: int
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col11
+                    type: int
+              tag: 0
+              value expressions:
+                    expr: _col23
+                    type: string
+                    expr: _col0
+                    type: int
+                    expr: _col1
+                    type: string
+                    expr: _col11
+                    type: int
+                    expr: _col12
+                    type: string
+        p4 
+          TableScan
+            alias: p4
+            Reduce Output Operator
+              key expressions:
+                    expr: p_partkey
+                    type: int
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_partkey
+                    type: int
+              tag: 1
+              value expressions:
+                    expr: p_partkey
+                    type: int
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col1} {VALUE._col11} {VALUE._col12} {VALUE._col22} {VALUE._col23}
+            1 {VALUE._col0} {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col1, _col11, _col12, _col22, _col23, _col33, _col34
+          Filter Operator
+            predicate:
+                expr: (((_col12 = _col1) and (_col22 = _col33)) and (_col22 = _col11))
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: _col23
+                    type: string
+                    expr: _col12
+                    type: string
+                    expr: _col1
+                    type: string
+                    expr: _col34
+                    type: string
+              outputColumnNames: _col0, _col1, _col2, _col3
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+PREHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name, p4.p_name
+from part p1 join part p2 on p2.p_name = p1.p_name , part p3  , part p4 
+where p2.p_name = p3.p_name and p1.p_partkey = p4.p_partkey 
+            and p1.p_partkey = p2.p_partkey
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select p1.p_name, p2.p_name, p3.p_name, p4.p_name
+from part p1 join part p2 on p2.p_name = p1.p_name , part p3  , part p4 
+where p2.p_name = p3.p_name and p1.p_partkey = p4.p_partkey 
+            and p1.p_partkey = p2.p_partkey
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME part) p1) (TOK_TABREF (TOK_TABNAME part) p2) (= (. (TOK_TABLE_OR_COL p2) p_name) (. (TOK_TABLE_OR_COL p1) p_name))) (TOK_TABREF (TOK_TABNAME part) p3)) (TOK_TABREF (TOK_TABNAME part) p4))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL p1) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p2) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p3) p_name)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL p4) p_name))) (TOK_WHERE (and (and (= (. (TOK_TABLE_OR_COL p2) p_name) (. (TOK_TABLE_OR_COL p3) p_name)) (= (. (TOK_TABLE_OR_COL p1) p_partkey) (. (TOK_TABLE_OR_COL p4) p_partkey))) (= (. (TOK_TABLE_OR_COL p1) p_partkey) (. (TOK_TABLE_OR_COL p2) p_partkey))))))
+
+STAGE DEPENDENCIES:
+  Stage-3 is a root stage
+  Stage-2 depends on stages: Stage-3
+  Stage-1 depends on stages: Stage-2
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        p1 
+          TableScan
+            alias: p1
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+                    expr: p_partkey
+                    type: int
+              sort order: ++
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+                    expr: p_partkey
+                    type: int
+              tag: 0
+              value expressions:
+                    expr: p_partkey
+                    type: int
+                    expr: p_name
+                    type: string
+        p2 
+          TableScan
+            alias: p2
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+                    expr: p_partkey
+                    type: int
+              sort order: ++
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+                    expr: p_partkey
+                    type: int
+              tag: 1
+              value expressions:
+                    expr: p_partkey
+                    type: int
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0} {VALUE._col1}
+            1 {VALUE._col0} {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col1, _col11, _col12
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+          TableScan
+            Reduce Output Operator
+              key expressions:
+                    expr: _col12
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col12
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: _col11
+                    type: int
+                    expr: _col12
+                    type: string
+                    expr: _col0
+                    type: int
+                    expr: _col1
+                    type: string
+        p3 
+          TableScan
+            alias: p3
+            Reduce Output Operator
+              key expressions:
+                    expr: p_name
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_name
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col0} {VALUE._col1} {VALUE._col11} {VALUE._col12}
+            1 {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col0, _col1, _col11, _col12, _col23
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+          TableScan
+            Reduce Output Operator
+              key expressions:
+                    expr: _col11
+                    type: int
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col11
+                    type: int
+              tag: 0
+              value expressions:
+                    expr: _col23
+                    type: string
+                    expr: _col0
+                    type: int
+                    expr: _col1
+                    type: string
+                    expr: _col11
+                    type: int
+                    expr: _col12
+                    type: string
+        p4 
+          TableScan
+            alias: p4
+            Reduce Output Operator
+              key expressions:
+                    expr: p_partkey
+                    type: int
+              sort order: +
+              Map-reduce partition columns:
+                    expr: p_partkey
+                    type: int
+              tag: 1
+              value expressions:
+                    expr: p_partkey
+                    type: int
+                    expr: p_name
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col1} {VALUE._col11} {VALUE._col12} {VALUE._col22} {VALUE._col23}
+            1 {VALUE._col0} {VALUE._col1}
+          handleSkewJoin: false
+          outputColumnNames: _col1, _col11, _col12, _col22, _col23, _col33, _col34
+          Filter Operator
+            predicate:
+                expr: (((_col12 = _col1) and (_col22 = _col33)) and (_col22 = _col11))
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: _col23
+                    type: string
+                    expr: _col12
+                    type: string
+                    expr: _col1
+                    type: string
+                    expr: _col34
+                    type: string
+              outputColumnNames: _col0, _col1, _col2, _col3
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+


