hive-commits mailing list archives

From jcama...@apache.org
Subject [3/4] hive git commit: HIVE-11752: Pre-materializing complex CTE queries (Navis, Jesus Camacho Rodriguez, reviewed by Laljo John Pullokkaran)
Date Fri, 12 Feb 2016 18:41:48 GMT
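For orientation, the new clientpositive tests in this patch exercise the CTE pre-materialization introduced by HIVE-11752, driven by hive.optimize.cte.materialize.threshold: a negative value keeps CTEs inlined, while a positive value N materializes a CTE that is referenced at least N times (compare cte_mat_2.q, threshold 3 with two references and no extra stages, against cte_mat_3.q, threshold 2 and a materialized default.q1). A minimal HiveQL sketch of the behaviour the tests check, reusing the src table they assume:

set hive.optimize.cte.materialize.threshold=2;

-- q1 is referenced twice, which meets the threshold, so the plan
-- materializes it into default.q1 before running the self-join
explain
with q1 as (select * from src where key = '5')
select a.key
from q1 a join q1 b
on a.key = b.key;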
http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
index be51edc..fc42aaa 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
@@ -29,6 +29,7 @@ import java.util.LinkedHashMap;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
+import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -42,6 +43,7 @@ import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
 import org.apache.hadoop.hive.ql.plan.ReduceWork;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.ql.plan.TezWork;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -64,9 +66,17 @@ public class TestGenTezWork {
   @SuppressWarnings("unchecked")
   @Before
   public void setUp() throws Exception {
+    // Init conf
+    final HiveConf conf = new HiveConf(SemanticAnalyzer.class);
+    SessionState.start(conf);
+
+    // Init parse context
+    final ParseContext pctx = new ParseContext();
+    pctx.setContext(new Context(conf));
+
     ctx = new GenTezProcContext(
-        new HiveConf(),
-        new ParseContext(),
+        conf,
+        pctx,
         Collections.EMPTY_LIST,
         new ArrayList<Task<? extends Serializable>>(),
         Collections.EMPTY_SET,

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/queries/clientpositive/cte_3.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cte_3.q b/ql/src/test/queries/clientpositive/cte_3.q
new file mode 100644
index 0000000..aa8adbc
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cte_3.q
@@ -0,0 +1,31 @@
+set hive.mapred.mode=nonstrict;
+set hive.optimize.cte.materialize.threshold=1;
+
+explain
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+;
+
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+;
+
+-- in subquery
+explain
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a;
+
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a;
+
+-- chaining
+explain
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a;
+
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a;

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/queries/clientpositive/cte_4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cte_4.q b/ql/src/test/queries/clientpositive/cte_4.q
new file mode 100644
index 0000000..0455a6a
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cte_4.q
@@ -0,0 +1,56 @@
+set hive.mapred.mode=nonstrict;
+set hive.optimize.cte.materialize.threshold=1;
+
+-- union test
+with q1 as (select * from src where key= '5'),
+q2 as (select * from src s2 where key = '4')
+select * from q1 union all select * from q2
+;
+
+-- insert test
+create table s1 like src;
+with q1 as ( select key, value from src where key = '5')
+from q1
+insert overwrite table s1
+select *
+;
+select * from s1;
+drop table s1;
+
+-- from style
+with q1 as (select * from src where key= '5')
+from q1
+select *
+;
+
+-- ctas
+create table s2 as
+with q1 as ( select key from src where key = '4')
+select * from q1
+;
+
+select * from s2;
+drop table s2;
+
+-- view test
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+;
+
+select * from v1;
+
+drop view v1;
+
+
+-- view test, name collision
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+;
+
+with q1 as ( select key from src where key = '4')
+select * from v1
+;
+
+drop view v1;

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/queries/clientpositive/cte_5.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cte_5.q b/ql/src/test/queries/clientpositive/cte_5.q
new file mode 100644
index 0000000..8968688
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cte_5.q
@@ -0,0 +1,23 @@
+set hive.mapred.mode=nonstrict;
+set hive.optimize.cte.materialize.threshold=-1;
+
+create database mydb;
+use mydb;
+create table q1 (colnum int, colstring string);
+insert into q1 values (5, 'A');
+
+use default;
+
+show tables in mydb;
+show tables;
+
+explain
+with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key;
+
+with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key;

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/queries/clientpositive/cte_mat_1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cte_mat_1.q b/ql/src/test/queries/clientpositive/cte_mat_1.q
new file mode 100644
index 0000000..2afb960
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cte_mat_1.q
@@ -0,0 +1,8 @@
+set hive.mapred.mode=nonstrict;
+set hive.optimize.cte.materialize.threshold=-1;
+
+explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key;

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/queries/clientpositive/cte_mat_2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cte_mat_2.q b/ql/src/test/queries/clientpositive/cte_mat_2.q
new file mode 100644
index 0000000..adcd087
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cte_mat_2.q
@@ -0,0 +1,8 @@
+set hive.mapred.mode=nonstrict;
+set hive.optimize.cte.materialize.threshold=3;
+
+explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key;

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/queries/clientpositive/cte_mat_3.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cte_mat_3.q b/ql/src/test/queries/clientpositive/cte_mat_3.q
new file mode 100644
index 0000000..650cc24
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cte_mat_3.q
@@ -0,0 +1,8 @@
+set hive.mapred.mode=nonstrict;
+set hive.optimize.cte.materialize.threshold=2;
+
+explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key;

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/queries/clientpositive/cte_mat_4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cte_mat_4.q b/ql/src/test/queries/clientpositive/cte_mat_4.q
new file mode 100644
index 0000000..2d75963
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cte_mat_4.q
@@ -0,0 +1,39 @@
+set hive.mapred.mode=nonstrict;
+set hive.optimize.cte.materialize.threshold=2;
+
+create temporary table q1 (a int, b string);
+insert into q1 values (1, 'A');
+
+show tables;
+
+explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key;
+
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key;
+
+show tables;
+
+select * from q1;
+
+drop table q1;
+
+show tables;
+
+explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key;
+
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key;
+
+show tables;

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/queries/clientpositive/cte_mat_5.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cte_mat_5.q b/ql/src/test/queries/clientpositive/cte_mat_5.q
new file mode 100644
index 0000000..fd0aeda
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/cte_mat_5.q
@@ -0,0 +1,23 @@
+set hive.mapred.mode=nonstrict;
+set hive.optimize.cte.materialize.threshold=1;
+
+create database mydb;
+use mydb;
+create table q1 (colnum int, colstring string);
+insert into q1 values (5, 'A');
+
+use default;
+
+show tables in mydb;
+show tables;
+
+explain
+with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key;
+
+with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key;

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientnegative/analyze1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/analyze1.q.out b/ql/src/test/results/clientnegative/analyze1.q.out
index 589a6ee..3230b02 100644
--- a/ql/src/test/results/clientnegative/analyze1.q.out
+++ b/ql/src/test/results/clientnegative/analyze1.q.out
@@ -1 +1 @@
-FAILED: SemanticException Partition spec {key=null} contains non-partition columns
+FAILED: ValidationFailureSemanticException Partition spec {key=null} contains non-partition columns

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientnegative/dyn_part1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/dyn_part1.q.out b/ql/src/test/results/clientnegative/dyn_part1.q.out
index 62d01fb..4c8e171 100644
--- a/ql/src/test/results/clientnegative/dyn_part1.q.out
+++ b/ql/src/test/results/clientnegative/dyn_part1.q.out
@@ -6,4 +6,4 @@ POSTHOOK: query: create table dynamic_partition (key string) partitioned by (val
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dynamic_partition
-FAILED: SemanticException Partition spec {hr=null} contains non-partition columns
+FAILED: ValidationFailureSemanticException Partition spec {hr=null} contains non-partition columns

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/alter_view_as_select.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_view_as_select.q.out b/ql/src/test/results/clientpositive/alter_view_as_select.q.out
index c89c0dc..3666221 100644
--- a/ql/src/test/results/clientpositive/alter_view_as_select.q.out
+++ b/ql/src/test/results/clientpositive/alter_view_as_select.q.out
@@ -57,7 +57,6 @@ POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:tv
 POSTHOOK: Output: tv@testView
-POSTHOOK: Output: tv@testview
 PREHOOK: query: DESCRIBE FORMATTED tv.testView
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: tv@testview
@@ -106,7 +105,6 @@ POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
 POSTHOOK: Output: database:tv
 POSTHOOK: Output: tv@testView
-POSTHOOK: Output: tv@testview
 PREHOOK: query: DESCRIBE FORMATTED tv.testView
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: tv@testview

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/cte_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cte_3.q.out b/ql/src/test/results/clientpositive/cte_3.q.out
new file mode 100644
index 0000000..0fe0865
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cte_3.q.out
@@ -0,0 +1,444 @@
+PREHOOK: query: explain
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-6 depends on stages: Stage-1 , consists of Stage-3, Stage-2, Stage-4
+  Stage-3
+  Stage-0 depends on stages: Stage-3, Stage-2, Stage-5
+  Stage-2
+  Stage-4
+  Stage-5 depends on stages: Stage-4
+  Stage-7 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: '5' (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.q1
+
+  Stage: Stage-6
+    Conditional Operator
+
+  Stage: Stage-3
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-7
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: q1
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          Select Operator
+            expressions: key (type: string)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            ListSink
+
+PREHOOK: query: with q1 as ( select key from src where key = '5')
+select *
+from q1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from src where key = '5')
+select *
+from q1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: -- in subquery
+explain
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+POSTHOOK: query: -- in subquery
+explain
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-6 depends on stages: Stage-1 , consists of Stage-3, Stage-2, Stage-4
+  Stage-3
+  Stage-0 depends on stages: Stage-3, Stage-2, Stage-5
+  Stage-2
+  Stage-4
+  Stage-5 depends on stages: Stage-4
+  Stage-7 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: '5' (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.q1
+
+  Stage: Stage-6
+    Conditional Operator
+
+  Stage: Stage-3
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-7
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: q1
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          Select Operator
+            expressions: key (type: string)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            ListSink
+
+PREHOOK: query: with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: -- chaining
+explain
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+POSTHOOK: query: -- chaining
+explain
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-6 depends on stages: Stage-1 , consists of Stage-3, Stage-2, Stage-4
+  Stage-3
+  Stage-0 depends on stages: Stage-3, Stage-2, Stage-5
+  Stage-8 depends on stages: Stage-0
+  Stage-13 depends on stages: Stage-8 , consists of Stage-10, Stage-9, Stage-11
+  Stage-10
+  Stage-7 depends on stages: Stage-10, Stage-9, Stage-12
+  Stage-9
+  Stage-11
+  Stage-12 depends on stages: Stage-11
+  Stage-2
+  Stage-4
+  Stage-5 depends on stages: Stage-4
+  Stage-14 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: '5' (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.q2
+
+  Stage: Stage-6
+    Conditional Operator
+
+  Stage: Stage-3
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-8
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: q2
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: '5' (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.q1
+
+  Stage: Stage-13
+    Conditional Operator
+
+  Stage: Stage-10
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-7
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-9
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-11
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-12
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q2
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q2
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-14
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: q1
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          Select Operator
+            expressions: key (type: string)
+            outputColumnNames: _col0
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            ListSink
+
+PREHOOK: query: with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@q2
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+PREHOOK: Output: default@q2
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@q2
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+POSTHOOK: Output: default@q2
+#### A masked pattern was here ####
+5
+5
+5

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/cte_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cte_4.q.out b/ql/src/test/results/clientpositive/cte_4.q.out
new file mode 100644
index 0000000..d560d74
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cte_4.q.out
@@ -0,0 +1,219 @@
+PREHOOK: query: -- union test
+with q1 as (select * from src where key= '5'),
+q2 as (select * from src s2 where key = '4')
+select * from q1 union all select * from q2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@q2
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+PREHOOK: Output: default@q2
+#### A masked pattern was here ####
+POSTHOOK: query: -- union test
+with q1 as (select * from src where key= '5'),
+q2 as (select * from src s2 where key = '4')
+select * from q1 union all select * from q2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@q2
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+POSTHOOK: Output: default@q2
+#### A masked pattern was here ####
+5	val_5
+5	val_5
+5	val_5
+4	val_4
+PREHOOK: query: -- insert test
+create table s1 like src
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@s1
+POSTHOOK: query: -- insert test
+create table s1 like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@s1
+PREHOOK: query: with q1 as ( select key, value from src where key = '5')
+from q1
+insert overwrite table s1
+select *
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+PREHOOK: Output: default@s1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key, value from src where key = '5')
+from q1
+insert overwrite table s1
+select *
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+POSTHOOK: Output: default@s1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: s1.key SIMPLE [(q1)q1.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: s1.value SIMPLE [(q1)q1.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: select * from s1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@s1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from s1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@s1
+#### A masked pattern was here ####
+5	val_5
+5	val_5
+5	val_5
+PREHOOK: query: drop table s1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@s1
+PREHOOK: Output: default@s1
+POSTHOOK: query: drop table s1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@s1
+POSTHOOK: Output: default@s1
+PREHOOK: query: -- from style
+with q1 as (select * from src where key= '5')
+from q1
+select *
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+#### A masked pattern was here ####
+POSTHOOK: query: -- from style
+with q1 as (select * from src where key= '5')
+from q1
+select *
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+#### A masked pattern was here ####
+5	val_5
+5	val_5
+5	val_5
+PREHOOK: query: -- ctas
+create table s2 as
+with q1 as ( select key from src where key = '4')
+select * from q1
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+PREHOOK: Output: default@s2
+#### A masked pattern was here ####
+POSTHOOK: query: -- ctas
+create table s2 as
+with q1 as ( select key from src where key = '4')
+select * from q1
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+POSTHOOK: Output: default@s2
+#### A masked pattern was here ####
+PREHOOK: query: select * from s2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@s2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from s2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@s2
+#### A masked pattern was here ####
+4
+PREHOOK: query: drop table s2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@s2
+PREHOOK: Output: default@s2
+POSTHOOK: query: drop table s2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@s2
+POSTHOOK: Output: default@s2
+PREHOOK: query: -- view test
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@v1
+POSTHOOK: query: -- view test
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@v1
+PREHOOK: query: select * from v1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from v1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@v1
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: drop view v1
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@v1
+PREHOOK: Output: default@v1
+POSTHOOK: query: drop view v1
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@v1
+POSTHOOK: Output: default@v1
+PREHOOK: query: -- view test, name collision
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@v1
+POSTHOOK: query: -- view test, name collision
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@v1
+PREHOOK: query: with q1 as ( select key from src where key = '4')
+select * from v1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from src where key = '4')
+select * from v1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@v1
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: drop view v1
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@v1
+PREHOOK: Output: default@v1
+POSTHOOK: query: drop view v1
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@v1
+POSTHOOK: Output: default@v1

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/cte_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cte_5.q.out b/ql/src/test/results/clientpositive/cte_5.q.out
new file mode 100644
index 0000000..44a3282
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cte_5.q.out
@@ -0,0 +1,156 @@
+PREHOOK: query: create database mydb
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:mydb
+POSTHOOK: query: create database mydb
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:mydb
+PREHOOK: query: use mydb
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:mydb
+POSTHOOK: query: use mydb
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:mydb
+PREHOOK: query: create table q1 (colnum int, colstring string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:mydb
+PREHOOK: Output: mydb@q1
+POSTHOOK: query: create table q1 (colnum int, colstring string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:mydb
+POSTHOOK: Output: mydb@q1
+PREHOOK: query: insert into q1 values (5, 'A')
+PREHOOK: type: QUERY
+PREHOOK: Input: mydb@values__tmp__table__1
+PREHOOK: Output: mydb@q1
+POSTHOOK: query: insert into q1 values (5, 'A')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: mydb@values__tmp__table__1
+POSTHOOK: Output: mydb@q1
+POSTHOOK: Lineage: q1.colnum EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: q1.colstring SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: show tables in mydb
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:mydb
+POSTHOOK: query: show tables in mydb
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:mydb
+q1
+values__tmp__table__1
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:default
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:default
+alltypesorc
+cbo_t1
+cbo_t2
+cbo_t3
+lineitem
+part
+src
+src1
+src_cbo
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+PREHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: colnum is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: colnum (type: int)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: UDFToDouble(_col0) (type: double)
+                  sort order: +
+                  Map-reduce partition columns: UDFToDouble(_col0) (type: double)
+                  Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: int)
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: 5.0 (type: double)
+                  sort order: +
+                  Map-reduce partition columns: 5.0 (type: double)
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 UDFToDouble(_col0) (type: double)
+            1 UDFToDouble('5') (type: double)
+          outputColumnNames: _col0
+          Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: mydb@q1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: mydb@q1
+#### A masked pattern was here ####
+5
+5
+5

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/cte_mat_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cte_mat_1.q.out b/ql/src/test/results/clientpositive/cte_mat_1.q.out
new file mode 100644
index 0000000..6429470
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cte_mat_1.q.out
@@ -0,0 +1,72 @@
+PREHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: '5' (type: string)
+                  sort order: +
+                  Map-reduce partition columns: '5' (type: string)
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: '5' (type: string)
+                  sort order: +
+                  Map-reduce partition columns: '5' (type: string)
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 '5' (type: string)
+            1 '5' (type: string)
+          Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: '5' (type: string)
+            outputColumnNames: _col0
+            Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/cte_mat_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cte_mat_2.q.out b/ql/src/test/results/clientpositive/cte_mat_2.q.out
new file mode 100644
index 0000000..6429470
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cte_mat_2.q.out
@@ -0,0 +1,72 @@
+PREHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: '5' (type: string)
+                  sort order: +
+                  Map-reduce partition columns: '5' (type: string)
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: '5' (type: string)
+                  sort order: +
+                  Map-reduce partition columns: '5' (type: string)
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 '5' (type: string)
+            1 '5' (type: string)
+          Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: '5' (type: string)
+            outputColumnNames: _col0
+            Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/cte_mat_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cte_mat_3.q.out b/ql/src/test/results/clientpositive/cte_mat_3.q.out
new file mode 100644
index 0000000..683228c
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cte_mat_3.q.out
@@ -0,0 +1,147 @@
+PREHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-6 depends on stages: Stage-1 , consists of Stage-3, Stage-2, Stage-4
+  Stage-3
+  Stage-0 depends on stages: Stage-3, Stage-2, Stage-5
+  Stage-8 depends on stages: Stage-0
+  Stage-2
+  Stage-4
+  Stage-5 depends on stages: Stage-4
+  Stage-7 depends on stages: Stage-8
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: '5' (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.q1
+
+  Stage: Stage-6
+    Conditional Operator
+
+  Stage: Stage-3
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-8
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          TableScan
+            alias: a
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 _col0 (type: string)
+            1 _col0 (type: string)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-7
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/cte_mat_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cte_mat_4.q.out b/ql/src/test/results/clientpositive/cte_mat_4.q.out
new file mode 100644
index 0000000..048e4b6
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cte_mat_4.q.out
@@ -0,0 +1,477 @@
+PREHOOK: query: create temporary table q1 (a int, b string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+POSTHOOK: query: create temporary table q1 (a int, b string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+PREHOOK: query: insert into q1 values (1, 'A')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@q1
+POSTHOOK: query: insert into q1 values (1, 'A')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@q1
+POSTHOOK: Lineage: q1.a EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: q1.b SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:default
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:default
+alltypesorc
+cbo_t1
+cbo_t2
+cbo_t3
+lineitem
+part
+q1
+src
+src1
+src_cbo
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+values__tmp__table__1
+PREHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-6 depends on stages: Stage-1 , consists of Stage-3, Stage-2, Stage-4
+  Stage-3
+  Stage-0 depends on stages: Stage-3, Stage-2, Stage-5
+  Stage-8 depends on stages: Stage-0
+  Stage-2
+  Stage-4
+  Stage-5 depends on stages: Stage-4
+  Stage-7 depends on stages: Stage-8
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: '5' (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.q1
+
+  Stage: Stage-6
+    Conditional Operator
+
+  Stage: Stage-3
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-8
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          TableScan
+            alias: a
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 _col0 (type: string)
+            1 _col0 (type: string)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-7
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+#### A masked pattern was here ####
+5
+5
+5
+5
+5
+5
+5
+5
+5
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:default
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:default
+alltypesorc
+cbo_t1
+cbo_t2
+cbo_t3
+lineitem
+part
+q1
+src
+src1
+src_cbo
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+values__tmp__table__1
+PREHOOK: query: select * from q1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from q1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+#### A masked pattern was here ####
+1	A
+PREHOOK: query: drop table q1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@q1
+PREHOOK: Output: default@q1
+POSTHOOK: query: drop table q1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@q1
+POSTHOOK: Output: default@q1
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:default
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:default
+alltypesorc
+cbo_t1
+cbo_t2
+cbo_t3
+lineitem
+part
+src
+src1
+src_cbo
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+values__tmp__table__1
+PREHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-6 depends on stages: Stage-1 , consists of Stage-3, Stage-2, Stage-4
+  Stage-3
+  Stage-0 depends on stages: Stage-3, Stage-2, Stage-5
+  Stage-8 depends on stages: Stage-0
+  Stage-2
+  Stage-4
+  Stage-5 depends on stages: Stage-4
+  Stage-7 depends on stages: Stage-8
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: '5' (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.q1
+
+  Stage: Stage-6
+    Conditional Operator
+
+  Stage: Stage-3
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-8
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          TableScan
+            alias: a
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 _col0 (type: string)
+            1 _col0 (type: string)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-7
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as (select * from src where key= '5')
+select a.key
+from q1 a join q1 b
+on a.key=b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+#### A masked pattern was here ####
+5
+5
+5
+5
+5
+5
+5
+5
+5
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:default
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:default
+alltypesorc
+cbo_t1
+cbo_t2
+cbo_t3
+lineitem
+part
+src
+src1
+src_cbo
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+values__tmp__table__1
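
In the cte_mat_4.q.out plans above, the shared CTE is pre-materialized: Stage-1 writes the filtered rows of src into a temporary table default.q1, and the join in Stage-8 then reads that table. A minimal sketch of a query exercising this path, assuming the hive.optimize.cte.materialize.threshold property added by this patch decides when a CTE is written out (the value 2, i.e. "materialize a CTE referenced at least twice", is illustrative):

-- sketch: pre-materialize a CTE that is referenced twice (threshold value is illustrative)
set hive.optimize.cte.materialize.threshold=2;

with q1 as (select * from src where key = '5')
select a.key
from q1 a join q1 b
on a.key = b.key;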

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/cte_mat_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cte_mat_5.q.out b/ql/src/test/results/clientpositive/cte_mat_5.q.out
new file mode 100644
index 0000000..b7b34ba
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cte_mat_5.q.out
@@ -0,0 +1,238 @@
+PREHOOK: query: create database mydb
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:mydb
+POSTHOOK: query: create database mydb
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:mydb
+PREHOOK: query: use mydb
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:mydb
+POSTHOOK: query: use mydb
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:mydb
+PREHOOK: query: create table q1 (colnum int, colstring string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:mydb
+PREHOOK: Output: mydb@q1
+POSTHOOK: query: create table q1 (colnum int, colstring string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:mydb
+POSTHOOK: Output: mydb@q1
+PREHOOK: query: insert into q1 values (5, 'A')
+PREHOOK: type: QUERY
+PREHOOK: Input: mydb@values__tmp__table__1
+PREHOOK: Output: mydb@q1
+POSTHOOK: query: insert into q1 values (5, 'A')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: mydb@values__tmp__table__1
+POSTHOOK: Output: mydb@q1
+POSTHOOK: Lineage: q1.colnum EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: q1.colstring SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: show tables in mydb
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:mydb
+POSTHOOK: query: show tables in mydb
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:mydb
+q1
+values__tmp__table__1
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:default
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:default
+alltypesorc
+cbo_t1
+cbo_t2
+cbo_t3
+lineitem
+part
+src
+src1
+src_cbo
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+PREHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-6 depends on stages: Stage-1 , consists of Stage-3, Stage-2, Stage-4
+  Stage-3
+  Stage-0 depends on stages: Stage-3, Stage-2, Stage-5
+  Stage-8 depends on stages: Stage-0
+  Stage-2
+  Stage-4
+  Stage-5 depends on stages: Stage-4
+  Stage-7 depends on stages: Stage-8
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '5') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: '5' (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.q1
+
+  Stage: Stage-6
+    Conditional Operator
+
+  Stage: Stage-3
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-8
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: colnum is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: colnum (type: int)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: UDFToDouble(_col0) (type: double)
+                  sort order: +
+                  Map-reduce partition columns: UDFToDouble(_col0) (type: double)
+                  Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: int)
+          TableScan
+            alias: b
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Reduce Output Operator
+                  key expressions: UDFToDouble(_col0) (type: double)
+                  sort order: +
+                  Map-reduce partition columns: UDFToDouble(_col0) (type: double)
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 UDFToDouble(_col0) (type: double)
+            1 UDFToDouble(_col0) (type: double)
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.q1
+
+  Stage: Stage-5
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-7
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@q1
+PREHOOK: Input: default@src
+PREHOOK: Input: mydb@q1
+PREHOOK: Output: database:default
+PREHOOK: Output: default@q1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as (select * from src where key= '5')
+select a.colnum
+from mydb.q1 as a join q1 as b
+on a.colnum=b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@q1
+POSTHOOK: Input: default@src
+POSTHOOK: Input: mydb@q1
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@q1
+#### A masked pattern was here ####
+5
+5
+5
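
The cte_mat_5.q.out plans above show the name-resolution rule when a permanent table and a CTE are both called q1: the database-qualified mydb.q1 reads the permanent table, while the unqualified q1 resolves to the CTE, which is materialized as default.q1 in Stage-1. A sketch of the setup, restating the golden-file queries (the threshold value is illustrative and assumed to mean "materialize a CTE referenced at least once"):

-- sketch: a CTE named q1 coexisting with a permanent table mydb.q1 (threshold value is illustrative)
set hive.optimize.cte.materialize.threshold=1;

create database mydb;
create table mydb.q1 (colnum int, colstring string);
insert into mydb.q1 values (5, 'A');

-- mydb.q1 below is the permanent table; the bare q1 is the materialized CTE
with q1 as (select * from src where key = '5')
select a.colnum
from mydb.q1 as a join q1 as b
on a.colnum = b.key;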

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/llap/cte_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/cte_1.q.out b/ql/src/test/results/clientpositive/llap/cte_1.q.out
new file mode 100644
index 0000000..41ffdc7
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/cte_1.q.out
@@ -0,0 +1,126 @@
+PREHOOK: query: explain
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+with q1 as ( select key from src where key = '5')
+select *
+from q1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Filter Operator
+            predicate: (key = '5') (type: boolean)
+            Select Operator
+              expressions: '5' (type: string)
+              outputColumnNames: _col0
+              ListSink
+
+PREHOOK: query: with q1 as ( select key from src where key = '5')
+select *
+from q1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from src where key = '5')
+select *
+from q1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: -- in subquery
+explain
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+POSTHOOK: query: -- in subquery
+explain
+with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Filter Operator
+            predicate: (key = '5') (type: boolean)
+            Select Operator
+              expressions: '5' (type: string)
+              outputColumnNames: _col0
+              ListSink
+
+PREHOOK: query: with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: -- chaining
+explain
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+POSTHOOK: query: -- chaining
+explain
+with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Filter Operator
+            predicate: (key = '5') (type: boolean)
+            Select Operator
+              expressions: '5' (type: string)
+              outputColumnNames: _col0
+              ListSink
+
+PREHOOK: query: with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from q2 where key = '5'),
+q2 as ( select key from src where key = '5')
+select * from (select key from q1) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+5
+5
+5
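
In the llap cte_1.q.out plans above the CTEs are expanded inline: each explain is a single Fetch stage scanning src directly, with no temporary table involved. This matches pre-materialization being switched off, which is assumed here to be the effect of leaving hive.optimize.cte.materialize.threshold at a negative value:

-- sketch: with materialization disabled (assumed setting), the CTE is inlined
set hive.optimize.cte.materialize.threshold=-1;

with q1 as (select key from src where key = '5')
select * from q1;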

http://git-wip-us.apache.org/repos/asf/hive/blob/dca4233d/ql/src/test/results/clientpositive/llap/cte_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/cte_2.q.out b/ql/src/test/results/clientpositive/llap/cte_2.q.out
new file mode 100644
index 0000000..23f8ec6
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/cte_2.q.out
@@ -0,0 +1,189 @@
+PREHOOK: query: -- union test
+with q1 as (select * from src where key= '5'),
+q2 as (select * from src s2 where key = '4')
+select * from q1 union all select * from q2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- union test
+with q1 as (select * from src where key= '5'),
+q2 as (select * from src s2 where key = '4')
+select * from q1 union all select * from q2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+5	val_5
+5	val_5
+5	val_5
+4	val_4
+PREHOOK: query: -- insert test
+create table s1 like src
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@s1
+POSTHOOK: query: -- insert test
+create table s1 like src
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@s1
+PREHOOK: query: with q1 as ( select key, value from src where key = '5')
+from q1
+insert overwrite table s1
+select *
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@s1
+POSTHOOK: query: with q1 as ( select key, value from src where key = '5')
+from q1
+insert overwrite table s1
+select *
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@s1
+POSTHOOK: Lineage: s1.key SIMPLE []
+POSTHOOK: Lineage: s1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select * from s1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@s1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from s1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@s1
+#### A masked pattern was here ####
+5	val_5
+5	val_5
+5	val_5
+PREHOOK: query: drop table s1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@s1
+PREHOOK: Output: default@s1
+POSTHOOK: query: drop table s1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@s1
+POSTHOOK: Output: default@s1
+PREHOOK: query: -- from style
+with q1 as (select * from src where key= '5')
+from q1
+select *
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- from style
+with q1 as (select * from src where key= '5')
+from q1
+select *
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+5	val_5
+5	val_5
+5	val_5
+PREHOOK: query: -- ctas
+create table s2 as
+with q1 as ( select key from src where key = '4')
+select * from q1
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@s2
+POSTHOOK: query: -- ctas
+create table s2 as
+with q1 as ( select key from src where key = '4')
+select * from q1
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@s2
+PREHOOK: query: select * from s2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@s2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from s2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@s2
+#### A masked pattern was here ####
+4
+PREHOOK: query: drop table s2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@s2
+PREHOOK: Output: default@s2
+POSTHOOK: query: drop table s2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@s2
+POSTHOOK: Output: default@s2
+PREHOOK: query: -- view test
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@v1
+POSTHOOK: query: -- view test
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@v1
+PREHOOK: query: select * from v1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from v1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@v1
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: drop view v1
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@v1
+PREHOOK: Output: default@v1
+POSTHOOK: query: drop view v1
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@v1
+POSTHOOK: Output: default@v1
+PREHOOK: query: -- view test, name collision
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@v1
+POSTHOOK: query: -- view test, name collision
+create view v1 as
+with q1 as ( select key from src where key = '5')
+select * from q1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@v1
+PREHOOK: query: with q1 as ( select key from src where key = '4')
+select * from v1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@v1
+#### A masked pattern was here ####
+POSTHOOK: query: with q1 as ( select key from src where key = '4')
+select * from v1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@v1
+#### A masked pattern was here ####
+5
+5
+5
+PREHOOK: query: drop view v1
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@v1
+PREHOOK: Output: default@v1
+POSTHOOK: query: drop view v1
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@v1
+POSTHOOK: Output: default@v1
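
The final queries in the llap cte_2.q.out output above capture the name-collision rule for views: a CTE named q1 in the query that reads view v1 does not replace the q1 defined inside the view, so select * from v1 still returns the key '5' rows. A small sketch contrasting the two scopes (results are the ones shown in the golden file):

-- sketch: an outer CTE does not override the CTE baked into a view definition
-- reads the view's own q1 and returns the key '5' rows
with q1 as (select key from src where key = '4')
select * from v1;

-- reads the outer CTE directly and returns the key '4' row
with q1 as (select key from src where key = '4')
select * from q1;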

