hive-commits mailing list archives

From kgyrtk...@apache.org
Subject [23/54] [partial] hive git commit: HIVE-17550: Remove unreferenced q.out-s (Zoltan Haindrich, reviewed by Ashutosh Chauhan)
Date Thu, 21 Sep 2017 09:07:00 GMT
http://git-wip-us.apache.org/repos/asf/hive/blob/6f5c1135/ql/src/test/results/clientpositive/column_access_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/column_access_stats.q.out b/ql/src/test/results/clientpositive/column_access_stats.q.out
deleted file mode 100644
index dd8ade2..0000000
--- a/ql/src/test/results/clientpositive/column_access_stats.q.out
+++ /dev/null
@@ -1,869 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- This test is used for testing the ColumnAccessAnalyzer
-
-CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@T1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@t1
-PREHOOK: query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@T2
-PREHOOK: query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@T3
-PREHOOK: query: CREATE TABLE T4(key STRING, val STRING) PARTITIONED BY (p STRING)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@T4
-PREHOOK: query: -- Simple select queries
-SELECT key FROM T1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key
-
-1
-2
-3
-7
-8
-8
-PREHOOK: query: SELECT key, val FROM T1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key,val
-
-1	11
-2	12
-3	13
-7	17
-8	18
-8	28
-PREHOOK: query: SELECT 1 FROM T1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-#### A masked pattern was here ####
-1
-1
-1
-1
-1
-1
-PREHOOK: query: SELECT key, val from T4 where p=1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t4
-#### A masked pattern was here ####
-Table:default@t4
-Columns:key,p,val
-
-PREHOOK: query: SELECT val FROM T4 where p=1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t4
-#### A masked pattern was here ####
-Table:default@t4
-Columns:p,val
-
-PREHOOK: query: SELECT p, val FROM T4 where p=1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t4
-#### A masked pattern was here ####
-Table:default@t4
-Columns:p,val
-
-PREHOOK: query: -- More complicated select queries
-EXPLAIN SELECT key FROM (SELECT key, val FROM T1) subq1
-PREHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        TableScan
-          alias: t1
-          Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: key (type: string)
-            outputColumnNames: _col0
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            ListSink
-
-PREHOOK: query: SELECT key FROM (SELECT key, val FROM T1) subq1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key
-
-1
-2
-3
-7
-8
-8
-PREHOOK: query: EXPLAIN SELECT k FROM (SELECT key as k, val as v FROM T1) subq1
-PREHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        TableScan
-          alias: t1
-          Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: key (type: string)
-            outputColumnNames: _col0
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            ListSink
-
-PREHOOK: query: SELECT k FROM (SELECT key as k, val as v FROM T1) subq1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key
-
-1
-2
-3
-7
-8
-8
-PREHOOK: query: SELECT key + 1 as k FROM T1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key
-
-2.0
-3.0
-4.0
-8.0
-9.0
-9.0
-PREHOOK: query: SELECT key + val as k FROM T1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key,val
-
-12.0
-14.0
-16.0
-24.0
-26.0
-36.0
-PREHOOK: query: -- Work with union
-EXPLAIN
-SELECT * FROM (
-SELECT key as c FROM T1
- UNION ALL
-SELECT val as c FROM T1
-) subq1
-PREHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: t1
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Union
-                Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          TableScan
-            alias: t1
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: val (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Union
-                Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT * FROM (
-SELECT key as c FROM T1
- UNION ALL
-SELECT val as c FROM T1
-) subq1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key,val
-
-1
-11
-12
-13
-17
-18
-2
-28
-3
-7
-8
-8
-PREHOOK: query: EXPLAIN
-SELECT * FROM (
-SELECT key as c FROM T1
- UNION ALL
-SELECT key as c FROM T1
-) subq1
-PREHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: t1
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Union
-                Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          TableScan
-            alias: t1
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Union
-                Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 60 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT * FROM (
-SELECT key as c FROM T1
- UNION ALL
-SELECT key as c FROM T1
-) subq1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key
-
-1
-1
-2
-2
-3
-3
-7
-7
-8
-8
-8
-8
-PREHOOK: query: -- Work with insert overwrite
-FROM T1
-INSERT OVERWRITE TABLE T2 SELECT key, count(1) GROUP BY key
-INSERT OVERWRITE TABLE T3 SELECT key, sum(val) GROUP BY key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-PREHOOK: Output: default@t2
-PREHOOK: Output: default@t3
-Table:default@t1
-Columns:key,val
-
-PREHOOK: query: -- Simple joins
-SELECT *
-FROM T1 JOIN T2
-ON T1.key = T2.key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-PREHOOK: Input: default@t2
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key,val
-
-Table:default@t2
-Columns:key,val
-
-1	11	1	1
-2	12	2	1
-3	13	3	1
-7	17	7	1
-8	18	8	2
-8	28	8	2
-PREHOOK: query: EXPLAIN
-SELECT T1.key
-FROM T1 JOIN T2
-ON T1.key = T2.key
-PREHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: t1
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-          TableScan
-            alias: t2
-            Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0
-          Statistics: Num rows: 5 Data size: 16 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 5 Data size: 16 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT T1.key
-FROM T1 JOIN T2
-ON T1.key = T2.key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-PREHOOK: Input: default@t2
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key
-
-Table:default@t2
-Columns:key
-
-1
-2
-3
-7
-8
-8
-PREHOOK: query: SELECT *
-FROM T1 JOIN T2
-ON T1.key = T2.key AND T1.val = T2.val
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-PREHOOK: Input: default@t2
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key,val
-
-Table:default@t2
-Columns:key,val
-
-PREHOOK: query: -- Map join
-SELECT /*+ MAPJOIN(a) */ * 
-FROM T1 a JOIN T2 b 
-ON a.key = b.key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-PREHOOK: Input: default@t2
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key,val
-
-Table:default@t2
-Columns:key,val
-
-1	11	1	1
-2	12	2	1
-3	13	3	1
-7	17	7	1
-8	18	8	2
-8	28	8	2
-PREHOOK: query: -- More joins
-EXPLAIN
-SELECT *
-FROM T1 JOIN T2
-ON T1.key = T2.key AND T1.val = 3 and T2.val = 3
-PREHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: t1
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((UDFToDouble(val) = 3.0) and key is not null) (type: boolean)
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), val (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: string)
-          TableScan
-            alias: t2
-            Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((UDFToDouble(val) = 3.0) and key is not null) (type: boolean)
-              Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), val (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: string)
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT *
-FROM T1 JOIN T2
-ON T1.key = T2.key AND T1.val = 3 and T2.val = 3
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-PREHOOK: Input: default@t2
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key,val
-
-Table:default@t2
-Columns:key,val
-
-PREHOOK: query: EXPLAIN
-SELECT subq1.val
-FROM 
-(
-  SELECT val FROM T1 WHERE key = 5  
-) subq1
-JOIN 
-(
-  SELECT val FROM T2 WHERE key = 6
-) subq2 
-ON subq1.val = subq2.val
-PREHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: t1
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((UDFToDouble(key) = 5.0) and val is not null) (type: boolean)
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: val (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-          TableScan
-            alias: t2
-            Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((UDFToDouble(key) = 6.0) and val is not null) (type: boolean)
-              Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: val (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0
-          Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT subq1.val
-FROM 
-(
-  SELECT val FROM T1 WHERE key = 5  
-) subq1
-JOIN 
-(
-  SELECT val FROM T2 WHERE key = 6
-) subq2 
-ON subq1.val = subq2.val
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-PREHOOK: Input: default@t2
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key,val
-
-Table:default@t2
-Columns:key,val
-
-PREHOOK: query: -- Join followed by join
-EXPLAIN
-SELECT *
-FROM
-(
-  SELECT subq1.key as key
-  FROM
-  (
-    SELECT key, val FROM T1
-  ) subq1
-  JOIN
-  (
-    SELECT key, 'teststring' as val FROM T2
-  ) subq2
-  ON subq1.key = subq2.key
-) T4
-JOIN T3
-ON T3.key = T4.key
-PREHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: t1
-            Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-          TableScan
-            alias: t2
-            Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 5 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0
-          Statistics: Num rows: 5 Data size: 16 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 5 Data size: 16 Basic stats: COMPLETE Column stats: NONE
-          TableScan
-            alias: t3
-            Statistics: Num rows: 5 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 5 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), val (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 5 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 5 Data size: 30 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: string)
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 5 Data size: 17 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 5 Data size: 17 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT *
-FROM
-(
-  SELECT subq1.key as key
-  FROM
-  (
-    SELECT key, val FROM T1
-  ) subq1
-  JOIN
-  (
-    SELECT key, 'teststring' as val FROM T2
-  ) subq2
-  ON subq1.key = subq2.key
-) T4
-JOIN T3
-ON T3.key = T4.key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1
-PREHOOK: Input: default@t2
-PREHOOK: Input: default@t3
-#### A masked pattern was here ####
-Table:default@t1
-Columns:key
-
-Table:default@t2
-Columns:key
-
-Table:default@t3
-Columns:key,val
-
-1	1	11.0
-2	2	12.0
-3	3	13.0
-7	7	17.0
-8	8	46.0
-8	8	46.0
-PREHOOK: query: -- for partitioned table
-SELECT * FROM srcpart TABLESAMPLE (10 ROWS)
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-Table:default@srcpart
-Columns:ds,hr,key,value
-
-165	val_165	2008-04-08	11
-238	val_238	2008-04-08	11
-255	val_255	2008-04-08	11
-27	val_27	2008-04-08	11
-278	val_278	2008-04-08	11
-311	val_311	2008-04-08	11
-409	val_409	2008-04-08	11
-484	val_484	2008-04-08	11
-86	val_86	2008-04-08	11
-98	val_98	2008-04-08	11
-PREHOOK: query: SELECT key,ds FROM srcpart TABLESAMPLE (10 ROWS) WHERE hr='11'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-#### A masked pattern was here ####
-Table:default@srcpart
-Columns:ds,hr,key
-
-165	2008-04-08
-238	2008-04-08
-255	2008-04-08
-27	2008-04-08
-278	2008-04-08
-311	2008-04-08
-409	2008-04-08
-484	2008-04-08
-86	2008-04-08
-98	2008-04-08
-PREHOOK: query: SELECT value FROM srcpart TABLESAMPLE (10 ROWS) WHERE ds='2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-Table:default@srcpart
-Columns:ds,value
-
-val_165
-val_238
-val_255
-val_27
-val_278
-val_311
-val_409
-val_484
-val_86
-val_98

http://git-wip-us.apache.org/repos/asf/hive/blob/6f5c1135/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out b/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
deleted file mode 100644
index d52f020..0000000
--- a/ql/src/test/results/clientpositive/columnstats_part_coltype.q.out
+++ /dev/null
@@ -1,441 +0,0 @@
-PREHOOK: query: -- Test type date, int, and string in partition column
-drop table if exists partcolstats
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- Test type date, int, and string in partition column
-drop table if exists partcolstats
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table partcolstats (key int, value string) partitioned by (ds date, hr int, part string)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@partcolstats
-POSTHOOK: query: create table partcolstats (key int, value string) partitioned by (ds date, hr int, part string)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@partcolstats
-PREHOOK: query: insert into partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') select key, value from src limit 20
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-POSTHOOK: query: insert into partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') select key, value from src limit 20
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-02,hr=2,part=partA).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-02,hr=2,part=partA).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert into partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') select key, value from src limit 20
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@partcolstats@ds=2015-04-02/hr=2/part=partB
-POSTHOOK: query: insert into partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') select key, value from src limit 20
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partcolstats@ds=2015-04-02/hr=2/part=partB
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-02,hr=2,part=partB).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-02,hr=2,part=partB).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert into partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') select key, value from src limit 30
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@partcolstats@ds=2015-04-02/hr=3/part=partA
-POSTHOOK: query: insert into partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') select key, value from src limit 30
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partcolstats@ds=2015-04-02/hr=3/part=partA
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-02,hr=3,part=partA).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-02,hr=3,part=partA).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert into partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') select key, value from src limit 40
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@partcolstats@ds=2015-04-03/hr=3/part=partA
-POSTHOOK: query: insert into partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') select key, value from src limit 40
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partcolstats@ds=2015-04-03/hr=3/part=partA
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-03,hr=3,part=partA).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-03,hr=3,part=partA).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert into partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') select key, value from src limit 60
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@partcolstats@ds=2015-04-03/hr=3/part=partB
-POSTHOOK: query: insert into partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') select key, value from src limit 60
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partcolstats@ds=2015-04-03/hr=3/part=partB
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-03,hr=3,part=partB).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partcolstats PARTITION(ds=2015-04-03,hr=3,part=partB).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') compute statistics for columns
-PREHOOK: type: QUERY
-PREHOOK: Input: default@partcolstats
-PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') compute statistics for columns
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@partcolstats
-POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-#### A masked pattern was here ####
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') key
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') key
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-key                 	int                 	27                  	484                 	0                   	18                  	                    	                    	                    	                    	from deserializer   
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partA') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	                    	                    	0                   	18                  	6.8                 	7                   	                    	                    	from deserializer   
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') key
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') key
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 
-	 	 	 	 	 	 	 	 	 	 
-key                 	int                 	from deserializer   	 	 	 	 	 	 	 	 
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	from deserializer   	 	 	 	 	 	 	 	 
-PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', hr=2, part) compute statistics for columns
-PREHOOK: type: QUERY
-PREHOOK: Input: default@partcolstats
-PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', hr=2, part) compute statistics for columns
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@partcolstats
-POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
-#### A masked pattern was here ####
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') key
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') key
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-key                 	int                 	27                  	484                 	0                   	18                  	                    	                    	                    	                    	from deserializer   
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=2, part='partB') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	                    	                    	0                   	18                  	6.8                 	7                   	                    	                    	from deserializer   
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') key
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') key
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 
-	 	 	 	 	 	 	 	 	 	 
-key                 	int                 	from deserializer   	 	 	 	 	 	 	 	 
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	from deserializer   	 	 	 	 	 	 	 	 
-PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', hr, part) compute statistics for columns
-PREHOOK: type: QUERY
-PREHOOK: Input: default@partcolstats
-PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
-PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=3/part=partA
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', hr, part) compute statistics for columns
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@partcolstats
-POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
-POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=3/part=partA
-#### A masked pattern was here ####
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') key
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') key
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-key                 	int                 	27                  	495                 	0                   	28                  	                    	                    	                    	                    	from deserializer   
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-02', hr=3, part='partA') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	                    	                    	0                   	18                  	6.833333333333333   	7                   	                    	                    	from deserializer   
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') key
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') key
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 
-	 	 	 	 	 	 	 	 	 	 
-key                 	int                 	from deserializer   	 	 	 	 	 	 	 	 
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	from deserializer   	 	 	 	 	 	 	 	 
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') key
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') key
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 
-	 	 	 	 	 	 	 	 	 	 
-key                 	int                 	from deserializer   	 	 	 	 	 	 	 	 
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	from deserializer   	 	 	 	 	 	 	 	 
-PREHOOK: query: analyze table partcolstats partition (ds, hr, part) compute statistics for columns
-PREHOOK: type: QUERY
-PREHOOK: Input: default@partcolstats
-PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
-PREHOOK: Input: default@partcolstats@ds=2015-04-02/hr=3/part=partA
-PREHOOK: Input: default@partcolstats@ds=2015-04-03/hr=3/part=partA
-PREHOOK: Input: default@partcolstats@ds=2015-04-03/hr=3/part=partB
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table partcolstats partition (ds, hr, part) compute statistics for columns
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@partcolstats
-POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partA
-POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=2/part=partB
-POSTHOOK: Input: default@partcolstats@ds=2015-04-02/hr=3/part=partA
-POSTHOOK: Input: default@partcolstats@ds=2015-04-03/hr=3/part=partA
-POSTHOOK: Input: default@partcolstats@ds=2015-04-03/hr=3/part=partB
-#### A masked pattern was here ####
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') key
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') key
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-key                 	int                 	15                  	495                 	0                   	43                  	                    	                    	                    	                    	from deserializer   
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partA') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	                    	                    	0                   	34                  	6.825               	7                   	                    	                    	from deserializer   
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') key
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') key
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-key                 	int                 	15                  	495                 	0                   	51                  	                    	                    	                    	                    	from deserializer   
-PREHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstats
-POSTHOOK: query: describe formatted partcolstats partition (ds=date '2015-04-03', hr=3, part='partB') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstats
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	                    	                    	0                   	53                  	6.883333333333334   	7                   	                    	                    	from deserializer   
-PREHOOK: query: drop table partcolstats
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@partcolstats
-PREHOOK: Output: default@partcolstats
-POSTHOOK: query: drop table partcolstats
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@partcolstats
-POSTHOOK: Output: default@partcolstats
-PREHOOK: query: -- Test type tinyint, smallint, and bigint in partition column
-drop table if exists partcolstatsnum
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- Test type tinyint, smallint, and bigint in partition column
-drop table if exists partcolstatsnum
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table partcolstatsnum (key int, value string) partitioned by (tint tinyint, sint smallint, bint bigint)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@partcolstatsnum
-POSTHOOK: query: create table partcolstatsnum (key int, value string) partitioned by (tint tinyint, sint smallint, bint bigint)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@partcolstatsnum
-PREHOOK: query: insert into partcolstatsnum partition (tint=100, sint=1000, bint=1000000) select key, value from src limit 30
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@partcolstatsnum@tint=100/sint=1000/bint=1000000
-POSTHOOK: query: insert into partcolstatsnum partition (tint=100, sint=1000, bint=1000000) select key, value from src limit 30
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partcolstatsnum@tint=100/sint=1000/bint=1000000
-POSTHOOK: Lineage: partcolstatsnum PARTITION(tint=100,sint=1000,bint=1000000).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partcolstatsnum PARTITION(tint=100,sint=1000,bint=1000000).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: analyze table partcolstatsnum partition (tint=100, sint=1000, bint=1000000) compute statistics for columns
-PREHOOK: type: QUERY
-PREHOOK: Input: default@partcolstatsnum
-PREHOOK: Input: default@partcolstatsnum@tint=100/sint=1000/bint=1000000
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table partcolstatsnum partition (tint=100, sint=1000, bint=1000000) compute statistics for columns
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@partcolstatsnum
-POSTHOOK: Input: default@partcolstatsnum@tint=100/sint=1000/bint=1000000
-#### A masked pattern was here ####
-PREHOOK: query: describe formatted partcolstatsnum partition (tint=100, sint=1000, bint=1000000) value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstatsnum
-POSTHOOK: query: describe formatted partcolstatsnum partition (tint=100, sint=1000, bint=1000000) value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstatsnum
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	                    	                    	0                   	18                  	6.833333333333333   	7                   	                    	                    	from deserializer   
-PREHOOK: query: drop table partcolstatsnum
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@partcolstatsnum
-PREHOOK: Output: default@partcolstatsnum
-POSTHOOK: query: drop table partcolstatsnum
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@partcolstatsnum
-POSTHOOK: Output: default@partcolstatsnum
-PREHOOK: query: -- Test type decimal in partition column
-drop table if exists partcolstatsdec
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- Test type decimal in partition column
-drop table if exists partcolstatsdec
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table partcolstatsdec (key int, value string) partitioned by (decpart decimal(8,4))
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@partcolstatsdec
-POSTHOOK: query: create table partcolstatsdec (key int, value string) partitioned by (decpart decimal(8,4))
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@partcolstatsdec
-PREHOOK: query: insert into partcolstatsdec partition (decpart='1000.0001') select key, value from src limit 30
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@partcolstatsdec@decpart=1000.0001
-POSTHOOK: query: insert into partcolstatsdec partition (decpart='1000.0001') select key, value from src limit 30
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partcolstatsdec@decpart=1000.0001
-POSTHOOK: Lineage: partcolstatsdec PARTITION(decpart=1000.0001).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partcolstatsdec PARTITION(decpart=1000.0001).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: analyze table partcolstatsdec partition (decpart='1000.0001') compute statistics for columns
-PREHOOK: type: QUERY
-PREHOOK: Input: default@partcolstatsdec
-PREHOOK: Input: default@partcolstatsdec@decpart=1000.0001
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table partcolstatsdec partition (decpart='1000.0001') compute statistics for columns
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@partcolstatsdec
-POSTHOOK: Input: default@partcolstatsdec@decpart=1000.0001
-#### A masked pattern was here ####
-PREHOOK: query: describe formatted partcolstatsdec partition (decpart='1000.0001') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstatsdec
-POSTHOOK: query: describe formatted partcolstatsdec partition (decpart='1000.0001') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstatsdec
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	                    	                    	0                   	18                  	6.833333333333333   	7                   	                    	                    	from deserializer   
-PREHOOK: query: drop table partcolstatsdec
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@partcolstatsdec
-PREHOOK: Output: default@partcolstatsdec
-POSTHOOK: query: drop table partcolstatsdec
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@partcolstatsdec
-POSTHOOK: Output: default@partcolstatsdec
-PREHOOK: query: -- Test type varchar and char in partition column
-drop table if exists partcolstatschar
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- Test type varchar and char in partition column
-drop table if exists partcolstatschar
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table partcolstatschar (key int, value string) partitioned by (varpart varchar(5), charpart char(3))
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@partcolstatschar
-POSTHOOK: query: create table partcolstatschar (key int, value string) partitioned by (varpart varchar(5), charpart char(3))
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@partcolstatschar
-PREHOOK: query: insert into partcolstatschar partition (varpart='part1', charpart='aaa') select key, value from src limit 30
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@partcolstatschar@varpart=part1/charpart=aaa
-POSTHOOK: query: insert into partcolstatschar partition (varpart='part1', charpart='aaa') select key, value from src limit 30
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@partcolstatschar@varpart=part1/charpart=aaa
-POSTHOOK: Lineage: partcolstatschar PARTITION(varpart=part1,charpart=aaa).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: partcolstatschar PARTITION(varpart=part1,charpart=aaa).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: analyze table partcolstatschar partition (varpart='part1', charpart='aaa') compute statistics for columns
-PREHOOK: type: QUERY
-PREHOOK: Input: default@partcolstatschar
-PREHOOK: Input: default@partcolstatschar@varpart=part1/charpart=aaa
-#### A masked pattern was here ####
-POSTHOOK: query: analyze table partcolstatschar partition (varpart='part1', charpart='aaa') compute statistics for columns
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@partcolstatschar
-POSTHOOK: Input: default@partcolstatschar@varpart=part1/charpart=aaa
-#### A masked pattern was here ####
-PREHOOK: query: describe formatted partcolstatschar partition (varpart='part1', charpart='aaa') value
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@partcolstatschar
-POSTHOOK: query: describe formatted partcolstatschar partition (varpart='part1', charpart='aaa') value
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@partcolstatschar
-# col_name            	data_type           	min                 	max                 	num_nulls           	distinct_count      	avg_col_len         	max_col_len         	num_trues           	num_falses          	comment             
-	 	 	 	 	 	 	 	 	 	 
-value               	string              	                    	                    	0                   	18                  	6.833333333333333   	7                   	                    	                    	from deserializer   
-PREHOOK: query: drop table partcolstatschar
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@partcolstatschar
-PREHOOK: Output: default@partcolstatschar
-POSTHOOK: query: drop table partcolstatschar
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@partcolstatschar
-POSTHOOK: Output: default@partcolstatschar

