hive-commits mailing list archives

From hashut...@apache.org
Subject svn commit: r1529037 - in /hive/trunk/ql/src/test: queries/clientpositive/stats_partscan_1.q queries/clientpositive/stats_partscan_1_23.q results/clientpositive/stats_partscan_1.q.out results/clientpositive/stats_partscan_1_23.q.out
Date Fri, 04 Oct 2013 00:17:48 GMT
Author: hashutosh
Date: Fri Oct  4 00:17:47 2013
New Revision: 1529037

URL: http://svn.apache.org/r1529037
Log:
HIVE-4690 : stats_partscan_1.q produces different results with different hadoop.mr.rev (Navis & Brock Noland via Ashutosh Chauhan)
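
For context, the fix follows Hive's qfile version-gating convention: each variant begins with an INCLUDE_HADOOP_MAJOR_VERSIONS comment naming the Hadoop major versions it applies to and pins the split-size settings the expected output depends on. A condensed, illustrative sketch of the 0.23 variant (drawn from the full file added below, not an additional change):

    -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
    set mapred.min.split.size=256;
    set mapred.max.split.size=256;
    -- gather partition statistics with a partial scan of the RCFile partition
    analyze table analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11) compute statistics partialscan;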

Added:
    hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1_23.q
    hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1_23.q.out
Modified:
    hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q
    hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1.q.out

Modified: hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q?rev=1529037&r1=1529036&r2=1529037&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q Fri Oct  4 00:17:47 2013
@@ -7,6 +7,11 @@ set mapred.min.split.size.per.node=256;
 set mapred.min.split.size.per.rack=256;
 set mapred.max.split.size=256;
 
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- This test uses mapred.min.split.size/mapred.max.split.size for controlling the
+-- number of input splits, which is not effective on Hadoop 0.20.
+-- stats_partscan_1_23.q is the same test as this one but produces a different result.
+
 -- test analyze table ... compute statistics partialscan
 
 -- 1. prepare data

Added: hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1_23.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1_23.q?rev=1529037&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1_23.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1_23.q Fri Oct  4 00:17:47 2013
@@ -0,0 +1,37 @@
+set datanucleus.cache.collections=false;
+set hive.stats.autogather=false;
+set hive.exec.dynamic.partition=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+set mapred.min.split.size=256;
+set mapred.min.split.size.per.node=256;
+set mapred.min.split.size.per.rack=256;
+set mapred.max.split.size=256;
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- This test uses mapred.min.split.size/mapred.max.split.size for controlling the
+-- number of input splits.
+-- stats_partscan_1.q is the same test as this one but produces a different result.
+
+-- test analyze table ... compute statistics partialscan
+
+-- 1. prepare data
+CREATE table analyze_srcpart_partial_scan (key STRING, value STRING)
+partitioned by (ds string, hr string)
+stored as rcfile;
+insert overwrite table analyze_srcpart_partial_scan partition (ds, hr) select * from srcpart where ds is not null;
+describe formatted analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11);
+
+set hive.stats.autogather=true;
+
+-- 2. partialscan
+explain
+analyze table analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11) compute statistics partialscan;
+analyze table analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11) compute statistics partialscan;
+
+-- 3. confirm result
+describe formatted analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11);
+describe formatted analyze_srcpart_partial_scan PARTITION(ds='2008-04-09',hr=11);
+drop table analyze_srcpart_partial_scan;
+
+
+

Modified: hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1.q.out?rev=1529037&r1=1529036&r2=1529037&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1.q.out Fri Oct  4 00:17:47 2013
@@ -1,11 +1,21 @@
-PREHOOK: query: -- test analyze table ... compute statistics partialscan
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- This test uses mapred.min.split.size/mapred.max.split.size for controlling the
+-- number of input splits, which is not effective on Hadoop 0.20.
+-- stats_partscan_1_23.q is the same test as this one but produces a different result.
+
+-- test analyze table ... compute statistics partialscan
 
 -- 1. prepare data
 CREATE table analyze_srcpart_partial_scan (key STRING, value STRING)
 partitioned by (ds string, hr string)
 stored as rcfile
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: -- test analyze table ... compute statistics partialscan
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- This test uses mapred.min.split.size/mapred.max.split.size for controlling the
+-- number of input splits, which is not effective on Hadoop 0.20.
+-- stats_partscan_1_23.q is the same test as this one but produces a different result.
+
+-- test analyze table ... compute statistics partialscan
 
 -- 1. prepare data
 CREATE table analyze_srcpart_partial_scan (key STRING, value STRING)

Added: hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1_23.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1_23.q.out?rev=1529037&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1_23.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/stats_partscan_1_23.q.out Fri Oct  4 00:17:47 2013
@@ -0,0 +1,254 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- This test uses mapred.min.split.size/mapred.max.split.size for controlling the
+-- number of input splits.
+-- stats_partscan_1.q is the same test as this one but produces a different result.
+
+-- test analyze table ... compute statistics partialscan
+
+-- 1. prepare data
+CREATE table analyze_srcpart_partial_scan (key STRING, value STRING)
+partitioned by (ds string, hr string)
+stored as rcfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- This test uses mapred.min.split.size/mapred.max.split.size for controlling the
+-- number of input splits.
+-- stats_partscan_1.q is the same test as this one but produces a different result.
+
+-- test analyze table ... compute statistics partialscan
+
+-- 1. prepare data
+CREATE table analyze_srcpart_partial_scan (key STRING, value STRING)
+partitioned by (ds string, hr string)
+stored as rcfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@analyze_srcpart_partial_scan
+PREHOOK: query: insert overwrite table analyze_srcpart_partial_scan partition (ds, hr) select * from srcpart where ds is not null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+PREHOOK: Output: default@analyze_srcpart_partial_scan
+POSTHOOK: query: insert overwrite table analyze_srcpart_partial_scan partition (ds, hr) select * from srcpart where ds is not null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+POSTHOOK: Output: default@analyze_srcpart_partial_scan@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@analyze_srcpart_partial_scan@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@analyze_srcpart_partial_scan@ds=2008-04-09/hr=11
+POSTHOOK: Output: default@analyze_srcpart_partial_scan@ds=2008-04-09/hr=12
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: describe formatted analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	None                
+value               	string              	None                
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	None                
+hr                  	string              	None                
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	analyze_srcpart_partial_scan	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: -- 2. partialscan
+explain
+analyze table analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11) compute statistics partialscan
+PREHOOK: type: QUERY
+POSTHOOK: query: -- 2. partialscan
+explain
+analyze table analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11) compute statistics partialscan
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_ANALYZE (TOK_TAB (TOK_TABNAME analyze_srcpart_partial_scan) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr 11))) partialscan)
+
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-2
+    Partial Scan Statistics
+
+  Stage: Stage-1
+    Stats-Aggr Operator
+
+
+PREHOOK: query: analyze table analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11) compute statistics partialscan
+PREHOOK: type: QUERY
+PREHOOK: Input: default@analyze_srcpart_partial_scan
+PREHOOK: Input: default@analyze_srcpart_partial_scan@ds=2008-04-08/hr=11
+PREHOOK: Output: default@analyze_srcpart_partial_scan
+PREHOOK: Output: default@analyze_srcpart_partial_scan@ds=2008-04-08/hr=11
+POSTHOOK: query: analyze table analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11) compute statistics partialscan
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@analyze_srcpart_partial_scan
+POSTHOOK: Input: default@analyze_srcpart_partial_scan@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@analyze_srcpart_partial_scan
+POSTHOOK: Output: default@analyze_srcpart_partial_scan@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- 3. confirm result
+describe formatted analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: -- 3. confirm result
+describe formatted analyze_srcpart_partial_scan PARTITION(ds='2008-04-08',hr=11)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	None                
+value               	string              	None                
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	None                
+hr                  	string              	None                
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	analyze_srcpart_partial_scan	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	numFiles            	22                  
+	numRows             	500                 
+	rawDataSize         	4812                
+	totalSize           	6954                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: describe formatted analyze_srcpart_partial_scan PARTITION(ds='2008-04-09',hr=11)
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted analyze_srcpart_partial_scan PARTITION(ds='2008-04-09',hr=11)
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	None                
+value               	string              	None                
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	None                
+hr                  	string              	None                
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-09, 11]    	 
+Database:           	default             	 
+Table:              	analyze_srcpart_partial_scan	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: drop table analyze_srcpart_partial_scan
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@analyze_srcpart_partial_scan
+PREHOOK: Output: default@analyze_srcpart_partial_scan
+POSTHOOK: query: drop table analyze_srcpart_partial_scan
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@analyze_srcpart_partial_scan
+POSTHOOK: Output: default@analyze_srcpart_partial_scan
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: analyze_srcpart_partial_scan PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]


