hive-commits mailing list archives

From: br...@apache.org
Subject: svn commit: r1635536 [17/28] - in /hive/branches/spark: ./ accumulo-handler/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/ accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/columns/ accumulo-handler/src/test/org/apache/hado...
Date: Thu, 30 Oct 2014 16:22:48 GMT
Modified: hive/branches/spark/ql/src/test/results/clientpositive/alter_partition_change_col.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/alter_partition_change_col.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/alter_partition_change_col.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/alter_partition_change_col.q.out Thu Oct 30 16:22:33 2014
@@ -18,73 +18,81 @@ POSTHOOK: query: load data local inpath 
 POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@alter_partition_change_col0
-PREHOOK: query: create table alter_partition_change_col1 (c1 string, c2 string) partitioned by (p1 string)
+PREHOOK: query: create table alter_partition_change_col1 (c1 string, c2 string) partitioned by (p1 string, p2 string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@alter_partition_change_col1
-POSTHOOK: query: create table alter_partition_change_col1 (c1 string, c2 string) partitioned by (p1 string)
+POSTHOOK: query: create table alter_partition_change_col1 (c1 string, c2 string) partitioned by (p1 string, p2 string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@alter_partition_change_col1
-PREHOOK: query: insert overwrite table alter_partition_change_col1 partition (p1)
-  select c1, c2, 'abc' from alter_partition_change_col0
+PREHOOK: query: insert overwrite table alter_partition_change_col1 partition (p1, p2)
+  select c1, c2, 'abc', '123' from alter_partition_change_col0
   union all
-  select c1, c2, null from alter_partition_change_col0
+  select c1, c2, null, '123' from alter_partition_change_col0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter_partition_change_col0
 PREHOOK: Output: default@alter_partition_change_col1
-POSTHOOK: query: insert overwrite table alter_partition_change_col1 partition (p1)
-  select c1, c2, 'abc' from alter_partition_change_col0
+POSTHOOK: query: insert overwrite table alter_partition_change_col1 partition (p1, p2)
+  select c1, c2, 'abc', '123' from alter_partition_change_col0
   union all
-  select c1, c2, null from alter_partition_change_col0
+  select c1, c2, null, '123' from alter_partition_change_col0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col0
-POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
-POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=__HIVE_DEFAULT_PARTITION__).c1 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), ]
-POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=__HIVE_DEFAULT_PARTITION__).c2 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), ]
-POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=abc).c1 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), ]
-POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=abc).c2 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), ]
+POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
+POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=__HIVE_DEFAULT_PARTITION__,p2=123).c1 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), ]
+POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=__HIVE_DEFAULT_PARTITION__,p2=123).c2 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), ]
+POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=abc,p2=123).c1 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c1, type:string, comment:null), ]
+POSTHOOK: Lineage: alter_partition_change_col1 PARTITION(p1=abc,p2=123).c2 EXPRESSION [(alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), (alter_partition_change_col0)alter_partition_change_col0.FieldSchema(name:c2, type:string, comment:null), ]
 PREHOOK: query: show partitions alter_partition_change_col1
 PREHOOK: type: SHOWPARTITIONS
 PREHOOK: Input: default@alter_partition_change_col1
 POSTHOOK: query: show partitions alter_partition_change_col1
 POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@alter_partition_change_col1
-p1=__HIVE_DEFAULT_PARTITION__
-p1=abc
-PREHOOK: query: select * from alter_partition_change_col1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Input: default@alter_partition_change_col1@p1=abc
-#### A masked pattern was here ####
-POSTHOOK: query: select * from alter_partition_change_col1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-#### A masked pattern was here ####
-Beck	0.0	__HIVE_DEFAULT_PARTITION__
-Beck	0.0	abc
-Beck	77.341	__HIVE_DEFAULT_PARTITION__
-Beck	77.341	abc
-Beck	79.9	__HIVE_DEFAULT_PARTITION__
-Beck	79.9	abc
-Cluck	5.96	__HIVE_DEFAULT_PARTITION__
-Cluck	5.96	abc
-Mary	33.33	__HIVE_DEFAULT_PARTITION__
-Mary	33.33	abc
-Mary	4.329	__HIVE_DEFAULT_PARTITION__
-Mary	4.329	abc
-Snow	55.71	__HIVE_DEFAULT_PARTITION__
-Snow	55.71	abc
-Tom	-12.25	__HIVE_DEFAULT_PARTITION__
-Tom	-12.25	abc
-Tom	19.00	__HIVE_DEFAULT_PARTITION__
-Tom	19.00	abc
-Tom	234.79	__HIVE_DEFAULT_PARTITION__
-Tom	234.79	abc
+p1=__HIVE_DEFAULT_PARTITION__/p2=123
+p1=abc/p2=123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	0.0	abc	123
+Beck	77.341	abc	123
+Beck	79.9	abc	123
+Cluck	5.96	abc	123
+Mary	33.33	abc	123
+Mary	4.329	abc	123
+Snow	55.71	abc	123
+Tom	-12.25	abc	123
+Tom	19.00	abc	123
+Tom	234.79	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+Beck	0.0	__HIVE_DEFAULT_PARTITION__	123
+Beck	77.341	__HIVE_DEFAULT_PARTITION__	123
+Beck	79.9	__HIVE_DEFAULT_PARTITION__	123
+Cluck	5.96	__HIVE_DEFAULT_PARTITION__	123
+Mary	33.33	__HIVE_DEFAULT_PARTITION__	123
+Mary	4.329	__HIVE_DEFAULT_PARTITION__	123
+Snow	55.71	__HIVE_DEFAULT_PARTITION__	123
+Tom	-12.25	__HIVE_DEFAULT_PARTITION__	123
+Tom	19.00	__HIVE_DEFAULT_PARTITION__	123
+Tom	234.79	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: -- Change c2 to decimal(10,0)
 alter table alter_partition_change_col1 change c2 c2 decimal(10,0)
 PREHOOK: type: ALTERTABLE_RENAMECOL
@@ -95,56 +103,64 @@ alter table alter_partition_change_col1 
 POSTHOOK: type: ALTERTABLE_RENAMECOL
 POSTHOOK: Input: default@alter_partition_change_col1
 POSTHOOK: Output: default@alter_partition_change_col1
-PREHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(10,0)
+PREHOOK: query: alter table alter_partition_change_col1 partition (p1='abc', p2='123') change c2 c2 decimal(10,0)
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Output: default@alter_partition_change_col1@p1=abc
-POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(10,0)
+PREHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
+POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='abc', p2='123') change c2 c2 decimal(10,0)
 POSTHOOK: type: ALTERTABLE_RENAMECOL
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
-PREHOOK: query: alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(10,0)
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
+PREHOOK: query: alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__', p2='123') change c2 c2 decimal(10,0)
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(10,0)
+PREHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__', p2='123') change c2 c2 decimal(10,0)
 POSTHOOK: type: ALTERTABLE_RENAMECOL
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: query: select * from alter_partition_change_col1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Input: default@alter_partition_change_col1@p1=abc
-#### A masked pattern was here ####
-POSTHOOK: query: select * from alter_partition_change_col1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-#### A masked pattern was here ####
-Beck	0	__HIVE_DEFAULT_PARTITION__
-Beck	0	abc
-Beck	77	__HIVE_DEFAULT_PARTITION__
-Beck	77	abc
-Beck	80	__HIVE_DEFAULT_PARTITION__
-Beck	80	abc
-Cluck	6	__HIVE_DEFAULT_PARTITION__
-Cluck	6	abc
-Mary	33	__HIVE_DEFAULT_PARTITION__
-Mary	33	abc
-Mary	4	__HIVE_DEFAULT_PARTITION__
-Mary	4	abc
-Snow	56	__HIVE_DEFAULT_PARTITION__
-Snow	56	abc
-Tom	-12	__HIVE_DEFAULT_PARTITION__
-Tom	-12	abc
-Tom	19	__HIVE_DEFAULT_PARTITION__
-Tom	19	abc
-Tom	235	__HIVE_DEFAULT_PARTITION__
-Tom	235	abc
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	0	abc	123
+Beck	77	abc	123
+Beck	80	abc	123
+Cluck	6	abc	123
+Mary	33	abc	123
+Mary	4	abc	123
+Snow	56	abc	123
+Tom	-12	abc	123
+Tom	19	abc	123
+Tom	235	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+Beck	0	__HIVE_DEFAULT_PARTITION__	123
+Beck	77	__HIVE_DEFAULT_PARTITION__	123
+Beck	80	__HIVE_DEFAULT_PARTITION__	123
+Cluck	6	__HIVE_DEFAULT_PARTITION__	123
+Mary	33	__HIVE_DEFAULT_PARTITION__	123
+Mary	4	__HIVE_DEFAULT_PARTITION__	123
+Snow	56	__HIVE_DEFAULT_PARTITION__	123
+Tom	-12	__HIVE_DEFAULT_PARTITION__	123
+Tom	19	__HIVE_DEFAULT_PARTITION__	123
+Tom	235	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: -- Change the column type at the table level. Table-level describe shows the new type, but the existing partition does not.
 alter table alter_partition_change_col1 change c2 c2 decimal(14,4)
 PREHOOK: type: ALTERTABLE_RENAMECOL
@@ -164,182 +180,214 @@ POSTHOOK: Input: default@alter_partition
 c1                  	string              	                    
 c2                  	decimal(14,4)       	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+p2                  	string              	                    
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@alter_partition_change_col1
 c1                  	string              	                    
 c2                  	decimal(10,0)       	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: select * from alter_partition_change_col1
+p2                  	string              	                    
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	0	abc	123
+Beck	77	abc	123
+Beck	80	abc	123
+Cluck	6	abc	123
+Mary	33	abc	123
+Mary	4	abc	123
+Snow	56	abc	123
+Tom	-12	abc	123
+Tom	19	abc	123
+Tom	235	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-POSTHOOK: query: select * from alter_partition_change_col1
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-#### A masked pattern was here ####
-Beck	0	__HIVE_DEFAULT_PARTITION__
-Beck	0	abc
-Beck	77	__HIVE_DEFAULT_PARTITION__
-Beck	77	abc
-Beck	80	__HIVE_DEFAULT_PARTITION__
-Beck	80	abc
-Cluck	6	__HIVE_DEFAULT_PARTITION__
-Cluck	6	abc
-Mary	33	__HIVE_DEFAULT_PARTITION__
-Mary	33	abc
-Mary	4	__HIVE_DEFAULT_PARTITION__
-Mary	4	abc
-Snow	56	__HIVE_DEFAULT_PARTITION__
-Snow	56	abc
-Tom	-12	__HIVE_DEFAULT_PARTITION__
-Tom	-12	abc
-Tom	19	__HIVE_DEFAULT_PARTITION__
-Tom	19	abc
-Tom	235	__HIVE_DEFAULT_PARTITION__
-Tom	235	abc
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+Beck	0	__HIVE_DEFAULT_PARTITION__	123
+Beck	77	__HIVE_DEFAULT_PARTITION__	123
+Beck	80	__HIVE_DEFAULT_PARTITION__	123
+Cluck	6	__HIVE_DEFAULT_PARTITION__	123
+Mary	33	__HIVE_DEFAULT_PARTITION__	123
+Mary	4	__HIVE_DEFAULT_PARTITION__	123
+Snow	56	__HIVE_DEFAULT_PARTITION__	123
+Tom	-12	__HIVE_DEFAULT_PARTITION__	123
+Tom	19	__HIVE_DEFAULT_PARTITION__	123
+Tom	235	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: -- now change the column type of the existing partition
-alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(14,4)
+alter table alter_partition_change_col1 partition (p1='abc', p2='123') change c2 c2 decimal(14,4)
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Output: default@alter_partition_change_col1@p1=abc
+PREHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
 POSTHOOK: query: -- now change the column type of the existing partition
-alter table alter_partition_change_col1 partition (p1='abc') change c2 c2 decimal(14,4)
+alter table alter_partition_change_col1 partition (p1='abc', p2='123') change c2 c2 decimal(14,4)
 POSTHOOK: type: ALTERTABLE_RENAMECOL
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
-PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@alter_partition_change_col1
 c1                  	string              	                    
 c2                  	decimal(14,4)       	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: select * from alter_partition_change_col1
+p2                  	string              	                    
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	0.0	abc	123
+Beck	77.341	abc	123
+Beck	79.9	abc	123
+Cluck	5.96	abc	123
+Mary	33.33	abc	123
+Mary	4.329	abc	123
+Snow	55.71	abc	123
+Tom	-12.25	abc	123
+Tom	19.00	abc	123
+Tom	234.79	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-POSTHOOK: query: select * from alter_partition_change_col1
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-#### A masked pattern was here ####
-Beck	0	__HIVE_DEFAULT_PARTITION__
-Beck	0.0	abc
-Beck	77	__HIVE_DEFAULT_PARTITION__
-Beck	77.341	abc
-Beck	79.9	abc
-Beck	80	__HIVE_DEFAULT_PARTITION__
-Cluck	5.96	abc
-Cluck	6	__HIVE_DEFAULT_PARTITION__
-Mary	33	__HIVE_DEFAULT_PARTITION__
-Mary	33.33	abc
-Mary	4	__HIVE_DEFAULT_PARTITION__
-Mary	4.329	abc
-Snow	55.71	abc
-Snow	56	__HIVE_DEFAULT_PARTITION__
-Tom	-12	__HIVE_DEFAULT_PARTITION__
-Tom	-12.25	abc
-Tom	19	__HIVE_DEFAULT_PARTITION__
-Tom	19.00	abc
-Tom	234.79	abc
-Tom	235	__HIVE_DEFAULT_PARTITION__
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+Beck	0	__HIVE_DEFAULT_PARTITION__	123
+Beck	77	__HIVE_DEFAULT_PARTITION__	123
+Beck	80	__HIVE_DEFAULT_PARTITION__	123
+Cluck	6	__HIVE_DEFAULT_PARTITION__	123
+Mary	33	__HIVE_DEFAULT_PARTITION__	123
+Mary	4	__HIVE_DEFAULT_PARTITION__	123
+Snow	56	__HIVE_DEFAULT_PARTITION__	123
+Tom	-12	__HIVE_DEFAULT_PARTITION__	123
+Tom	19	__HIVE_DEFAULT_PARTITION__	123
+Tom	235	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: -- change column for default partition value
-alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(14,4)
+alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__', p2='123') change c2 c2 decimal(14,4)
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
+PREHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 POSTHOOK: query: -- change column for default partition value
-alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__') change c2 c2 decimal(14,4)
+alter table alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__', p2='123') change c2 c2 decimal(14,4)
 POSTHOOK: type: ALTERTABLE_RENAMECOL
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: query: describe alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__')
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__', p2='123')
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: query: describe alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__')
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__', p2='123')
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@alter_partition_change_col1
 c1                  	string              	                    
 c2                  	decimal(14,4)       	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: select * from alter_partition_change_col1
+p2                  	string              	                    
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	0.0	abc	123
+Beck	77.341	abc	123
+Beck	79.9	abc	123
+Cluck	5.96	abc	123
+Mary	33.33	abc	123
+Mary	4.329	abc	123
+Snow	55.71	abc	123
+Tom	-12.25	abc	123
+Tom	19.00	abc	123
+Tom	234.79	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-POSTHOOK: query: select * from alter_partition_change_col1
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-#### A masked pattern was here ####
-Beck	0.0	__HIVE_DEFAULT_PARTITION__
-Beck	0.0	abc
-Beck	77.341	__HIVE_DEFAULT_PARTITION__
-Beck	77.341	abc
-Beck	79.9	__HIVE_DEFAULT_PARTITION__
-Beck	79.9	abc
-Cluck	5.96	__HIVE_DEFAULT_PARTITION__
-Cluck	5.96	abc
-Mary	33.33	__HIVE_DEFAULT_PARTITION__
-Mary	33.33	abc
-Mary	4.329	__HIVE_DEFAULT_PARTITION__
-Mary	4.329	abc
-Snow	55.71	__HIVE_DEFAULT_PARTITION__
-Snow	55.71	abc
-Tom	-12.25	__HIVE_DEFAULT_PARTITION__
-Tom	-12.25	abc
-Tom	19.00	__HIVE_DEFAULT_PARTITION__
-Tom	19.00	abc
-Tom	234.79	__HIVE_DEFAULT_PARTITION__
-Tom	234.79	abc
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+Beck	0.0	__HIVE_DEFAULT_PARTITION__	123
+Beck	77.341	__HIVE_DEFAULT_PARTITION__	123
+Beck	79.9	__HIVE_DEFAULT_PARTITION__	123
+Cluck	5.96	__HIVE_DEFAULT_PARTITION__	123
+Mary	33.33	__HIVE_DEFAULT_PARTITION__	123
+Mary	4.329	__HIVE_DEFAULT_PARTITION__	123
+Snow	55.71	__HIVE_DEFAULT_PARTITION__	123
+Tom	-12.25	__HIVE_DEFAULT_PARTITION__	123
+Tom	19.00	__HIVE_DEFAULT_PARTITION__	123
+Tom	234.79	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: -- Try out replace columns
-alter table alter_partition_change_col1 partition (p1='abc') replace columns (c1 string)
+alter table alter_partition_change_col1 partition (p1='abc', p2='123') replace columns (c1 string)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Output: default@alter_partition_change_col1@p1=abc
+PREHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
 POSTHOOK: query: -- Try out replace columns
-alter table alter_partition_change_col1 partition (p1='abc') replace columns (c1 string)
+alter table alter_partition_change_col1 partition (p1='abc', p2='123') replace columns (c1 string)
 POSTHOOK: type: ALTERTABLE_REPLACECOLS
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
 PREHOOK: query: describe alter_partition_change_col1
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@alter_partition_change_col1
@@ -349,56 +397,68 @@ POSTHOOK: Input: default@alter_partition
 c1                  	string              	                    
 c2                  	decimal(14,4)       	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+p2                  	string              	                    
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@alter_partition_change_col1
 c1                  	string              	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: select * from alter_partition_change_col1
+p2                  	string              	                    
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	NULL	abc	123
+Beck	NULL	abc	123
+Beck	NULL	abc	123
+Cluck	NULL	abc	123
+Mary	NULL	abc	123
+Mary	NULL	abc	123
+Snow	NULL	abc	123
+Tom	NULL	abc	123
+Tom	NULL	abc	123
+Tom	NULL	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-POSTHOOK: query: select * from alter_partition_change_col1
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-Beck	0.0	__HIVE_DEFAULT_PARTITION__
-Beck	77.341	__HIVE_DEFAULT_PARTITION__
-Beck	79.9	__HIVE_DEFAULT_PARTITION__
-Beck	NULL	abc
-Beck	NULL	abc
-Beck	NULL	abc
-Cluck	5.96	__HIVE_DEFAULT_PARTITION__
-Cluck	NULL	abc
-Mary	33.33	__HIVE_DEFAULT_PARTITION__
-Mary	4.329	__HIVE_DEFAULT_PARTITION__
-Mary	NULL	abc
-Mary	NULL	abc
-Snow	55.71	__HIVE_DEFAULT_PARTITION__
-Snow	NULL	abc
-Tom	-12.25	__HIVE_DEFAULT_PARTITION__
-Tom	19.00	__HIVE_DEFAULT_PARTITION__
-Tom	234.79	__HIVE_DEFAULT_PARTITION__
-Tom	NULL	abc
-Tom	NULL	abc
-Tom	NULL	abc
+Beck	0.0	__HIVE_DEFAULT_PARTITION__	123
+Beck	77.341	__HIVE_DEFAULT_PARTITION__	123
+Beck	79.9	__HIVE_DEFAULT_PARTITION__	123
+Cluck	5.96	__HIVE_DEFAULT_PARTITION__	123
+Mary	33.33	__HIVE_DEFAULT_PARTITION__	123
+Mary	4.329	__HIVE_DEFAULT_PARTITION__	123
+Snow	55.71	__HIVE_DEFAULT_PARTITION__	123
+Tom	-12.25	__HIVE_DEFAULT_PARTITION__	123
+Tom	19.00	__HIVE_DEFAULT_PARTITION__	123
+Tom	234.79	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: alter table alter_partition_change_col1 replace columns (c1 string)
 PREHOOK: type: ALTERTABLE_REPLACECOLS
 PREHOOK: Input: default@alter_partition_change_col1
@@ -415,43 +475,53 @@ POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@alter_partition_change_col1
 c1                  	string              	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: select * from alter_partition_change_col1
+p2                  	string              	                    
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
 #### A masked pattern was here ####
-POSTHOOK: query: select * from alter_partition_change_col1
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-#### A masked pattern was here ####
-Beck	__HIVE_DEFAULT_PARTITION__
-Beck	__HIVE_DEFAULT_PARTITION__
-Beck	__HIVE_DEFAULT_PARTITION__
-Beck	abc
-Beck	abc
-Beck	abc
-Cluck	__HIVE_DEFAULT_PARTITION__
-Cluck	abc
-Mary	__HIVE_DEFAULT_PARTITION__
-Mary	__HIVE_DEFAULT_PARTITION__
-Mary	abc
-Mary	abc
-Snow	__HIVE_DEFAULT_PARTITION__
-Snow	abc
-Tom	__HIVE_DEFAULT_PARTITION__
-Tom	__HIVE_DEFAULT_PARTITION__
-Tom	__HIVE_DEFAULT_PARTITION__
-Tom	abc
-Tom	abc
-Tom	abc
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	abc	123
+Beck	abc	123
+Beck	abc	123
+Cluck	abc	123
+Mary	abc	123
+Mary	abc	123
+Snow	abc	123
+Tom	abc	123
+Tom	abc	123
+Tom	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+Beck	__HIVE_DEFAULT_PARTITION__	123
+Beck	__HIVE_DEFAULT_PARTITION__	123
+Beck	__HIVE_DEFAULT_PARTITION__	123
+Cluck	__HIVE_DEFAULT_PARTITION__	123
+Mary	__HIVE_DEFAULT_PARTITION__	123
+Mary	__HIVE_DEFAULT_PARTITION__	123
+Snow	__HIVE_DEFAULT_PARTITION__	123
+Tom	__HIVE_DEFAULT_PARTITION__	123
+Tom	__HIVE_DEFAULT_PARTITION__	123
+Tom	__HIVE_DEFAULT_PARTITION__	123
 PREHOOK: query: -- Try add columns
 alter table alter_partition_change_col1 add columns (c2 decimal(14,4))
 PREHOOK: type: ALTERTABLE_ADDCOLS
@@ -471,108 +541,216 @@ POSTHOOK: Input: default@alter_partition
 c1                  	string              	                    
 c2                  	decimal(14,4)       	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+p2                  	string              	                    
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@alter_partition_change_col1
 c1                  	string              	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: select * from alter_partition_change_col1
+p2                  	string              	                    
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	NULL	abc	123
+Beck	NULL	abc	123
+Beck	NULL	abc	123
+Cluck	NULL	abc	123
+Mary	NULL	abc	123
+Mary	NULL	abc	123
+Snow	NULL	abc	123
+Tom	NULL	abc	123
+Tom	NULL	abc	123
+Tom	NULL	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-POSTHOOK: query: select * from alter_partition_change_col1
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-Beck	0.0	__HIVE_DEFAULT_PARTITION__
-Beck	77.341	__HIVE_DEFAULT_PARTITION__
-Beck	79.9	__HIVE_DEFAULT_PARTITION__
-Beck	NULL	abc
-Beck	NULL	abc
-Beck	NULL	abc
-Cluck	5.96	__HIVE_DEFAULT_PARTITION__
-Cluck	NULL	abc
-Mary	33.33	__HIVE_DEFAULT_PARTITION__
-Mary	4.329	__HIVE_DEFAULT_PARTITION__
-Mary	NULL	abc
-Mary	NULL	abc
-Snow	55.71	__HIVE_DEFAULT_PARTITION__
-Snow	NULL	abc
-Tom	-12.25	__HIVE_DEFAULT_PARTITION__
-Tom	19.00	__HIVE_DEFAULT_PARTITION__
-Tom	234.79	__HIVE_DEFAULT_PARTITION__
-Tom	NULL	abc
-Tom	NULL	abc
-Tom	NULL	abc
-PREHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') add columns (c2 decimal(14,4))
+Beck	0.0	__HIVE_DEFAULT_PARTITION__	123
+Beck	77.341	__HIVE_DEFAULT_PARTITION__	123
+Beck	79.9	__HIVE_DEFAULT_PARTITION__	123
+Cluck	5.96	__HIVE_DEFAULT_PARTITION__	123
+Mary	33.33	__HIVE_DEFAULT_PARTITION__	123
+Mary	4.329	__HIVE_DEFAULT_PARTITION__	123
+Snow	55.71	__HIVE_DEFAULT_PARTITION__	123
+Tom	-12.25	__HIVE_DEFAULT_PARTITION__	123
+Tom	19.00	__HIVE_DEFAULT_PARTITION__	123
+Tom	234.79	__HIVE_DEFAULT_PARTITION__	123
+PREHOOK: query: alter table alter_partition_change_col1 partition (p1='abc', p2='123') add columns (c2 decimal(14,4))
 PREHOOK: type: ALTERTABLE_ADDCOLS
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Output: default@alter_partition_change_col1@p1=abc
-POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='abc') add columns (c2 decimal(14,4))
+PREHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
+POSTHOOK: query: alter table alter_partition_change_col1 partition (p1='abc', p2='123') add columns (c2 decimal(14,4))
 POSTHOOK: type: ALTERTABLE_ADDCOLS
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-POSTHOOK: Output: default@alter_partition_change_col1@p1=abc
-PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc')
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@alter_partition_change_col1
 c1                  	string              	                    
 c2                  	decimal(14,4)       	                    
 p1                  	string              	                    
+p2                  	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
 p1                  	string              	                    
-PREHOOK: query: select * from alter_partition_change_col1
+p2                  	string              	                    
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	0.0	abc	123
+Beck	77.341	abc	123
+Beck	79.9	abc	123
+Cluck	5.96	abc	123
+Mary	33.33	abc	123
+Mary	4.329	abc	123
+Snow	55.71	abc	123
+Tom	-12.25	abc	123
+Tom	19.00	abc	123
+Tom	234.79	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alter_partition_change_col1
-PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-PREHOOK: Input: default@alter_partition_change_col1@p1=abc
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
 #### A masked pattern was here ####
-POSTHOOK: query: select * from alter_partition_change_col1
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alter_partition_change_col1
-POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Input: default@alter_partition_change_col1@p1=abc
-#### A masked pattern was here ####
-Beck	0.0	__HIVE_DEFAULT_PARTITION__
-Beck	0.0	abc
-Beck	77.341	__HIVE_DEFAULT_PARTITION__
-Beck	77.341	abc
-Beck	79.9	__HIVE_DEFAULT_PARTITION__
-Beck	79.9	abc
-Cluck	5.96	__HIVE_DEFAULT_PARTITION__
-Cluck	5.96	abc
-Mary	33.33	__HIVE_DEFAULT_PARTITION__
-Mary	33.33	abc
-Mary	4.329	__HIVE_DEFAULT_PARTITION__
-Mary	4.329	abc
-Snow	55.71	__HIVE_DEFAULT_PARTITION__
-Snow	55.71	abc
-Tom	-12.25	__HIVE_DEFAULT_PARTITION__
-Tom	-12.25	abc
-Tom	19.00	__HIVE_DEFAULT_PARTITION__
-Tom	19.00	abc
-Tom	234.79	__HIVE_DEFAULT_PARTITION__
-Tom	234.79	abc
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+Beck	0.0	__HIVE_DEFAULT_PARTITION__	123
+Beck	77.341	__HIVE_DEFAULT_PARTITION__	123
+Beck	79.9	__HIVE_DEFAULT_PARTITION__	123
+Cluck	5.96	__HIVE_DEFAULT_PARTITION__	123
+Mary	33.33	__HIVE_DEFAULT_PARTITION__	123
+Mary	4.329	__HIVE_DEFAULT_PARTITION__	123
+Snow	55.71	__HIVE_DEFAULT_PARTITION__	123
+Tom	-12.25	__HIVE_DEFAULT_PARTITION__	123
+Tom	19.00	__HIVE_DEFAULT_PARTITION__	123
+Tom	234.79	__HIVE_DEFAULT_PARTITION__	123
+PREHOOK: query: -- Try changing column for all partitions at once
+alter table alter_partition_change_col1 partition (p1, p2='123') change column c2 c2 decimal(10,0)
+PREHOOK: type: ALTERTABLE_RENAMECOL
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+PREHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
+POSTHOOK: query: -- Try changing column for all partitions at once
+alter table alter_partition_change_col1 partition (p1, p2='123') change column c2 c2 decimal(10,0)
+POSTHOOK: type: ALTERTABLE_RENAMECOL
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+POSTHOOK: Output: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+POSTHOOK: Output: default@alter_partition_change_col1@p1=abc/p2=123
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='abc', p2='123')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1                  	string              	                    
+c2                  	decimal(10,0)       	                    
+p1                  	string              	                    
+p2                  	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+p1                  	string              	                    
+p2                  	string              	                    
+PREHOOK: query: describe alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__', p2='123')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: query: describe alter_partition_change_col1 partition (p1='__HIVE_DEFAULT_PARTITION__', p2='123')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@alter_partition_change_col1
+c1                  	string              	                    
+c2                  	decimal(10,0)       	                    
+p1                  	string              	                    
+p2                  	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+p1                  	string              	                    
+p2                  	string              	                    
+PREHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='abc'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=abc/p2=123
+#### A masked pattern was here ####
+Beck	0	abc	123
+Beck	77	abc	123
+Beck	80	abc	123
+Cluck	6	abc	123
+Mary	33	abc	123
+Mary	4	abc	123
+Snow	56	abc	123
+Tom	-12	abc	123
+Tom	19	abc	123
+Tom	235	abc	123
+PREHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alter_partition_change_col1
+PREHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+POSTHOOK: query: select * from alter_partition_change_col1 where p1='__HIVE_DEFAULT_PARTITION__'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alter_partition_change_col1
+POSTHOOK: Input: default@alter_partition_change_col1@p1=__HIVE_DEFAULT_PARTITION__/p2=123
+#### A masked pattern was here ####
+Beck	0	__HIVE_DEFAULT_PARTITION__	123
+Beck	77	__HIVE_DEFAULT_PARTITION__	123
+Beck	80	__HIVE_DEFAULT_PARTITION__	123
+Cluck	6	__HIVE_DEFAULT_PARTITION__	123
+Mary	33	__HIVE_DEFAULT_PARTITION__	123
+Mary	4	__HIVE_DEFAULT_PARTITION__	123
+Snow	56	__HIVE_DEFAULT_PARTITION__	123
+Tom	-12	__HIVE_DEFAULT_PARTITION__	123
+Tom	19	__HIVE_DEFAULT_PARTITION__	123
+Tom	235	__HIVE_DEFAULT_PARTITION__	123
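
-- For readers skimming the diff: the flow that this updated golden file covers can be
-- read off the PREHOOK: query lines above. The condensed sketch below is reconstructed
-- from those lines (it is not copied from the .q file itself); it only shows the shape
-- of the two-partition-column variant the new output corresponds to.
create table alter_partition_change_col1 (c1 string, c2 string)
  partitioned by (p1 string, p2 string);

insert overwrite table alter_partition_change_col1 partition (p1, p2)
  select c1, c2, 'abc', '123' from alter_partition_change_col0
  union all
  select c1, c2, null, '123' from alter_partition_change_col0;

-- change the column type at the table level, then for a single partition
alter table alter_partition_change_col1 change c2 c2 decimal(10,0);
alter table alter_partition_change_col1 partition (p1='abc', p2='123')
  change c2 c2 decimal(10,0);

-- partial partition spec: change c2 for every partition with p2='123' at once
alter table alter_partition_change_col1 partition (p1, p2='123')
  change column c2 c2 decimal(10,0);

describe alter_partition_change_col1 partition (p1='abc', p2='123');
select * from alter_partition_change_col1 where p1='abc';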

Modified: hive/branches/spark/ql/src/test/results/clientpositive/ambiguous_col.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/ambiguous_col.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/ambiguous_col.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/ambiguous_col.q.out Thu Oct 30 16:22:33 2014
@@ -53,8 +53,8 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string), _col1 (type: string)
-            outputColumnNames: _col0, _col1
+            expressions: _col0 (type: string), _col0 (type: string), _col1 (type: string)
+            outputColumnNames: _col0, _col1, _col2
             Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
@@ -124,8 +124,8 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string)
-            outputColumnNames: _col0
+            expressions: _col0 (type: string), _col0 (type: string)
+            outputColumnNames: _col0, _col1
             Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
@@ -195,8 +195,8 @@ STAGE PLANS:
           outputColumnNames: _col0
           Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: string)
-            outputColumnNames: _col0
+            expressions: _col0 (type: string), _col0 (type: string)
+            outputColumnNames: _col0, _col1
             Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false

Modified: hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby.q.out Thu Oct 30 16:22:33 2014
@@ -177,17 +177,17 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
             expressions: _col0 (type: string), _col1 (type: int), _col2 (type: bigint)
             outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 4 Data size: 400 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 8 Data size: 800 Basic stats: COMPLETE Column stats: PARTIAL
             Group By Operator
               aggregations: min(_col1)
               keys: _col0 (type: string), _col2 (type: bigint)
               mode: hash
               outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 4 Data size: 416 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 8 Data size: 832 Basic stats: COMPLETE Column stats: PARTIAL
               File Output Operator
                 compressed: false
                 table:
@@ -203,7 +203,7 @@ STAGE PLANS:
               key expressions: _col0 (type: string), _col1 (type: bigint)
               sort order: ++
               Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint)
-              Statistics: Num rows: 4 Data size: 416 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 8 Data size: 832 Basic stats: COMPLETE Column stats: PARTIAL
               value expressions: _col2 (type: int)
       Reduce Operator Tree:
         Group By Operator
@@ -211,14 +211,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: bigint)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 2 Data size: 208 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 8 Data size: 832 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
             expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: int)
             outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 2 Data size: 208 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 8 Data size: 832 Basic stats: COMPLETE Column stats: PARTIAL
             File Output Operator
               compressed: false
-              Statistics: Num rows: 2 Data size: 208 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 8 Data size: 832 Basic stats: COMPLETE Column stats: PARTIAL
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -852,14 +852,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: bigint)
           mode: mergepartial
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 2 Data size: 172 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 4 Data size: 344 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
             expressions: _col0 (type: string), _col1 (type: bigint)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 2 Data size: 172 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 4 Data size: 344 Basic stats: COMPLETE Column stats: PARTIAL
             File Output Operator
               compressed: false
-              Statistics: Num rows: 2 Data size: 172 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 4 Data size: 344 Basic stats: COMPLETE Column stats: PARTIAL
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby2.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_groupby2.q.out Thu Oct 30 16:22:33 2014
@@ -274,25 +274,25 @@ STAGE PLANS:
                 keys: state (type: string), votes (type: bigint)
                 mode: hash
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: PARTIAL
+                Statistics: Num rows: 8 Data size: 688 Basic stats: COMPLETE Column stats: PARTIAL
                 Reduce Output Operator
                   key expressions: _col0 (type: string), _col1 (type: bigint)
                   sort order: ++
                   Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint)
-                  Statistics: Num rows: 10 Data size: 860 Basic stats: COMPLETE Column stats: PARTIAL
+                  Statistics: Num rows: 8 Data size: 688 Basic stats: COMPLETE Column stats: PARTIAL
       Reduce Operator Tree:
         Group By Operator
           keys: KEY._col0 (type: string), KEY._col1 (type: bigint)
           mode: mergepartial
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 2 Data size: 172 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
             expressions: _col0 (type: string), _col1 (type: bigint)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 2 Data size: 172 Basic stats: COMPLETE Column stats: PARTIAL
             File Output Operator
               compressed: false
-              Statistics: Num rows: 5 Data size: 430 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 2 Data size: 172 Basic stats: COMPLETE Column stats: PARTIAL
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Copied: hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out (from r1633910, hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out)
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out?p2=hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out&p1=hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out&r1=1633910&r2=1635536&rev=1635536&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_join_pkfk.q.out Thu Oct 30 16:22:33 2014
@@ -337,7 +337,7 @@ STAGE PLANS:
             alias: s
             Statistics: Num rows: 12 Data size: 3143 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
-              predicate: (s_store_sk is not null and (s_store_sk > 0)) (type: boolean)
+              predicate: (s_store_sk > 0) (type: boolean)
               Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
               Reduce Output Operator
                 key expressions: s_store_sk (type: int)
@@ -348,13 +348,13 @@ STAGE PLANS:
             alias: ss
             Statistics: Num rows: 1000 Data size: 130523 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
-              predicate: (ss_store_sk is not null and (ss_store_sk > 0)) (type: boolean)
-              Statistics: Num rows: 321 Data size: 1236 Basic stats: COMPLETE Column stats: COMPLETE
+              predicate: (ss_store_sk > 0) (type: boolean)
+              Statistics: Num rows: 333 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
               Reduce Output Operator
                 key expressions: ss_store_sk (type: int)
                 sort order: +
                 Map-reduce partition columns: ss_store_sk (type: int)
-                Statistics: Num rows: 321 Data size: 1236 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 333 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -363,14 +363,14 @@ STAGE PLANS:
             0 {KEY.reducesinkkey0}
             1 
           outputColumnNames: _col0
-          Statistics: Num rows: 107 Data size: 428 Basic stats: COMPLETE Column stats: COMPLETE
+          Statistics: Num rows: 111 Data size: 444 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
             expressions: _col0 (type: int)
             outputColumnNames: _col0
-            Statistics: Num rows: 107 Data size: 428 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 111 Data size: 444 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 107 Data size: 428 Basic stats: COMPLETE Column stats: COMPLETE
+              Statistics: Num rows: 111 Data size: 444 Basic stats: COMPLETE Column stats: COMPLETE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -485,14 +485,14 @@ STAGE PLANS:
             0 {KEY.reducesinkkey0}
             1 
           outputColumnNames: _col0
-          Statistics: Num rows: 321 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
+          Statistics: Num rows: 322 Data size: 1288 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
             expressions: _col0 (type: int)
             outputColumnNames: _col0
-            Statistics: Num rows: 321 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 322 Data size: 1288 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 321 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
+              Statistics: Num rows: 322 Data size: 1288 Basic stats: COMPLETE Column stats: COMPLETE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -655,7 +655,7 @@ STAGE PLANS:
             alias: s1
             Statistics: Num rows: 12 Data size: 3143 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
-              predicate: (s_store_sk is not null and (s_store_sk > 1000)) (type: boolean)
+              predicate: (s_store_sk > 1000) (type: boolean)
               Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
               Reduce Output Operator
                 key expressions: s_store_sk (type: int)
@@ -666,7 +666,7 @@ STAGE PLANS:
             alias: s
             Statistics: Num rows: 12 Data size: 3143 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
-              predicate: (s_store_sk is not null and (s_store_sk > 1000)) (type: boolean)
+              predicate: (s_store_sk > 1000) (type: boolean)
               Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
               Reduce Output Operator
                 key expressions: s_store_sk (type: int)
@@ -677,13 +677,13 @@ STAGE PLANS:
             alias: ss
             Statistics: Num rows: 1000 Data size: 130523 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
-              predicate: (ss_store_sk is not null and (ss_store_sk > 1000)) (type: boolean)
-              Statistics: Num rows: 321 Data size: 1236 Basic stats: COMPLETE Column stats: COMPLETE
+              predicate: (ss_store_sk > 1000) (type: boolean)
+              Statistics: Num rows: 333 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
               Reduce Output Operator
                 key expressions: ss_store_sk (type: int)
                 sort order: +
                 Map-reduce partition columns: ss_store_sk (type: int)
-                Statistics: Num rows: 321 Data size: 1236 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 333 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -694,14 +694,14 @@ STAGE PLANS:
             1 
             2 
           outputColumnNames: _col0
-          Statistics: Num rows: 35 Data size: 140 Basic stats: COMPLETE Column stats: COMPLETE
+          Statistics: Num rows: 38 Data size: 152 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
             expressions: _col0 (type: int)
             outputColumnNames: _col0
-            Statistics: Num rows: 35 Data size: 140 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 38 Data size: 152 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 35 Data size: 140 Basic stats: COMPLETE Column stats: COMPLETE
+              Statistics: Num rows: 38 Data size: 152 Basic stats: COMPLETE Column stats: COMPLETE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -768,14 +768,14 @@ STAGE PLANS:
             1 
             2 
           outputColumnNames: _col0
-          Statistics: Num rows: 321 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
+          Statistics: Num rows: 322 Data size: 1288 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
             expressions: _col0 (type: int)
             outputColumnNames: _col0
-            Statistics: Num rows: 321 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 322 Data size: 1288 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 321 Data size: 1284 Basic stats: COMPLETE Column stats: COMPLETE
+              Statistics: Num rows: 322 Data size: 1288 Basic stats: COMPLETE Column stats: COMPLETE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -942,14 +942,14 @@ STAGE PLANS:
             0 {VALUE._col0}
             1 
           outputColumnNames: _col0
-          Statistics: Num rows: 916 Data size: 3664 Basic stats: COMPLETE Column stats: COMPLETE
+          Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
             expressions: _col0 (type: int)
             outputColumnNames: _col0
-            Statistics: Num rows: 916 Data size: 3664 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 916 Data size: 3664 Basic stats: COMPLETE Column stats: COMPLETE
+              Statistics: Num rows: 210 Data size: 840 Basic stats: COMPLETE Column stats: COMPLETE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_part.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_part.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_part.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_part.q.out Thu Oct 30 16:22:33 2014
@@ -135,11 +135,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: loc_orc
-          Statistics: Num rows: 2 Data size: 323 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 2 Data size: 323 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
+            expressions: state (type: string), locid (type: int), zip (type: bigint), '__HIVE_DEFAULT_PARTITION__' (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 2 Data size: 323 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 2 Data size: 323 Basic stats: COMPLETE Column stats: NONE
             ListSink
 
 PREHOOK: query: -- basicStatState: PARTIAL colStatState: NONE
@@ -181,11 +181,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: loc_orc
-          Statistics: Num rows: 7 Data size: 400 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 7 Data size: 400 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
+            expressions: state (type: string), locid (type: int), zip (type: bigint), '2001' (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 7 Data size: 400 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 7 Data size: 400 Basic stats: COMPLETE Column stats: NONE
             ListSink
 
 PREHOOK: query: -- partition level analyze statistics for all partitions
@@ -222,11 +222,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: loc_orc
-          Statistics: Num rows: 1 Data size: 323 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 1 Data size: 323 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
+            expressions: state (type: string), locid (type: int), zip (type: bigint), '__HIVE_DEFAULT_PARTITION__' (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 1 Data size: 323 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 1 Data size: 323 Basic stats: COMPLETE Column stats: NONE
             ListSink
 
 PREHOOK: query: -- basicStatState: COMPLETE colStatState: NONE
@@ -535,14 +535,14 @@ STAGE PLANS:
             Statistics: Num rows: 7 Data size: 400 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
               predicate: (locid > 0) (type: boolean)
-              Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
+              Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                expressions: locid (type: int), year (type: string)
+                expressions: locid (type: int), '2001' (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 2 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 2 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -571,14 +571,14 @@ STAGE PLANS:
             Statistics: Num rows: 7 Data size: 400 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
               predicate: (locid > 0) (type: boolean)
-              Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
+              Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                expressions: locid (type: int), year (type: string)
+                expressions: locid (type: int), '2001' (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
+                Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 2 Data size: 176 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/authorization_role_grant2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/authorization_role_grant2.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/authorization_role_grant2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/authorization_role_grant2.q.out Thu Oct 30 16:22:33 2014
@@ -41,6 +41,12 @@ PREHOOK: query: set role src_role_WadMin
 PREHOOK: type: SHOW_ROLES
 POSTHOOK: query: set role src_role_WadMin
 POSTHOOK: type: SHOW_ROLES
+PREHOOK: query: show principals src_role_wadmin
+PREHOOK: type: SHOW_ROLE_PRINCIPALS
+POSTHOOK: query: show principals src_role_wadmin
+POSTHOOK: type: SHOW_ROLE_PRINCIPALS
+principal_name	principal_type	grant_option	grantor	grantor_type	grant_time
+user2	USER	true	hive_admin_user	USER	-1
 PREHOOK: query: -- grant role to another user
 grant src_Role_wadmin to user user3
 PREHOOK: type: GRANT_ROLE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join11.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join11.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join11.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join11.q.out Thu Oct 30 16:22:33 2014
@@ -32,12 +32,12 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((key < 100) and key is not null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+              predicate: (key < 100) (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   condition expressions:
                     0 
@@ -53,12 +53,12 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((key < 100) and key is not null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+              predicate: (key < 100) (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Inner Join 0 to 1
@@ -69,11 +69,11 @@ STAGE PLANS:
                     0 _col0 (type: string)
                     1 _col0 (type: string)
                   outputColumnNames: _col0, _col3
-                  Statistics: Num rows: 91 Data size: 969 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string), _col3 (type: string)
                     outputColumnNames: _col0, _col3
-                    Statistics: Num rows: 91 Data size: 969 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: sum(hash(_col0,_col3))
                       mode: hash

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join12.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join12.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join12.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join12.q.out Thu Oct 30 16:22:33 2014
@@ -41,12 +41,12 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: (((key < 80) and key is not null) and (key < 100)) (type: boolean)
-              Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+              predicate: ((key < 80) and (key < 100)) (type: boolean)
+              Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   condition expressions:
                     0 
@@ -61,12 +61,12 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: (((key < 100) and key is not null) and (key < 80)) (type: boolean)
-              Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+              predicate: (((key < 100) and (key < 80)) and key is not null) (type: boolean)
+              Statistics: Num rows: 28 Data size: 297 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 28 Data size: 297 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   condition expressions:
                     0 
@@ -84,12 +84,12 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: (((key < 100) and key is not null) and (key < 80)) (type: boolean)
-              Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+              predicate: ((key < 100) and (key < 80)) (type: boolean)
+              Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Inner Join 0 to 1
@@ -103,11 +103,11 @@ STAGE PLANS:
                     1 _col0 (type: string)
                     2 _col0 (type: string)
                   outputColumnNames: _col0, _col3
-                  Statistics: Num rows: 59 Data size: 629 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 121 Data size: 1284 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string), _col3 (type: string)
                     outputColumnNames: _col0, _col3
-                    Statistics: Num rows: 59 Data size: 629 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 121 Data size: 1284 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: sum(hash(_col0,_col3))
                       mode: hash

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join13.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join13.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join13.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join13.q.out Thu Oct 30 16:22:33 2014
@@ -41,12 +41,12 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((key < 100) and key is not null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+              predicate: (key < 100) (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   condition expressions:
                     0 
@@ -80,12 +80,12 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((key < 100) and key is not null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+              predicate: (key < 100) (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Inner Join 0 to 1
@@ -96,10 +96,10 @@ STAGE PLANS:
                     0 _col0 (type: string)
                     1 _col0 (type: string)
                   outputColumnNames: _col0, _col2, _col3
-                  Statistics: Num rows: 91 Data size: 969 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (_col0 + _col2) is not null (type: boolean)
-                    Statistics: Num rows: 46 Data size: 489 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 91 Data size: 969 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
                            Inner Join 0 to 1
@@ -110,11 +110,11 @@ STAGE PLANS:
                         0 (_col0 + _col2) (type: double)
                         1 UDFToDouble(_col0) (type: double)
                       outputColumnNames: _col0, _col3
-                      Statistics: Num rows: 91 Data size: 969 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
                         expressions: _col0 (type: string), _col3 (type: string)
                         outputColumnNames: _col0, _col3
-                        Statistics: Num rows: 91 Data size: 969 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
                         Group By Operator
                           aggregations: sum(hash(_col0,_col3))
                           mode: hash

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join14.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join14.q.out?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join14.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join14.q.out Thu Oct 30 16:22:33 2014
@@ -37,8 +37,8 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((key > 100) and key is not null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+              predicate: (key > 100) (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
               HashTable Sink Operator
                 condition expressions:
                   0 


