hive-commits mailing list archives

From gunt...@apache.org
Subject svn commit: r1527883 [5/6] - in /hive/branches/tez: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/ bin/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base...
Date Tue, 01 Oct 2013 04:48:48 GMT
Modified: hive/branches/tez/ql/src/test/queries/clientpositive/alter_partition_coltype.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/alter_partition_coltype.q?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/alter_partition_coltype.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/alter_partition_coltype.q Tue Oct  1 04:48:44 2013
@@ -24,6 +24,8 @@ select count(*) from alter_coltype where
 -- alter partition key column data type for ts column.
 alter table alter_coltype partition column (ts double);
 
+alter table alter_coltype partition column (dt string);
+
 -- load a new partition using new data type.
 insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1;
 

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/input16.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/input16.q?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/input16.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/input16.q Tue Oct  1 04:48:44 2013
@@ -1,6 +1,6 @@
 -- TestSerDe is a user defined serde where the default delimiter is Ctrl-B
 DROP TABLE INPUT16;
-ADD JAR ../data/files/TestSerDe.jar;
+ADD JAR ../build/ql/test/TestSerDe.jar;
 CREATE TABLE INPUT16(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cb.txt' INTO TABLE INPUT16;
 SELECT INPUT16.VALUE, INPUT16.KEY FROM INPUT16;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/input16_cc.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/input16_cc.q?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/input16_cc.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/input16_cc.q Tue Oct  1 04:48:44 2013
@@ -4,7 +4,7 @@ set hive.input.format=org.apache.hadoop.
 -- the user is overwriting it with ctrlC
 
 DROP TABLE INPUT16_CC;
-ADD JAR ../data/files/TestSerDe.jar;
+ADD JAR ../build/ql/test/TestSerDe.jar;
 CREATE TABLE INPUT16_CC(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'  with serdeproperties ('testserde.default.serialization.format'='\003', 'dummy.prop.not.used'='dummyy.val') STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/kv1_cc.txt' INTO TABLE INPUT16_CC;
 SELECT INPUT16_CC.VALUE, INPUT16_CC.KEY FROM INPUT16_CC;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/reduce_deduplicate_extended.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/reduce_deduplicate_extended.q?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/reduce_deduplicate_extended.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/reduce_deduplicate_extended.q Tue Oct  1 04:48:44 2013
@@ -18,6 +18,7 @@ explain select src.key, sum(src.key) FRO
 explain select src.key, src.value FROM src JOIN src1 ON src.key = src1.key order by src.key, src.value;
 -- mGBY-RS-rGBY-mGBY-RS-rGBY
 explain from (select key, value from src group by key, value) s select s.key group by s.key;
+explain select key, count(distinct value) from (select key, value from src group by key, value) t group by key;
 
 select key, sum(key) from (select * from src distribute by key sort by key, value) Q1 group by key;
 select key, sum(key), lower(value) from (select * from src order by key) Q1 group by key, lower(value);
@@ -26,6 +27,7 @@ select key, sum(key) as value from src g
 select src.key, sum(src.key) FROM src JOIN src1 ON src.key = src1.key group by src.key, src.value;
 select src.key, src.value FROM src JOIN src1 ON src.key = src1.key order by src.key, src.value;
 from (select key, value from src group by key, value) s select s.key group by s.key;
+select key, count(distinct value) from (select key, value from src group by key, value) t group by key;
 
 set hive.map.aggr=false;
 
@@ -41,6 +43,7 @@ explain select src.key, sum(src.key) FRO
 explain select src.key, src.value FROM src JOIN src1 ON src.key = src1.key order by src.key, src.value;
 -- RS-GBY-RS-GBY
 explain from (select key, value from src group by key, value) s select s.key group by s.key;
+explain select key, count(distinct value) from (select key, value from src group by key, value) t group by key;
 
 select key, sum(key) from (select * from src distribute by key sort by key, value) Q1 group by key;
 select key, sum(key), lower(value) from (select * from src order by key) Q1 group by key, lower(value);
@@ -49,3 +52,4 @@ select key, sum(key) as value from src g
 select src.key, sum(src.key) FROM src JOIN src1 ON src.key = src1.key group by src.key, src.value;
 select src.key, src.value FROM src JOIN src1 ON src.key = src1.key order by src.key, src.value;
 from (select key, value from src group by key, value) s select s.key group by s.key;
+select key, count(distinct value) from (select key, value from src group by key, value) t group by key;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/union_null.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/union_null.q?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/union_null.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/union_null.q Tue Oct  1 04:48:44 2013
@@ -1,2 +1,5 @@
 -- HIVE-2901
 select x from (select value as x from src union all select NULL as x from src)a limit 10;
+
+-- HIVE-4837
+select * from (select null as N from src1 group by key UNION ALL select null as N from src1 group by key ) a;

Modified: hive/branches/tez/ql/src/test/results/beelinepositive/union_null.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/beelinepositive/union_null.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/beelinepositive/union_null.q.out (original)
+++ hive/branches/tez/ql/src/test/results/beelinepositive/union_null.q.out Tue Oct  1 04:48:44 2013
@@ -14,4 +14,41 @@ Saving all output to "!!{outputDirectory
 'val_165'
 ''
 10 rows selected 
+>>>  
+>>>  -- HIVE-4837
+>>>  select * from (select null as N from src1 group by key UNION ALL select null as N from src1 group by key ) a;
+'n'
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+''
+32 rows selected 
 >>>  !record

Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_table_add_partition.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_table_add_partition.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_table_add_partition.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_table_add_partition.q.out Tue Oct  1 04:48:44 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table mp (a int) partitioned by (b int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@mp
-FAILED: SemanticException [Error 10214]: Invalid partition spec specified table is partitioned but partition spec is not specified or does not fully match table partitioning: {b=1, c=1}
+FAILED: SemanticException [Error 10098]: Non-Partition column appears in the partition specification:  c

Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure5.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure5.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure5.q.out Tue Oct  1 04:48:44 2013
@@ -13,4 +13,4 @@ AS 
 SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Output: default@xxx6
-FAILED: SemanticException [Error 10214]: Invalid partition spec specified value not found in table's partition spec: {v=val_86}
+FAILED: SemanticException [Error 10098]: Non-Partition column appears in the partition specification:  v

Modified: hive/branches/tez/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/columnstats_tbllvl.q.out Tue Oct  1 04:48:44 2013
@@ -33,4 +33,4 @@ PREHOOK: Output: default@uservisits_web_
 POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@uservisits_web_text_none
-FAILED: SemanticException [Error 10004]: Line 1:21 Invalid table alias or column reference 'destIP': (possible column names are: sourceip, desturl, visitdate, adrevenue, useragent, ccode, lcode, skeyword, avgtimeonsite)
+FAILED: SemanticException [Error 10002]: Invalid column reference 'destIP' (possible columns are [sourceip, desturl, visitdate, adrevenue, useragent, ccode, lcode, skeyword, avgtimeonsite])

Modified: hive/branches/tez/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/columnstats_tbllvl_incorrect_column.q.out Tue Oct  1 04:48:44 2013
@@ -33,4 +33,4 @@ PREHOOK: Output: default@uservisits_web_
 POSTHOOK: query: LOAD DATA LOCAL INPATH "../data/files/UserVisits.dat" INTO TABLE UserVisits_web_text_none
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@uservisits_web_text_none
-FAILED: SemanticException [Error 10004]: Line 1:21 Invalid table alias or column reference 'destIP': (possible column names are: sourceip, desturl, visitdate, adrevenue, useragent, ccode, lcode, skeyword, avgtimeonsite)
+FAILED: SemanticException [Error 10002]: Invalid column reference 'destIP' (possible columns are [sourceip, desturl, visitdate, adrevenue, useragent, ccode, lcode, skeyword, avgtimeonsite])

Modified: hive/branches/tez/ql/src/test/results/clientpositive/add_part_exist.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/add_part_exist.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/add_part_exist.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/add_part_exist.q.out Tue Oct  1 04:48:44 2013
@@ -75,6 +75,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/alter1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/alter1.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/alter1.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/alter1.q.out Tue Oct  1 04:48:44 2013
@@ -168,6 +168,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/alter2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/alter2.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/alter2.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/alter2.q.out Tue Oct  1 04:48:44 2013
@@ -172,6 +172,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/alter3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/alter3.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/alter3.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/alter3.q.out Tue Oct  1 04:48:44 2013
@@ -184,6 +184,7 @@ POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
 POSTHOOK: Lineage: alter3 PARTITION(pcol1=test_part:,pcol2=test_part:).col1 SIMPLE [(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
 POSTHOOK: Lineage: alter3_like PARTITION(pcol1=test_part:,pcol2=test_part:).col1 SIMPLE [(alter3_src)alter3_src.FieldSchema(name:col1, type:string, comment:null), ]
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/alter4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/alter4.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/alter4.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/alter4.q.out Tue Oct  1 04:48:44 2013
@@ -41,6 +41,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/alter5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/alter5.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/alter5.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/alter5.q.out Tue Oct  1 04:48:44 2013
@@ -115,6 +115,7 @@ PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
 POSTHOOK: Lineage: alter5 PARTITION(dt=a).col1 SIMPLE [(alter5_src)alter5_src.FieldSchema(name:col1, type:string, comment:null), ]
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/alter_index.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/alter_index.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/alter_index.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/alter_index.q.out Tue Oct  1 04:48:44 2013
@@ -37,6 +37,7 @@ PREHOOK: query: show tables
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: show tables
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/alter_partition_coltype.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/alter_partition_coltype.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/alter_partition_coltype.q.out Tue Oct  1 04:48:44 2013
@@ -242,6 +242,17 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: alter table alter_coltype partition column (dt string)
+PREHOOK: type: null
+PREHOOK: Input: default@alter_coltype
+POSTHOOK: query: alter table alter_coltype partition column (dt string)
+POSTHOOK: type: null
+POSTHOOK: Input: default@alter_coltype
+POSTHOOK: Output: default@alter_coltype
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: -- load a new partition using new data type.
 insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1
 PREHOOK: type: QUERY
@@ -1239,13 +1250,13 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 key                 	string              	None                
 value               	string              	None                
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 PREHOOK: query: desc alter_coltype partition (dt='100x', ts='6:30pm')
 PREHOOK: type: DESCTABLE
@@ -1259,13 +1270,13 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 key                 	string              	None                
 value               	string              	None                
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 PREHOOK: query: desc alter_coltype partition (dt='100x', ts=3.0)
 PREHOOK: type: DESCTABLE
@@ -1279,13 +1290,13 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 key                 	string              	None                
 value               	string              	None                
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 PREHOOK: query: desc alter_coltype partition (dt=10, ts=3.0)
 PREHOOK: type: DESCTABLE
@@ -1299,13 +1310,13 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
 key                 	string              	None                
 value               	string              	None                
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 PREHOOK: query: drop table alter_coltype
 PREHOOK: type: DROPTABLE

Modified: hive/branches/tez/ql/src/test/results/clientpositive/alter_rename_partition.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/alter_rename_partition.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/alter_rename_partition.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/alter_rename_partition.q.out Tue Oct  1 04:48:44 2013
@@ -12,6 +12,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json
@@ -130,6 +131,7 @@ PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
 POSTHOOK: Lineage: alter_rename_partition PARTITION(pcol1=old_part1:,pcol2=old_part2:).col1 SIMPLE [(alter_rename_partition_src)alter_rename_partition_src.FieldSchema(name:col1, type:string, comment:null), ]
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/describe_table_json.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/describe_table_json.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/describe_table_json.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/describe_table_json.q.out Tue Oct  1 04:48:44 2013
@@ -7,7 +7,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
-{"tables":["jsontable","src","src1","src_json","src_sequencefile","src_thrift","srcbucket","srcbucket2","srcpart"]}
+{"tables":["alltypesorc","jsontable","src","src1","src_json","src_sequencefile","src_thrift","srcbucket","srcbucket2","srcpart"]}
 PREHOOK: query: SHOW TABLES LIKE 'json*'
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES LIKE 'json*'

Modified: hive/branches/tez/ql/src/test/results/clientpositive/index_creation.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/index_creation.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/index_creation.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/index_creation.q.out Tue Oct  1 04:48:44 2013
@@ -306,6 +306,7 @@ POSTHOOK: Lineage: default___t_x2__._off
 POSTHOOK: Lineage: default___t_x__._bucketname SIMPLE [(_t)_t.FieldSchema(name:INPUT__FILE__NAME, type:string, comment:), ]
 POSTHOOK: Lineage: default___t_x__._j SIMPLE [(_t)_t.FieldSchema(name:_j, type:int, comment:null), ]
 POSTHOOK: Lineage: default___t_x__._offsets EXPRESSION [(_t)_t.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/input2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/input2.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/input2.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/input2.q.out Tue Oct  1 04:48:44 2013
@@ -31,6 +31,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json
@@ -53,6 +54,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/input3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/input3.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/input3.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/input3.q.out Tue Oct  1 04:48:44 2013
@@ -25,6 +25,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json
@@ -114,6 +115,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/input4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/input4.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/input4.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/input4.q.out Tue Oct  1 04:48:44 2013
@@ -48,7 +48,7 @@ PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN FORMATTED
 SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
 POSTHOOK: type: QUERY
-{"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Map:":{"Split Sample:":{},"Alias -> Map Operator Tree:":{"input4alias":{"TS_0":{"SEL_1":{"FS_2":{"File Output Operator":{"GlobalTableId:":"0","compressed:":"false","table:":{"serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}}}}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1"}}},"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"ROOT STAGE":"TRUE"}},"ABSTRACT SYNTAX TREE":"(TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME INPUT4) Input4Alias)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL Input4Alias) VALUE)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL Input4Alias) KEY)))))"}
+{"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Vectorized execution:":"false","Map:":{"Vectorized execution:":"false","Split Sample:":{},"Alias -> Map Operator Tree:":{"input4alias":{"TS_0":{"SEL_1":{"FS_2":{"File Output Operator":{"Vectorized execution:":"false","GlobalTableId:":"0","compressed:":"false","table:":{"serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}}}}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1"}}},"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"ROOT STAGE":"TRUE"}},"ABSTRACT SYNTAX TREE":"(TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME INPUT4) Input4Alias)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL Input4Alias) VALUE)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL Input4Alias) KEY)))))"}
 PREHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
 PREHOOK: type: QUERY
 PREHOOK: Input: default@input4

Modified: hive/branches/tez/ql/src/test/results/clientpositive/plan_json.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/plan_json.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/plan_json.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/plan_json.q.out Tue Oct  1 04:48:44 2013
@@ -6,4 +6,4 @@ POSTHOOK: query: -- explain plan json:  
 
 EXPLAIN FORMATTED SELECT count(1) FROM src
 POSTHOOK: type: QUERY
-{"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Map:":{"Split Sample:":{},"Alias -> Map Operator Tree:":{"src":{"TS_0":{"SEL_1":{"GBY_2":{"RS_3":{"Reduce Output Operator":{"Map-reduce partition columns:":[],"sort order:":"","tag:":"-1","value expressions:":[{"type:":"bigint","expr:":"_col0"}],"key expressions:":[]}}}}}}}},"Reduce:":{"Reduce Operator Tree:":{"GBY_4":{"SEL_5":{"FS_6":{"File Output Operator":{"GlobalTableId:":"0","compressed:":"false","table:":{"serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}}}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1"}}},"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"ROOT STAGE":"TRUE"}},"ABSTRACT SYNTAX TREE":"(TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))"}
+{"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Vectorized execution:":"false","Map:":{"Vectorized execution:":"false","Split Sample:":{},"Alias -> Map Operator Tree:":{"src":{"TS_0":{"SEL_1":{"GBY_2":{"RS_3":{"Reduce Output Operator":{"Vectorized execution:":"false","Map-reduce partition columns:":[],"sort order:":"","tag:":"-1","value expressions:":[{"type:":"bigint","expr:":"_col0"}],"key expressions:":[]}}}}}}}},"Reduce:":{"Vectorized execution:":"false","Reduce Operator Tree:":{"GBY_4":{"SEL_5":{"FS_6":{"File Output Operator":{"Vectorized execution:":"false","GlobalTableId:":"0","compressed:":"false","table:":{"serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}}}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1"}}},"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"ROOT STAGE":"TRUE"}},"ABSTRACT SYNTAX TREE":"(TOK_QUERY (TOK
 _FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)))))"}

Modified: hive/branches/tez/ql/src/test/results/clientpositive/reduce_deduplicate_extended.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/reduce_deduplicate_extended.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/reduce_deduplicate_extended.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/reduce_deduplicate_extended.q.out Tue Oct  1 04:48:44 2013
@@ -659,6 +659,97 @@ STAGE PLANS:
       limit: -1
 
 
+PREHOOK: query: explain select key, count(distinct value) from (select key, value from src group by key, value) t group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select key, count(distinct value) from (select key, value from src group by key, value) t group by key
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))) t)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL value)))) (TOK_GROUPBY (TOK_TABLE_OR_COL key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        t:src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: key, value
+              Group By Operator
+                bucketGroup: false
+                keys:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                  sort order: ++
+                  Map-reduce partition columns:
+                        expr: _col0
+                        type: string
+                  tag: -1
+      Reduce Operator Tree:
+        Group By Operator
+          bucketGroup: false
+          keys:
+                expr: KEY._col0
+                type: string
+                expr: KEY._col1
+                type: string
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: string
+            outputColumnNames: _col0, _col1
+            Group By Operator
+              aggregations:
+                    expr: count(DISTINCT _col1)
+              bucketGroup: false
+              keys:
+                    expr: _col0
+                    type: string
+              mode: complete
+              outputColumnNames: _col0, _col1
+              Select Operator
+                expressions:
+                      expr: _col0
+                      type: string
+                      expr: _col1
+                      type: bigint
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
 PREHOOK: query: select key, sum(key) from (select * from src distribute by key sort by key, value) Q1 group by key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -2316,6 +2407,323 @@ POSTHOOK: Input: default@src
 96
 97
 98
+PREHOOK: query: select key, count(distinct value) from (select key, value from src group by key, value) t group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(distinct value) from (select key, value from src group by key, value) t group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	1
+10	1
+100	1
+103	1
+104	1
+105	1
+11	1
+111	1
+113	1
+114	1
+116	1
+118	1
+119	1
+12	1
+120	1
+125	1
+126	1
+128	1
+129	1
+131	1
+133	1
+134	1
+136	1
+137	1
+138	1
+143	1
+145	1
+146	1
+149	1
+15	1
+150	1
+152	1
+153	1
+155	1
+156	1
+157	1
+158	1
+160	1
+162	1
+163	1
+164	1
+165	1
+166	1
+167	1
+168	1
+169	1
+17	1
+170	1
+172	1
+174	1
+175	1
+176	1
+177	1
+178	1
+179	1
+18	1
+180	1
+181	1
+183	1
+186	1
+187	1
+189	1
+19	1
+190	1
+191	1
+192	1
+193	1
+194	1
+195	1
+196	1
+197	1
+199	1
+2	1
+20	1
+200	1
+201	1
+202	1
+203	1
+205	1
+207	1
+208	1
+209	1
+213	1
+214	1
+216	1
+217	1
+218	1
+219	1
+221	1
+222	1
+223	1
+224	1
+226	1
+228	1
+229	1
+230	1
+233	1
+235	1
+237	1
+238	1
+239	1
+24	1
+241	1
+242	1
+244	1
+247	1
+248	1
+249	1
+252	1
+255	1
+256	1
+257	1
+258	1
+26	1
+260	1
+262	1
+263	1
+265	1
+266	1
+27	1
+272	1
+273	1
+274	1
+275	1
+277	1
+278	1
+28	1
+280	1
+281	1
+282	1
+283	1
+284	1
+285	1
+286	1
+287	1
+288	1
+289	1
+291	1
+292	1
+296	1
+298	1
+30	1
+302	1
+305	1
+306	1
+307	1
+308	1
+309	1
+310	1
+311	1
+315	1
+316	1
+317	1
+318	1
+321	1
+322	1
+323	1
+325	1
+327	1
+33	1
+331	1
+332	1
+333	1
+335	1
+336	1
+338	1
+339	1
+34	1
+341	1
+342	1
+344	1
+345	1
+348	1
+35	1
+351	1
+353	1
+356	1
+360	1
+362	1
+364	1
+365	1
+366	1
+367	1
+368	1
+369	1
+37	1
+373	1
+374	1
+375	1
+377	1
+378	1
+379	1
+382	1
+384	1
+386	1
+389	1
+392	1
+393	1
+394	1
+395	1
+396	1
+397	1
+399	1
+4	1
+400	1
+401	1
+402	1
+403	1
+404	1
+406	1
+407	1
+409	1
+41	1
+411	1
+413	1
+414	1
+417	1
+418	1
+419	1
+42	1
+421	1
+424	1
+427	1
+429	1
+43	1
+430	1
+431	1
+432	1
+435	1
+436	1
+437	1
+438	1
+439	1
+44	1
+443	1
+444	1
+446	1
+448	1
+449	1
+452	1
+453	1
+454	1
+455	1
+457	1
+458	1
+459	1
+460	1
+462	1
+463	1
+466	1
+467	1
+468	1
+469	1
+47	1
+470	1
+472	1
+475	1
+477	1
+478	1
+479	1
+480	1
+481	1
+482	1
+483	1
+484	1
+485	1
+487	1
+489	1
+490	1
+491	1
+492	1
+493	1
+494	1
+495	1
+496	1
+497	1
+498	1
+5	1
+51	1
+53	1
+54	1
+57	1
+58	1
+64	1
+65	1
+66	1
+67	1
+69	1
+70	1
+72	1
+74	1
+76	1
+77	1
+78	1
+8	1
+80	1
+82	1
+83	1
+84	1
+85	1
+86	1
+87	1
+9	1
+90	1
+92	1
+95	1
+96	1
+97	1
+98	1
 PREHOOK: query: -- RS-RS-GBY
 explain select key, sum(key) from (select * from src distribute by key sort by key, value) Q1 group by key
 PREHOOK: type: QUERY
@@ -2934,6 +3342,88 @@ STAGE PLANS:
       limit: -1
 
 
+PREHOOK: query: explain select key, count(distinct value) from (select key, value from src group by key, value) t group by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select key, count(distinct value) from (select key, value from src group by key, value) t group by key
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_GROUPBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)))) t)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL value)))) (TOK_GROUPBY (TOK_TABLE_OR_COL key))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        t:src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: key, value
+              Reduce Output Operator
+                key expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                sort order: ++
+                Map-reduce partition columns:
+                      expr: key
+                      type: string
+                tag: -1
+      Reduce Operator Tree:
+        Group By Operator
+          bucketGroup: false
+          keys:
+                expr: KEY._col0
+                type: string
+                expr: KEY._col1
+                type: string
+          mode: complete
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: string
+            outputColumnNames: _col0, _col1
+            Group By Operator
+              aggregations:
+                    expr: count(DISTINCT _col1)
+              bucketGroup: false
+              keys:
+                    expr: _col0
+                    type: string
+              mode: complete
+              outputColumnNames: _col0, _col1
+              Select Operator
+                expressions:
+                      expr: _col0
+                      type: string
+                      expr: _col1
+                      type: bigint
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
 PREHOOK: query: select key, sum(key) from (select * from src distribute by key sort by key, value) Q1 group by key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -4591,3 +5081,320 @@ POSTHOOK: Input: default@src
 96
 97
 98
+PREHOOK: query: select key, count(distinct value) from (select key, value from src group by key, value) t group by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(distinct value) from (select key, value from src group by key, value) t group by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	1
+10	1
+100	1
+103	1
+104	1
+105	1
+11	1
+111	1
+113	1
+114	1
+116	1
+118	1
+119	1
+12	1
+120	1
+125	1
+126	1
+128	1
+129	1
+131	1
+133	1
+134	1
+136	1
+137	1
+138	1
+143	1
+145	1
+146	1
+149	1
+15	1
+150	1
+152	1
+153	1
+155	1
+156	1
+157	1
+158	1
+160	1
+162	1
+163	1
+164	1
+165	1
+166	1
+167	1
+168	1
+169	1
+17	1
+170	1
+172	1
+174	1
+175	1
+176	1
+177	1
+178	1
+179	1
+18	1
+180	1
+181	1
+183	1
+186	1
+187	1
+189	1
+19	1
+190	1
+191	1
+192	1
+193	1
+194	1
+195	1
+196	1
+197	1
+199	1
+2	1
+20	1
+200	1
+201	1
+202	1
+203	1
+205	1
+207	1
+208	1
+209	1
+213	1
+214	1
+216	1
+217	1
+218	1
+219	1
+221	1
+222	1
+223	1
+224	1
+226	1
+228	1
+229	1
+230	1
+233	1
+235	1
+237	1
+238	1
+239	1
+24	1
+241	1
+242	1
+244	1
+247	1
+248	1
+249	1
+252	1
+255	1
+256	1
+257	1
+258	1
+26	1
+260	1
+262	1
+263	1
+265	1
+266	1
+27	1
+272	1
+273	1
+274	1
+275	1
+277	1
+278	1
+28	1
+280	1
+281	1
+282	1
+283	1
+284	1
+285	1
+286	1
+287	1
+288	1
+289	1
+291	1
+292	1
+296	1
+298	1
+30	1
+302	1
+305	1
+306	1
+307	1
+308	1
+309	1
+310	1
+311	1
+315	1
+316	1
+317	1
+318	1
+321	1
+322	1
+323	1
+325	1
+327	1
+33	1
+331	1
+332	1
+333	1
+335	1
+336	1
+338	1
+339	1
+34	1
+341	1
+342	1
+344	1
+345	1
+348	1
+35	1
+351	1
+353	1
+356	1
+360	1
+362	1
+364	1
+365	1
+366	1
+367	1
+368	1
+369	1
+37	1
+373	1
+374	1
+375	1
+377	1
+378	1
+379	1
+382	1
+384	1
+386	1
+389	1
+392	1
+393	1
+394	1
+395	1
+396	1
+397	1
+399	1
+4	1
+400	1
+401	1
+402	1
+403	1
+404	1
+406	1
+407	1
+409	1
+41	1
+411	1
+413	1
+414	1
+417	1
+418	1
+419	1
+42	1
+421	1
+424	1
+427	1
+429	1
+43	1
+430	1
+431	1
+432	1
+435	1
+436	1
+437	1
+438	1
+439	1
+44	1
+443	1
+444	1
+446	1
+448	1
+449	1
+452	1
+453	1
+454	1
+455	1
+457	1
+458	1
+459	1
+460	1
+462	1
+463	1
+466	1
+467	1
+468	1
+469	1
+47	1
+470	1
+472	1
+475	1
+477	1
+478	1
+479	1
+480	1
+481	1
+482	1
+483	1
+484	1
+485	1
+487	1
+489	1
+490	1
+491	1
+492	1
+493	1
+494	1
+495	1
+496	1
+497	1
+498	1
+5	1
+51	1
+53	1
+54	1
+57	1
+58	1
+64	1
+65	1
+66	1
+67	1
+69	1
+70	1
+72	1
+74	1
+76	1
+77	1
+78	1
+8	1
+80	1
+82	1
+83	1
+84	1
+85	1
+86	1
+87	1
+9	1
+90	1
+92	1
+95	1
+96	1
+97	1
+98	1

Modified: hive/branches/tez/ql/src/test/results/clientpositive/rename_column.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/rename_column.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/rename_column.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/rename_column.q.out Tue Oct  1 04:48:44 2013
@@ -127,6 +127,7 @@ PREHOOK: query: SHOW TABLES
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 src
 src1
 src_json

Modified: hive/branches/tez/ql/src/test/results/clientpositive/show_tables.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/show_tables.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/show_tables.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/show_tables.q.out Tue Oct  1 04:48:44 2013
@@ -112,6 +112,7 @@ PREHOOK: query: SHOW TABLES FROM default
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES FROM default
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 shtb_test1
 shtb_test2
 src
@@ -133,6 +134,7 @@ PREHOOK: query: SHOW TABLES IN default
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES IN default
 POSTHOOK: type: SHOWTABLES
+alltypesorc
 shtb_test1
 shtb_test2
 src

Modified: hive/branches/tez/ql/src/test/results/clientpositive/union_null.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/union_null.q.out?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/union_null.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/union_null.q.out Tue Oct  1 04:48:44 2013
@@ -18,3 +18,45 @@ val_27
 NULL
 val_165
 NULL
+PREHOOK: query: -- HIVE-4837
+select * from (select null as N from src1 group by key UNION ALL select null as N from src1 group by key ) a
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+#### A masked pattern was here ####
+POSTHOOK: query: -- HIVE-4837
+select * from (select null as N from src1 group by key UNION ALL select null as N from src1 group by key ) a
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+#### A masked pattern was here ####
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/SerDeStats.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/SerDeStats.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/SerDeStats.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/SerDeStats.java Tue Oct  1 04:48:44 2013
@@ -27,9 +27,11 @@ public class SerDeStats {
 
   // currently we support only raw data size stat
   private long rawDataSize;
+  private long rowCount;
 
   public SerDeStats() {
     rawDataSize = 0;
+    rowCount = 0;
   }
 
   /**
@@ -48,4 +50,20 @@ public class SerDeStats {
     rawDataSize = uSize;
   }
 
+  /**
+   * Return the row count
+   * @return row count
+   */
+  public long getRowCount() {
+    return rowCount;
+  }
+
+  /**
+   * Set the row count
+   * @param rowCount - count of rows
+   */
+  public void setRowCount(long rowCount) {
+    this.rowCount = rowCount;
+  }
+
 }
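
For context, a minimal sketch (not part of this commit) of how the new row-count stat might be used alongside the existing raw-data-size stat; it assumes SerDeStats is constructed standalone rather than populated by a SerDe:

import org.apache.hadoop.hive.serde2.SerDeStats;

public class SerDeStatsSketch {
  public static void main(String[] args) {
    SerDeStats stats = new SerDeStats();
    stats.setRawDataSize(4096L); // pre-existing stat
    stats.setRowCount(128L);     // accessor pair added by this hunk
    System.out.println(stats.getRowCount()); // prints 128
  }
}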

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java Tue Oct  1 04:48:44 2013
@@ -71,7 +71,7 @@ public class ColumnarSerDe extends Colum
   public ColumnarSerDe() throws SerDeException {
   }
 
-  SerDeParameters serdeParams = null;
+  protected SerDeParameters serdeParams = null;
 
   /**
    * Initialize the SerDe given the parameters.

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyInteger.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyInteger.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyInteger.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyInteger.java Tue Oct  1 04:48:44 2013
@@ -136,11 +136,18 @@ public class LazyInteger extends
    */
   private static int parse(byte[] bytes, int start, int length, int offset,
       int radix, boolean negative) {
+    byte separator = '.';
     int max = Integer.MIN_VALUE / radix;
     int result = 0, end = start + length;
     while (offset < end) {
       int digit = LazyUtils.digit(bytes[offset++], radix);
       if (digit == -1) {
+        if (bytes[offset-1] == separator) {
+          // We allow decimals and will return a truncated integer in that case.
+          // Therefore we won't throw an exception here (checking the fractional
+          // part happens below.)
+          break;
+        }
         throw new NumberFormatException(LazyUtils.convertToString(bytes, start,
             length));
       }
@@ -155,6 +162,18 @@ public class LazyInteger extends
       }
       result = next;
     }
+
+    // This is the case when we've encountered a decimal separator. The fractional
+    // part will not change the number, but we will verify that the fractional part
+    // is well formed.
+    while (offset < end) {
+      int digit = LazyUtils.digit(bytes[offset++], radix);
+      if (digit == -1) {
+        throw new NumberFormatException(LazyUtils.convertToString(bytes, start,
+            length));
+      }
+    }
+
     if (!negative) {
       result = -result;
       if (result < 0) {

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyLong.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyLong.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyLong.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyLong.java Tue Oct  1 04:48:44 2013
@@ -138,11 +138,18 @@ public class LazyLong extends
    */
   private static long parse(byte[] bytes, int start, int length, int offset,
       int radix, boolean negative) {
+    byte separator = '.';
     long max = Long.MIN_VALUE / radix;
     long result = 0, end = start + length;
     while (offset < end) {
       int digit = LazyUtils.digit(bytes[offset++], radix);
       if (digit == -1 || max > result) {
+        if (bytes[offset-1] == separator) {
+          // We allow decimals and will return a truncated integer in that case.
+          // Therefore we won't throw an exception here (checking the fractional
+          // part happens below.)
+          break;
+        }
         throw new NumberFormatException(LazyUtils.convertToString(bytes, start,
             length));
       }
@@ -153,6 +160,18 @@ public class LazyLong extends
       }
       result = next;
     }
+
+    // This is the case when we've encountered a decimal separator. The fractional
+    // part will not change the number, but we will verify that the fractional part
+    // is well formed.
+    while (offset < end) {
+      int digit = LazyUtils.digit(bytes[offset++], radix);
+      if (digit == -1) {
+        throw new NumberFormatException(LazyUtils.convertToString(bytes, start,
+            length));
+      }
+    }
+
     if (!negative) {
       result = -result;
       if (result < 0) {
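
For context, a minimal sketch (not part of this commit) of the truncation behavior the LazyInteger and LazyLong hunks introduce; it assumes the public LazyInteger.parseInt and LazyLong.parseLong entry points, which delegate to the private parse() methods patched here:

import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;

public class LazyParseSketch {
  public static void main(String[] args) {
    byte[] ok = "123.45".getBytes();
    // Decimal input no longer throws; the fractional part is validated
    // digit by digit and then discarded.
    System.out.println(LazyInteger.parseInt(ok, 0, ok.length)); // prints 123
    System.out.println(LazyLong.parseLong(ok, 0, ok.length));   // prints 123

    byte[] bad = "123.4x".getBytes();
    try {
      LazyInteger.parseInt(bad, 0, bad.length);
    } catch (NumberFormatException e) {
      // A malformed fractional part is still rejected.
      System.out.println("rejected: " + new String(bad));
    }
  }
}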

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java Tue Oct  1 04:48:44 2013
@@ -34,12 +34,12 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
@@ -130,8 +130,8 @@ public final class LazyUtils {
     }
   }
 
-  private static byte[] trueBytes = {(byte) 't', 'r', 'u', 'e'};
-  private static byte[] falseBytes = {(byte) 'f', 'a', 'l', 's', 'e'};
+  public static byte[] trueBytes = {(byte) 't', 'r', 'u', 'e'};
+  public static byte[] falseBytes = {(byte) 'f', 'a', 'l', 's', 'e'};
 
   /**
    * Write the bytes with special characters escaped.
@@ -144,7 +144,7 @@ public final class LazyUtils {
    *          if escaped, whether a specific character needs escaping. This
    *          array should have size of 128.
    */
-  private static void writeEscaped(OutputStream out, byte[] bytes, int start,
+  public static void writeEscaped(OutputStream out, byte[] bytes, int start,
       int len, boolean escaped, byte escapeChar, boolean[] needsEscape)
       throws IOException {
     if (escaped) {

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveVarcharObjectInspector.java Tue Oct  1 04:48:44 2013
@@ -22,11 +22,9 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyHiveVarchar;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 
 public class LazyHiveVarcharObjectInspector
     extends AbstractPrimitiveLazyObjectInspector<HiveVarcharWritable>
@@ -62,14 +60,16 @@ public class LazyHiveVarcharObjectInspec
     }
 
     HiveVarchar ret = ((LazyHiveVarchar) o).getWritableObject().getHiveVarchar();
+    VarcharTypeParams typeParams = (VarcharTypeParams)getTypeParams();
     if (!ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
-        ret, (VarcharTypeParams) typeParams)) {
-      HiveVarchar newValue = new HiveVarchar(ret, ((VarcharTypeParams) typeParams).length);
+        ret, typeParams)) {
+      HiveVarchar newValue = new HiveVarchar(ret, typeParams.length);
       return newValue;
     }
     return ret;
   }
 
+  @Override
   public String toString() {
     return getTypeName();
   }

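The pattern in this hunk — read the varchar parameters through getTypeParams() (they now live on the shared type entry, per the AbstractPrimitiveObjectInspector change below) and shorten any value that exceeds the declared length — repeats in the Java and Writable inspectors that follow. A standalone sketch of the enforce-on-read idea, with a plain String standing in for HiveVarchar, whose (HiveVarchar, int) constructor does the truncation in the real code:

    public class VarcharEnforceDemo {
      static String enforce(String value, int declaredLength) {
        return value.length() > declaredLength
            ? value.substring(0, declaredLength)  // return a shortened copy
            : value;                              // already fits: return as-is
      }

      public static void main(String[] args) {
        System.out.println(enforce("hive and hadoop", 4)); // "hive"
      }
    }
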
Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/AbstractPrimitiveObjectInspector.java Tue Oct  1 04:48:44 2013
@@ -29,7 +29,6 @@ public abstract class AbstractPrimitiveO
     PrimitiveObjectInspector {
 
   protected PrimitiveTypeEntry typeEntry;
-  protected BaseTypeParams typeParams;
 
   protected AbstractPrimitiveObjectInspector() {
     super();
@@ -85,14 +84,15 @@ public abstract class AbstractPrimitiveO
   }
 
   public BaseTypeParams getTypeParams() {
-    return typeParams;
+    return typeEntry.typeParams;
   }
 
   public void setTypeParams(BaseTypeParams newParams) {
+    BaseTypeParams typeParams = typeEntry.typeParams;
     if (typeParams != null && !typeEntry.isParameterized()) {
       throw new UnsupportedOperationException(
           "Attempting to add type parameters " + typeParams + " to type " + getTypeName());
     }
-    this.typeParams = newParams;
+    typeEntry.typeParams = newParams;
   }
 }

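This hunk removes the per-inspector typeParams copy and delegates storage to the PrimitiveTypeEntry, so every inspector built from the same entry observes one set of parameters. A minimal sketch with invented names (TypeEntry, Inspector) standing in for the Hive classes:

    public class SharedParamsDemo {
      static class TypeEntry {
        Object typeParams;                       // single shared copy
      }

      static class Inspector {
        private final TypeEntry typeEntry;
        Inspector(TypeEntry typeEntry) { this.typeEntry = typeEntry; }

        Object getTypeParams() {
          return typeEntry.typeParams;           // read through to the entry
        }

        void setTypeParams(Object newParams) {
          typeEntry.typeParams = newParams;      // visible to all sharers
        }
      }

      public static void main(String[] args) {
        TypeEntry entry = new TypeEntry();
        Inspector a = new Inspector(entry);
        Inspector b = new Inspector(entry);
        a.setTypeParams("varchar(20)");
        System.out.println(b.getTypeParams());   // varchar(20): one shared copy
      }
    }

The flip side is that setTypeParams now mutates shared state, so distinct parameterizations (say varchar(10) and varchar(20)) presumably need distinct type entries.
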
Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java Tue Oct  1 04:48:44 2013
@@ -18,11 +18,8 @@
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 
@@ -42,13 +39,14 @@ public class JavaHiveVarcharObjectInspec
     }
   }
 
+  @Override
   public HiveVarchar getPrimitiveJavaObject(Object o) {
     if (o == null) {
       return null;
     }
     HiveVarchar value = (HiveVarchar)o;
     if (ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeParams) typeParams)) {
+        value, (VarcharTypeParams) getTypeParams())) {
       return value;
     }
     // value needs to be converted to match the type params (length, etc).
@@ -78,7 +76,7 @@ public class JavaHiveVarcharObjectInspec
   public Object set(Object o, HiveVarchar value) {
     HiveVarchar setValue = (HiveVarchar)o;
     if (ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeParams) typeParams)) {
+        value, (VarcharTypeParams) getTypeParams())) {
       setValue.setValue(value);
     } else {
       // Otherwise value may be too long, convert to appropriate value based on params
@@ -102,6 +100,7 @@ public class JavaHiveVarcharObjectInspec
   }
 
   public int getMaxLength() {
-    return typeParams != null ? ((VarcharTypeParams) typeParams).length : -1;
+    VarcharTypeParams typeParams = (VarcharTypeParams)getTypeParams();
+    return typeParams != null ? typeParams.length : -1;
   }
 }

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java Tue Oct  1 04:48:44 2013
@@ -21,11 +21,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
-import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 
 public class WritableHiveVarcharObjectInspector
     extends AbstractPrimitiveWritableObjectInspector
@@ -59,6 +57,7 @@ public class WritableHiveVarcharObjectIn
     return getPrimitiveWithParams(writable);
   }
 
+  @Override
   public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
     // check input object's length, if it doesn't match
     // then output new writable with correct params.
@@ -87,12 +86,7 @@ public class WritableHiveVarcharObjectIn
 
   private boolean doesWritableMatchTypeParams(HiveVarcharWritable writable) {
     return ParameterizedPrimitiveTypeUtils.doesWritableMatchTypeParams(
-        writable, (VarcharTypeParams) typeParams);
-  }
-
-  private boolean doesPrimitiveMatchTypeParams(HiveVarchar value) {
-    return ParameterizedPrimitiveTypeUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeParams) typeParams);
+        writable, (VarcharTypeParams)getTypeParams());
   }
 
   @Override
@@ -130,6 +124,8 @@ public class WritableHiveVarcharObjectIn
   }
 
   public int getMaxLength() {
-    return typeParams != null ? ((VarcharTypeParams) typeParams).length : -1;
+    VarcharTypeParams typeParams = (VarcharTypeParams)getTypeParams();
+    return typeParams != null ? typeParams.length : -1;
   }
+
 }

Modified: hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java (original)
+++ hive/branches/tez/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java Tue Oct  1 04:48:44 2013
@@ -62,12 +62,12 @@ public class TestLazySimpleSerDe extends
       // Data
       Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\tNULL\t");
       t.append(new byte[]{(byte)Integer.parseInt("10111111", 2)}, 0, 1);
-      StringBuffer sb = new StringBuffer("123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\tNULL\t");
+      StringBuffer sb = new StringBuffer("123\t456\t789\t1000\t5.3\thive and hadoop\t1\tNULL\t");
       String s = sb.append(new String(Base64.encodeBase64(new byte[]{(byte)Integer.parseInt("10111111", 2)}))).toString();
       Object[] expectedFieldsData = {new ByteWritable((byte) 123),
           new ShortWritable((short) 456), new IntWritable(789),
           new LongWritable(1000), new DoubleWritable(5.3),
-          new Text("hive and hadoop"), null, null, new BytesWritable(new byte[]{(byte)Integer.parseInt("10111111", 2)})};
+          new Text("hive and hadoop"), new IntWritable(1), null, new BytesWritable(new byte[]{(byte)Integer.parseInt("10111111", 2)})};
 
       // Test
       deserializeAndSerialize(serDe, t, s, expectedFieldsData);
@@ -128,11 +128,11 @@ public class TestLazySimpleSerDe extends
 
       // Data
       Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
-      String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta\tb\t";
+      String s = "123\t456\t789\t1000\t5.3\thive and hadoop\t1\ta\tb\t";
       Object[] expectedFieldsData = {new ByteWritable((byte) 123),
           new ShortWritable((short) 456), new IntWritable(789),
           new LongWritable(1000), new DoubleWritable(5.3),
-          new Text("hive and hadoop"), null, new Text("a\tb\t")};
+          new Text("hive and hadoop"), new IntWritable(1), new Text("a\tb\t")};
 
       // Test
       deserializeAndSerialize(serDe, t, s, expectedFieldsData);
@@ -156,11 +156,11 @@ public class TestLazySimpleSerDe extends
 
       // Data
       Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
-      String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta";
+      String s = "123\t456\t789\t1000\t5.3\thive and hadoop\t1\ta";
       Object[] expectedFieldsData = {new ByteWritable((byte) 123),
           new ShortWritable((short) 456), new IntWritable(789),
           new LongWritable(1000), new DoubleWritable(5.3),
-          new Text("hive and hadoop"), null, new Text("a")};
+          new Text("hive and hadoop"), new IntWritable(1), new Text("a")};
 
       // Test
       deserializeAndSerialize(serDe, t, s, expectedFieldsData);

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java Tue Oct  1 04:48:44 2013
@@ -42,45 +42,37 @@ import org.apache.hive.service.cli.RowSe
 import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.thrift.TException;
-import org.apache.thrift.TProcessorFactory;
-import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.server.TServer;
-import org.apache.thrift.server.TThreadPoolServer;
-import org.apache.thrift.transport.TServerSocket;
-import org.apache.thrift.transport.TTransportFactory;
-
 
 /**
- * CLIService.
+ * ThriftCLIService.
  *
  */
-public class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable {
+public abstract class ThriftCLIService extends AbstractService implements TCLIService.Iface, Runnable {
 
   public static final Log LOG = LogFactory.getLog(ThriftCLIService.class.getName());
 
-
   protected CLIService cliService;
   private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS);
   private static final TStatus ERROR_STATUS = new TStatus(TStatusCode.ERROR_STATUS);
 
-  private static HiveAuthFactory hiveAuthFactory;
-
-  private int portNum;
-  private InetSocketAddress serverAddress;
-  private TServer server;
+  protected int portNum;
+  protected InetSocketAddress serverAddress;
+  protected TServer server;
+  protected org.mortbay.jetty.Server httpServer;
 
   private boolean isStarted = false;
   protected boolean isEmbedded = false;
 
-  private HiveConf hiveConf;
-
-  private int minWorkerThreads;
-  private int maxWorkerThreads;
+  protected HiveConf hiveConf;
 
+  protected int minWorkerThreads;
+  protected int maxWorkerThreads;
 
+  protected static HiveAuthFactory hiveAuthFactory;
 
-  public ThriftCLIService(CLIService cliService) {
-    super("ThriftCLIService");
+  public ThriftCLIService(CLIService cliService, String serviceName) {
+    super(serviceName);
     this.cliService = cliService;
   }
 
@@ -102,7 +94,18 @@ public class ThriftCLIService extends Ab
   @Override
   public synchronized void stop() {
     if (isStarted && !isEmbedded) {
-      server.stop();
+      if(server != null) {
+        server.stop();
+        LOG.info("Thrift server has stopped");
+      }
+      if((httpServer != null) && httpServer.isStarted()) {
+        try {
+          httpServer.stop();
+          LOG.info("Http server has stopped");
+        } catch (Exception e) {
+          LOG.error("Error stopping Http server: ", e);
+        }
+      }
       isStarted = false;
     }
     super.stop();
@@ -155,10 +158,10 @@ public class ThriftCLIService extends Ab
         // The delegation token is not applicable in the given deployment mode
       }
       sessionHandle = cliService.openSessionWithImpersonation(userName, req.getPassword(),
-            req.getConfiguration(), delegationTokenStr);
+          req.getConfiguration(), delegationTokenStr);
     } else {
       sessionHandle = cliService.openSession(userName, req.getPassword(),
-            req.getConfiguration());
+          req.getConfiguration());
     }
     return sessionHandle;
   }
@@ -203,9 +206,9 @@ public class ThriftCLIService extends Ab
       Boolean runAsync = req.isRunAsync();
       OperationHandle operationHandle = runAsync ?
           cliService.executeStatementAsync(sessionHandle, statement, confOverlay)
-              : cliService.executeStatement(sessionHandle, statement, confOverlay);
-      resp.setOperationHandle(operationHandle.toTOperationHandle());
-      resp.setStatus(OK_STATUS);
+          : cliService.executeStatement(sessionHandle, statement, confOverlay);
+      resp.setOperationHandle(operationHandle.toTOperationHandle());
+      resp.setStatus(OK_STATUS);
     } catch (Exception e) {
       e.printStackTrace();
       resp.setStatus(HiveSQLException.toTStatus(e));
@@ -394,52 +397,6 @@ public class ThriftCLIService extends Ab
     return resp;
   }
 
-
   @Override
-  public void run() {
-    try {
-      hiveAuthFactory = new HiveAuthFactory();
-      TTransportFactory  transportFactory = hiveAuthFactory.getAuthTransFactory();
-      TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
-
-      String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
-      if (portString != null) {
-        portNum = Integer.valueOf(portString);
-      } else {
-        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
-      }
-
-      String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
-      if (hiveHost == null) {
-        hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
-      }
-
-      if (hiveHost != null && !hiveHost.isEmpty()) {
-        serverAddress = new InetSocketAddress(hiveHost, portNum);
-      } else {
-        serverAddress = new  InetSocketAddress(portNum);
-      }
-
-
-      minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
-      maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
-
-
-      TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(new TServerSocket(serverAddress))
-      .processorFactory(processorFactory)
-      .transportFactory(transportFactory)
-      .protocolFactory(new TBinaryProtocol.Factory())
-      .minWorkerThreads(minWorkerThreads)
-      .maxWorkerThreads(maxWorkerThreads);
-
-      server = new TThreadPoolServer(sargs);
-
-      LOG.info("ThriftCLIService listening on " + serverAddress);
-
-      server.serve();
-    } catch (Throwable t) {
-      t.printStackTrace();
-    }
-  }
-
+  public abstract void run();
 }

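Net effect of this hunk: ThriftCLIService becomes a transport-agnostic base class — it keeps the session and operation handling plus a stop() that covers both server fields — while run() moves down into ThriftBinaryCLIService and ThriftHttpCLIService (referenced by the HiveServer2 and test hunks below; their sources are not in this part of the commit). A skeletal sketch of the split, with invented names; the subclass bodies are illustrative only:

    public class TransportSplitDemo {
      abstract static class BaseThriftService implements Runnable {
        protected int portNum;              // shared transport settings
        @Override
        public abstract void run();         // each transport builds its own server
      }

      static class BinaryThriftService extends BaseThriftService {
        @Override
        public void run() {
          // would hold the TThreadPoolServer setup deleted above
          System.out.println("binary transport on port " + portNum);
        }
      }

      static class HttpThriftService extends BaseThriftService {
        @Override
        public void run() {
          // would start the Jetty httpServer field added above
          System.out.println("http transport on port " + portNum);
        }
      }

      public static void main(String[] args) {
        BaseThriftService svc = new BinaryThriftService();
        svc.portNum = 10000;                // illustrative port only
        svc.run();
      }
    }
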
Modified: hive/branches/tez/service/src/java/org/apache/hive/service/server/HiveServer2.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/server/HiveServer2.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/server/HiveServer2.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/server/HiveServer2.java Tue Oct  1 04:48:44 2013
@@ -26,7 +26,9 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hive.service.CompositeService;
 import org.apache.hive.service.cli.CLIService;
+import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.hive.service.cli.thrift.ThriftHttpCLIService;
 
 /**
  * HiveServer2.
@@ -50,9 +52,19 @@ public class HiveServer2 extends Composi
     cliService = new CLIService();
     addService(cliService);
 
-    thriftCLIService = new ThriftCLIService(cliService);
-    addService(thriftCLIService);
+    String transportMode = System.getenv("HIVE_SERVER2_TRANSPORT_MODE");
+    if(transportMode == null) {
+      transportMode = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
+    }
+    if(transportMode != null && (transportMode.equalsIgnoreCase("http") ||
+        transportMode.equalsIgnoreCase("https"))) {
+      thriftCLIService = new ThriftHttpCLIService(cliService);
+    }
+    else {
+      thriftCLIService = new ThriftBinaryCLIService(cliService);
+    }
 
+    addService(thriftCLIService);
     super.init(hiveConf);
   }
 
@@ -70,7 +82,6 @@ public class HiveServer2 extends Composi
    * @param args
    */
   public static void main(String[] args) {
-
     //NOTE: It is critical to do this here so that log4j is reinitialized
     // before any of the other core hive classes are loaded
     try {
@@ -97,3 +108,4 @@ public class HiveServer2 extends Composi
   }
 
 }
+

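The selection logic added above: the HIVE_SERVER2_TRANSPORT_MODE environment variable wins over the HiveConf value, and only "http"/"https" (case-insensitive) routes to the HTTP service; anything else, including unset, falls back to the binary Thrift transport. A standalone sketch of just that decision:

    public class TransportModeDemo {
      static String choose(String envMode, String confMode) {
        String mode = (envMode != null) ? envMode : confMode;
        if (mode != null && (mode.equalsIgnoreCase("http")
            || mode.equalsIgnoreCase("https"))) {
          return "ThriftHttpCLIService";
        }
        return "ThriftBinaryCLIService";     // default transport
      }

      public static void main(String[] args) {
        System.out.println(choose(null, "binary")); // ThriftBinaryCLIService
        System.out.println(choose("HTTP", null));   // ThriftHttpCLIService
      }
    }
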
Modified: hive/branches/tez/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java (original)
+++ hive/branches/tez/service/src/test/org/apache/hive/service/auth/TestPlainSaslHelper.java Tue Oct  1 04:48:44 2013
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.service.cli.CLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.thrift.TProcessorFactory;
 
 public class TestPlainSaslHelper extends TestCase {
@@ -40,7 +41,7 @@ public class TestPlainSaslHelper extends
 
     CLIService cliService = new CLIService();
     cliService.init(hconf);
-    ThriftCLIService tcliService = new ThriftCLIService(cliService);
+    ThriftCLIService tcliService = new ThriftBinaryCLIService(cliService);
     tcliService.init(hconf);
     TProcessorFactory procFactory = PlainSaslHelper.getPlainProcessorFactory(tcliService);
     assertEquals("doAs enabled processor for unsecure mode",

Modified: hive/branches/tez/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java (original)
+++ hive/branches/tez/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java Tue Oct  1 04:48:44 2013
@@ -51,7 +51,7 @@ public abstract class CLIServiceTest {
   }
 
   @Test
-  public void createSessionTest() throws Exception {
+  public void openSessionTest() throws Exception {
     SessionHandle sessionHandle = client
         .openSession("tom", "password", Collections.<String, String>emptyMap());
     assertNotNull(sessionHandle);

Modified: hive/branches/tez/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java (original)
+++ hive/branches/tez/service/src/test/org/apache/hive/service/cli/session/TestSessionHooks.java Tue Oct  1 04:48:44 2013
@@ -27,7 +27,7 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.SessionHandle;
-import org.apache.hive.service.cli.thrift.EmbeddedThriftCLIService;
+import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
 import org.junit.Before;
 import org.junit.Test;
@@ -35,7 +35,7 @@ import org.junit.Test;
 public class TestSessionHooks extends TestCase {
 
   public static final String SESSION_USER_NAME = "user1";
-  private EmbeddedThriftCLIService service;
+  private EmbeddedThriftBinaryCLIService service;
   private ThriftCLIServiceClient client;
 
   public static class SessionHookTest implements HiveSessionHook {
@@ -58,7 +58,7 @@ public class TestSessionHooks extends Te
     super.setUp();
     System.setProperty(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname,
         TestSessionHooks.SessionHookTest.class.getName());
-    service = new EmbeddedThriftCLIService();
+    service = new EmbeddedThriftBinaryCLIService();
     client = new ThriftCLIServiceClient(service);
   }
 

Modified: hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java (original)
+++ hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/PTest.java Tue Oct  1 04:48:44 2013
@@ -110,6 +110,7 @@ public class PTest {
     put("buildTag", buildTag).
     put("logDir", logDir.getAbsolutePath()).
     put("javaHome", configuration.getJavaHome()).
+    put("javaHomeForTests", configuration.getJavaHomeForTests()).
     put("antEnvOpts", configuration.getAntEnvOpts());
     final ImmutableMap<String, String> templateDefaults = templateDefaultsBuilder.build();
     TestParser testParser = new TestParser(configuration.getContext(),
@@ -221,12 +222,13 @@ public class PTest {
   }
 
   private static final String PROPERTIES = "properties";
-  private static final String REPOSITORY = "repository";
-  private static final String REPOSITORY_NAME = "repositoryName";
-  private static final String BRANCH = "branch";
+  private static final String REPOSITORY = TestConfiguration.REPOSITORY;
+  private static final String REPOSITORY_NAME = TestConfiguration.REPOSITORY_NAME;
+  private static final String BRANCH = TestConfiguration.BRANCH;
   private static final String PATCH = "patch";
-  private static final String JAVA_HOME = "javaHome";
-  private static final String ANT_ENV_OPTS = "antEnvOpts";
+  private static final String JAVA_HOME = TestConfiguration.JAVA_HOME;
+  private static final String JAVA_HOME_TEST = TestConfiguration.JAVA_HOME_TEST;
+  private static final String ANT_ENV_OPTS = TestConfiguration.ANT_ENV_OPTS;
   /**
    * All args override properties file settings except
    * for this one which is additive.
@@ -243,7 +245,8 @@ public class PTest {
     options.addOption(null, BRANCH, true, "Overrides git branch in properties file");
     options.addOption(null, PATCH, true, "URI to patch, either file:/// or http(s)://");
     options.addOption(ANT_ARG, null, true, "Supplemental ant arguments");
-    options.addOption(null, JAVA_HOME, true, "Java Home for compiling and running tests");
+    options.addOption(null, JAVA_HOME, true, "Java Home for compiling and running tests (unless " + JAVA_HOME_TEST + " is specified)");
+    options.addOption(null, JAVA_HOME_TEST, true, "Java Home for running tests (optional)");
     options.addOption(null, ANT_ENV_OPTS, true, "ANT_OPTS environment variable setting");
     CommandLine commandLine = parser.parse(options, args);
     if(!commandLine.hasOption(PROPERTIES)) {
@@ -282,6 +285,10 @@ public class PTest {
         if(!javaHome.isEmpty()) {
           conf.setJavaHome(javaHome);
         }
+        String javaHomeForTests = Strings.nullToEmpty(commandLine.getOptionValue(JAVA_HOME_TEST)).trim();
+        if(!javaHomeForTests.isEmpty()) {
+          conf.setJavaHomeForTests(javaHomeForTests);
+        }
         String antEnvOpts = Strings.nullToEmpty(commandLine.getOptionValue(ANT_ENV_OPTS)).trim();
         if(!antEnvOpts.isEmpty()) {
           conf.setAntEnvOpts(antEnvOpts);

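Per the option help text above, javaHome still covers both compiling and running tests unless javaHomeForTests is given. The fallback site itself is not in this hunk, so the following is a hedged sketch of how a consumer would apply it, relying only on the fact (visible in the TestConfiguration hunk below) that getJavaHomeForTests() returns an empty string when the option is absent:

    public class JdkSelectionDemo {
      // javaHomeForTests is "" when the optional flag was not supplied
      static String testJdk(String javaHome, String javaHomeForTests) {
        return javaHomeForTests.isEmpty() ? javaHome : javaHomeForTests;
      }

      public static void main(String[] args) {
        System.out.println(testJdk("/usr/lib/jvm/jdk6", ""));                  // falls back
        System.out.println(testJdk("/usr/lib/jvm/jdk6", "/usr/lib/jvm/jdk7")); // override
      }
    }
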
Modified: hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java (original)
+++ hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/conf/TestConfiguration.java Tue Oct  1 04:48:44 2013
@@ -32,19 +32,21 @@ import com.google.common.base.Strings;
 import com.google.common.collect.Maps;
 
 public class TestConfiguration {
+  public static final String REPOSITORY = "repository";
+  public static final String REPOSITORY_NAME = "repositoryName";
+  public static final String BRANCH = "branch";
+  public static final String JAVA_HOME = "javaHome";
+  public static final String JAVA_HOME_TEST = "javaHomeForTests";
+  public static final String ANT_ENV_OPTS = "antEnvOpts";
+  
   private static final String REPOSITORY_TYPE = "repositoryType";
   private static final String GIT = "git";
   private static final String SVN = "svn";
-  private static final String REPOSITORY = "repository";
-  private static final String REPOSITORY_NAME = "repositoryName";
-  private static final String BRANCH = "branch";
   private static final String ANT_ARGS = "antArgs";
-  private static final String ANT_ENV_OPTS = "antEnvOpts";
-  private static final String JAVA_HOME = "javaHome";
   private static final String JIRA_URL = "jiraUrl";
   private static final String JIRA_USER = "jiraUser";
   private static final String JIRA_PASSWORD = "jiraPassword";
-  public static final String JENKINS_URL = "jenkinsURL";
+  private static final String JENKINS_URL = "jenkinsURL";
 
   private final Context context;
   private String antArgs;
@@ -54,6 +56,7 @@ public class TestConfiguration {
   private String repositoryName;
   private String patch;
   private String javaHome;
+  private String javaHomeForTests;
   private String branch;
   private final String jenkinsURL;
   private final String jiraUrl;
@@ -79,6 +82,7 @@ public class TestConfiguration {
     antArgs =  Preconditions.checkNotNull(context.getString(ANT_ARGS), ANT_ARGS).trim();
     antEnvOpts =  context.getString(ANT_ENV_OPTS, "").trim();
     javaHome =  context.getString(JAVA_HOME, "").trim();
+    javaHomeForTests = context.getString(JAVA_HOME_TEST, "").trim();
     patch = Strings.nullToEmpty(null);
     jiraName = Strings.nullToEmpty(null);
     jiraUrl = context.getString(JIRA_URL, "").trim();
@@ -132,6 +136,9 @@ public class TestConfiguration {
   public String getJavaHome() {
     return javaHome;
   }
+  public String getJavaHomeForTests() {
+    return javaHomeForTests;
+  }
   public String getPatch() {
     return patch;
   }
@@ -150,6 +157,9 @@ public class TestConfiguration {
   public void setJavaHome(String javaHome) {
     this.javaHome = Strings.nullToEmpty(javaHome);
   }
+  public void setJavaHomeForTests(String javaHomeForTests) {
+    this.javaHomeForTests = javaHomeForTests;
+  }
   public void setAntArgs(String antArgs) {
     this.antArgs = Strings.nullToEmpty(antArgs);
   }
@@ -161,10 +171,14 @@ public class TestConfiguration {
   }
   @Override
   public String toString() {
-    return "Configuration [context=" + context + ", antArgs=" + antArgs
-        + ", antEnvOpts=" + antEnvOpts + ", repository=" + repository
-        + ", repositoryName=" + repositoryName + ", patch=" + patch
-        + ", javaHome=" + javaHome + ", branch=" + branch + "]";
+    return "TestConfiguration [antArgs=" + antArgs + ", antEnvOpts="
+        + antEnvOpts + ", repositoryType=" + repositoryType + ", repository="
+        + repository + ", repositoryName=" + repositoryName + ", patch="
+        + patch + ", javaHome=" + javaHome + ", javaHomeForTests="
+        + javaHomeForTests + ", branch=" + branch + ", jenkinsURL="
+        + jenkinsURL + ", jiraUrl=" + jiraUrl + ", jiraUser=" + jiraUser
+        + ", jiraName=" + jiraName + ", clearLibraryCache=" + clearLibraryCache
+        + "]";
   }
   public static TestConfiguration fromInputStream(InputStream inputStream, Logger logger)
       throws IOException {

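Besides the javaHomeForTests plumbing, this hunk promotes the property-key strings to public constants so that PTest (above) aliases them rather than re-declaring the literals, keeping option names and property names from drifting apart. A two-class sketch of the pattern, with invented names:

    public final class ConfigKeys {
      public static final String JAVA_HOME = "javaHome";   // single source of truth

      private ConfigKeys() {}
    }

    class CliOptions {
      // alias the shared constant instead of re-declaring the string literal
      static final String JAVA_HOME = ConfigKeys.JAVA_HOME;

      public static void main(String[] args) {
        System.out.println(JAVA_HOME);   // prints: javaHome
      }
    }
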
Modified: hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java?rev=1527883&r1=1527882&r2=1527883&view=diff
==============================================================================
--- hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java (original)
+++ hive/branches/tez/testutils/ptest2/src/main/java/org/apache/hive/ptest/execution/context/CloudExecutionContextProvider.java Tue Oct  1 04:48:44 2013
@@ -106,7 +106,7 @@ public class CloudExecutionContextProvid
             return size() > 100;
           }
         });
-    mTerminationExecutor = Executors.newCachedThreadPool();
+    mTerminationExecutor = Executors.newSingleThreadExecutor();
     mHostLog = new RandomAccessFile(new File(dataDir, "hosts"), "rw");
     initialize();
   }

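The one-line change above serializes node termination on a single worker instead of fanning it out over an unbounded cached pool, so bursts of terminations queue up rather than hitting the cloud provider concurrently. A small sketch of the behavioral difference, assuming nothing beyond java.util.concurrent:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class TerminationExecutorDemo {
      public static void main(String[] args) {
        // Before: one new/reused thread per submit; terminations can overlap.
        ExecutorService cached = Executors.newCachedThreadPool();
        // After: a single worker; requests run strictly one at a time.
        ExecutorService single = Executors.newSingleThreadExecutor();

        for (int i = 0; i < 3; i++) {
          final int host = i;
          single.submit(new Runnable() {
            @Override
            public void run() {
              System.out.println("terminating host " + host);
            }
          });
        }
        single.shutdown();
        cached.shutdown();
      }
    }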

