hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hashut...@apache.org
Subject svn commit: r1524874 [7/9] - in /hive/branches/vectorization: ./ cli/src/java/org/apache/hadoop/hive/cli/ cli/src/test/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ common/s...
Date Fri, 20 Sep 2013 00:33:01 GMT
Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/partition_wise_fileformat17.q Fri Sep 20 00:32:55 2013
@@ -1,6 +1,6 @@
--- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here.
+-- HIVE-5199, HIVE-5285 : CustomSerDe(1, 2, 3) are used here.
 -- The final results should be all NULL columns deserialized using 
--- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values
+-- CustomSerDe(1, 2, 3) irrespective of the inserted values
 
 DROP TABLE PW17;
 ADD JAR ../build/ql/test/test-serdes.jar;
@@ -8,12 +8,27 @@ CREATE TABLE PW17(USER STRING, COMPLEXDT
 LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1');
 ALTER TABLE PW17 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2';
 ALTER TABLE PW17 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1';
--- Without the fix, will throw cast exception via FetchOperator
+-- Without the fix HIVE-5199, will throw cast exception via FetchOperator
 SELECT * FROM PW17;
 
 -- Test for non-partitioned table. 
 DROP TABLE PW17_2;
 CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1';
 LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2;
--- Without the fix, will throw cast exception via MapOperator
-SELECT COUNT(*) FROM PW17_2;
\ No newline at end of file
+-- Without the fix HIVE-5199, will throw cast exception via MapOperator
+SELECT COUNT(*) FROM PW17_2;
+
+DROP TABLE PW17_3;
+CREATE TABLE PW17_3(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3';
+LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1');
+ALTER TABLE PW17_3 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2';
+ALTER TABLE PW17_3 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3';
+-- Without the fix HIVE-5285, will throw cast exception via FetchOperator
+SELECT * FROM PW17_3;
+
+DROP TABLE PW17_4;
+CREATE TABLE PW17_4(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3';
+LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4;
+-- Without the fix HIVE-5285, will throw cast exception via MapOperator
+SELECT COUNT(*) FROM PW17_4;
+

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/schemeAuthority2.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/schemeAuthority2.q?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/schemeAuthority2.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/schemeAuthority2.q Fri Sep 20 00:32:55 2013
@@ -1,5 +1,5 @@
-dfs -mkdir file:///tmp/test;
-dfs -mkdir hdfs:///tmp/test;
+dfs ${system:test.dfs.mkdir} file:///tmp/test;
+dfs ${system:test.dfs.mkdir} hdfs:///tmp/test;
 
 create external table dynPart (key string) partitioned by (value string, value2 string) row format delimited fields terminated by '\\t' stored as textfile;
 insert overwrite local directory "/tmp/test" select key from src where (key = 10) order by key;

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/scriptfile1.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/scriptfile1.q?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/scriptfile1.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/scriptfile1.q Fri Sep 20 00:32:55 2013
@@ -1,4 +1,6 @@
 set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+
+-- EXCLUDE_OS_WINDOWS
 CREATE TABLE dest1(key INT, value STRING);
 
 ADD FILE src/test/scripts/testgrep;

Modified: hive/branches/vectorization/ql/src/test/queries/clientpositive/udaf_collect_set.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/udaf_collect_set.q?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/udaf_collect_set.q (original)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/udaf_collect_set.q Fri Sep 20 00:32:55 2013
@@ -1,6 +1,9 @@
 DESCRIBE FUNCTION collect_set;
 DESCRIBE FUNCTION EXTENDED collect_set;
 
+DESCRIBE FUNCTION collect_list;
+DESCRIBE FUNCTION EXTENDED collect_list;
+
 set hive.map.aggr = false;
 set hive.groupby.skewindata = false;
 
@@ -8,6 +11,10 @@ SELECT key, collect_set(value)
 FROM src
 GROUP BY key ORDER BY key limit 20;
 
+SELECT key, collect_list(value)
+FROM src
+GROUP BY key ORDER by key limit 20;
+
 set hive.map.aggr = true;
 set hive.groupby.skewindata = false;
 
@@ -15,6 +22,10 @@ SELECT key, collect_set(value)
 FROM src
 GROUP BY key ORDER BY key limit 20;
 
+SELECT key, collect_list(value)
+FROM src
+GROUP BY key ORDER BY key limit 20;
+
 set hive.map.aggr = false;
 set hive.groupby.skewindata = true;
 

Modified: hive/branches/vectorization/ql/src/test/results/clientpositive/newline.q.out
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/clientpositive/newline.q.out?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/clientpositive/newline.q.out (original)
+++ hive/branches/vectorization/ql/src/test/results/clientpositive/newline.q.out Fri Sep 20 00:32:55 2013
@@ -298,11 +298,11 @@ POSTHOOK: Lineage: tmp_tmp.value SCRIPT 
 1\	2	NULL
 1\\t2	NULL
 1\	2	NULL
-PREHOOK: query: SELECT key FROM (SELECT TRANSFORM ('a\tb', 'c') USING '/bin/cat' AS (key, value) FROM src limit 1)a ORDER BY key ASC
+PREHOOK: query: SELECT key FROM (SELECT TRANSFORM ('a\tb', 'c') USING 'cat' AS (key, value) FROM src limit 1)a ORDER BY key ASC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key FROM (SELECT TRANSFORM ('a\tb', 'c') USING '/bin/cat' AS (key, value) FROM src limit 1)a ORDER BY key ASC
+POSTHOOK: query: SELECT key FROM (SELECT TRANSFORM ('a\tb', 'c') USING 'cat' AS (key, value) FROM src limit 1)a ORDER BY key ASC
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -317,11 +317,11 @@ POSTHOOK: Lineage: tmp_tmp.value SCRIPT 
 POSTHOOK: Lineage: tmp_tmp.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: tmp_tmp.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
 a	b
-PREHOOK: query: SELECT value FROM (SELECT TRANSFORM ('a\tb', 'c') USING '/bin/cat' AS (key, value) FROM src limit 1)a ORDER BY value ASC
+PREHOOK: query: SELECT value FROM (SELECT TRANSFORM ('a\tb', 'c') USING 'cat' AS (key, value) FROM src limit 1)a ORDER BY value ASC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT value FROM (SELECT TRANSFORM ('a\tb', 'c') USING '/bin/cat' AS (key, value) FROM src limit 1)a ORDER BY value ASC
+POSTHOOK: query: SELECT value FROM (SELECT TRANSFORM ('a\tb', 'c') USING 'cat' AS (key, value) FROM src limit 1)a ORDER BY value ASC
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

Modified: hive/branches/vectorization/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out (original)
+++ hive/branches/vectorization/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out Fri Sep 20 00:32:55 2013
@@ -1,12 +1,12 @@
-PREHOOK: query: -- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here.
+PREHOOK: query: -- HIVE-5199, HIVE-5285 : CustomSerDe(1, 2, 3) are used here.
 -- The final results should be all NULL columns deserialized using 
--- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values
+-- CustomSerDe(1, 2, 3) irrespective of the inserted values
 
 DROP TABLE PW17
 PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- HIVE-5199 : CustomSerDe1 and CustomSerDe2 are used here.
+POSTHOOK: query: -- HIVE-5199, HIVE-5285 : CustomSerDe(1, 2, 3) are used here.
 -- The final results should be all NULL columns deserialized using 
--- CustomSerDe1 and CustomSerDe2 irrespective of the inserted values
+-- CustomSerDe(1, 2, 3) irrespective of the inserted values
 
 DROP TABLE PW17
 POSTHOOK: type: DROPTABLE
@@ -39,13 +39,13 @@ POSTHOOK: query: ALTER TABLE PW17 SET SE
 POSTHOOK: type: ALTERTABLE_SERIALIZER
 POSTHOOK: Input: default@pw17
 POSTHOOK: Output: default@pw17
-PREHOOK: query: -- Without the fix, will throw cast exception via FetchOperator
+PREHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via FetchOperator
 SELECT * FROM PW17
 PREHOOK: type: QUERY
 PREHOOK: Input: default@pw17
 PREHOOK: Input: default@pw17@year=1
 #### A masked pattern was here ####
-POSTHOOK: query: -- Without the fix, will throw cast exception via FetchOperator
+POSTHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via FetchOperator
 SELECT * FROM PW17
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@pw17
@@ -72,14 +72,89 @@ PREHOOK: Output: default@pw17_2
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@pw17_2
-PREHOOK: query: -- Without the fix, will throw cast exception via MapOperator
+PREHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via MapOperator
 SELECT COUNT(*) FROM PW17_2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@pw17_2
 #### A masked pattern was here ####
-POSTHOOK: query: -- Without the fix, will throw cast exception via MapOperator
+POSTHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via MapOperator
 SELECT COUNT(*) FROM PW17_2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@pw17_2
 #### A masked pattern was here ####
 4
+PREHOOK: query: DROP TABLE PW17_3
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE PW17_3
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE PW17_3(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE PW17_3(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@PW17_3
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@pw17_3
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@pw17_3
+POSTHOOK: Output: default@pw17_3@year=1
+PREHOOK: query: ALTER TABLE PW17_3 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2'
+PREHOOK: type: ALTERPARTITION_SERIALIZER
+PREHOOK: Input: default@pw17_3
+PREHOOK: Output: default@pw17_3@year=1
+POSTHOOK: query: ALTER TABLE PW17_3 PARTITION(YEAR='1') SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe2'
+POSTHOOK: type: ALTERPARTITION_SERIALIZER
+POSTHOOK: Input: default@pw17_3
+POSTHOOK: Input: default@pw17_3@year=1
+POSTHOOK: Output: default@pw17_3@year=1
+PREHOOK: query: ALTER TABLE PW17_3 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@pw17_3
+PREHOOK: Output: default@pw17_3
+POSTHOOK: query: ALTER TABLE PW17_3 SET SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@pw17_3
+POSTHOOK: Output: default@pw17_3
+PREHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via FetchOperator
+SELECT * FROM PW17_3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pw17_3
+PREHOOK: Input: default@pw17_3@year=1
+#### A masked pattern was here ####
+POSTHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via FetchOperator
+SELECT * FROM PW17_3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pw17_3
+POSTHOOK: Input: default@pw17_3@year=1
+#### A masked pattern was here ####
+NULL	NULL	1
+NULL	NULL	1
+NULL	NULL	1
+NULL	NULL	1
+PREHOOK: query: DROP TABLE PW17_4
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE PW17_4
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE PW17_4(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE PW17_4(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@PW17_4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4
+PREHOOK: type: LOAD
+PREHOOK: Output: default@pw17_4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@pw17_4
+PREHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via MapOperator
+SELECT COUNT(*) FROM PW17_4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pw17_4
+#### A masked pattern was here ####
+POSTHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via MapOperator
+SELECT COUNT(*) FROM PW17_4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pw17_4
+#### A masked pattern was here ####
+4

Modified: hive/branches/vectorization/ql/src/test/results/clientpositive/scriptfile1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/clientpositive/scriptfile1.q.out?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/clientpositive/scriptfile1.q.out (original)
+++ hive/branches/vectorization/ql/src/test/results/clientpositive/scriptfile1.q.out Fri Sep 20 00:32:55 2013
@@ -1,6 +1,8 @@
-PREHOOK: query: CREATE TABLE dest1(key INT, value STRING)
+PREHOOK: query: -- EXCLUDE_OS_WINDOWS
+CREATE TABLE dest1(key INT, value STRING)
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING)
+POSTHOOK: query: -- EXCLUDE_OS_WINDOWS
+CREATE TABLE dest1(key INT, value STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
 PREHOOK: query: FROM (

Modified: hive/branches/vectorization/ql/src/test/results/clientpositive/show_functions.q.out
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/clientpositive/show_functions.q.out?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/clientpositive/show_functions.q.out (original)
+++ hive/branches/vectorization/ql/src/test/results/clientpositive/show_functions.q.out Fri Sep 20 00:32:55 2013
@@ -36,6 +36,7 @@ case
 ceil
 ceiling
 coalesce
+collect_list
 collect_set
 compute_stats
 concat
@@ -202,6 +203,7 @@ case
 ceil
 ceiling
 coalesce
+collect_list
 collect_set
 compute_stats
 concat

Modified: hive/branches/vectorization/ql/src/test/results/clientpositive/udaf_collect_set.q.out
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/clientpositive/udaf_collect_set.q.out?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/clientpositive/udaf_collect_set.q.out (original)
+++ hive/branches/vectorization/ql/src/test/results/clientpositive/udaf_collect_set.q.out Fri Sep 20 00:32:55 2013
@@ -8,6 +8,16 @@ PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION EXTENDED collect_set
 POSTHOOK: type: DESCFUNCTION
 collect_set(x) - Returns a set of objects with duplicate elements eliminated
+PREHOOK: query: DESCRIBE FUNCTION collect_list
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION collect_list
+POSTHOOK: type: DESCFUNCTION
+collect_list(x) - Returns a list of objects with duplicates
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED collect_list
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED collect_list
+POSTHOOK: type: DESCFUNCTION
+collect_list(x) - Returns a list of objects with duplicates
 PREHOOK: query: SELECT key, collect_set(value)
 FROM src
 GROUP BY key ORDER BY key limit 20
@@ -40,6 +50,38 @@ POSTHOOK: Input: default@src
 128	["val_128"]
 129	["val_129"]
 131	["val_131"]
+PREHOOK: query: SELECT key, collect_list(value)
+FROM src
+GROUP BY key ORDER by key limit 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key, collect_list(value)
+FROM src
+GROUP BY key ORDER by key limit 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	["val_0","val_0","val_0"]
+10	["val_10"]
+100	["val_100","val_100"]
+103	["val_103","val_103"]
+104	["val_104","val_104"]
+105	["val_105"]
+11	["val_11"]
+111	["val_111"]
+113	["val_113","val_113"]
+114	["val_114"]
+116	["val_116"]
+118	["val_118","val_118"]
+119	["val_119","val_119","val_119"]
+12	["val_12","val_12"]
+120	["val_120","val_120"]
+125	["val_125","val_125"]
+126	["val_126"]
+128	["val_128","val_128","val_128"]
+129	["val_129","val_129"]
+131	["val_131"]
 PREHOOK: query: SELECT key, collect_set(value)
 FROM src
 GROUP BY key ORDER BY key limit 20
@@ -72,6 +114,38 @@ POSTHOOK: Input: default@src
 128	["val_128"]
 129	["val_129"]
 131	["val_131"]
+PREHOOK: query: SELECT key, collect_list(value)
+FROM src
+GROUP BY key ORDER BY key limit 20
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key, collect_list(value)
+FROM src
+GROUP BY key ORDER BY key limit 20
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	["val_0","val_0","val_0"]
+10	["val_10"]
+100	["val_100","val_100"]
+103	["val_103","val_103"]
+104	["val_104","val_104"]
+105	["val_105"]
+11	["val_11"]
+111	["val_111"]
+113	["val_113","val_113"]
+114	["val_114"]
+116	["val_116"]
+118	["val_118","val_118"]
+119	["val_119","val_119","val_119"]
+12	["val_12","val_12"]
+120	["val_120","val_120"]
+125	["val_125","val_125"]
+126	["val_126"]
+128	["val_128","val_128","val_128"]
+129	["val_129","val_129"]
+131	["val_131"]
 PREHOOK: query: SELECT key, collect_set(value)
 FROM src
 GROUP BY key ORDER BY key limit 20

Modified: hive/branches/vectorization/ql/src/test/results/compiler/plan/groupby2.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/compiler/plan/groupby2.q.xml?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/compiler/plan/groupby2.q.xml (original)
+++ hive/branches/vectorization/ql/src/test/results/compiler/plan/groupby2.q.xml Fri Sep 20 00:32:55 2013
@@ -1532,14 +1532,7 @@
                  </object> 
                 </void> 
                 <void property="genericUDF"> 
-                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
-                  <void property="udfClassName"> 
-                   <string>org.apache.hadoop.hive.ql.udf.UDFConcat</string> 
-                  </void> 
-                  <void property="udfName"> 
-                   <string>concat</string> 
-                  </void> 
-                 </object> 
+                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat"/> 
                 </void> 
                 <void property="typeInfo"> 
                  <object idref="PrimitiveTypeInfo0"/> 

Modified: hive/branches/vectorization/ql/src/test/results/compiler/plan/udf6.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/compiler/plan/udf6.q.xml?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/compiler/plan/udf6.q.xml (original)
+++ hive/branches/vectorization/ql/src/test/results/compiler/plan/udf6.q.xml Fri Sep 20 00:32:55 2013
@@ -385,14 +385,7 @@
                  </object> 
                 </void> 
                 <void property="genericUDF"> 
-                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
-                  <void property="udfClassName"> 
-                   <string>org.apache.hadoop.hive.ql.udf.UDFConcat</string> 
-                  </void> 
-                  <void property="udfName"> 
-                   <string>concat</string> 
-                  </void> 
-                 </object> 
+                 <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat"/> 
                 </void> 
                 <void property="typeInfo"> 
                  <object idref="PrimitiveTypeInfo0"/> 

Modified: hive/branches/vectorization/serde/if/serde.thrift
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/if/serde.thrift?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/if/serde.thrift (original)
+++ hive/branches/vectorization/serde/if/serde.thrift Fri Sep 20 00:32:55 2013
@@ -50,6 +50,8 @@ const string BIGINT_TYPE_NAME    = "bigi
 const string FLOAT_TYPE_NAME     = "float";
 const string DOUBLE_TYPE_NAME    = "double";
 const string STRING_TYPE_NAME    = "string";
+const string CHAR_TYPE_NAME      = "char";
+const string VARCHAR_TYPE_NAME   = "varchar";
 const string DATE_TYPE_NAME      = "date";
 const string DATETIME_TYPE_NAME  = "datetime";
 const string TIMESTAMP_TYPE_NAME = "timestamp";
@@ -64,7 +66,7 @@ const string UNION_TYPE_NAME  = "unionty
 const string LIST_COLUMNS = "columns";
 const string LIST_COLUMN_TYPES = "columns.types";
 
-const set<string> PrimitiveTypes  = [ VOID_TYPE_NAME BOOLEAN_TYPE_NAME TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME  DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME DECIMAL_TYPE_NAME BINARY_TYPE_NAME],
+const set<string> PrimitiveTypes  = [ VOID_TYPE_NAME BOOLEAN_TYPE_NAME TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME  VARCHAR_TYPE_NAME CHAR_TYPE_NAME DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME DECIMAL_TYPE_NAME BINARY_TYPE_NAME],
 const set<string> CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ],
 
 

Modified: hive/branches/vectorization/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/gen/thrift/gen-cpp/serde_constants.cpp?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/gen/thrift/gen-cpp/serde_constants.cpp (original)
+++ hive/branches/vectorization/serde/src/gen/thrift/gen-cpp/serde_constants.cpp Fri Sep 20 00:32:55 2013
@@ -57,6 +57,10 @@ serdeConstants::serdeConstants() {
 
   STRING_TYPE_NAME = "string";
 
+  CHAR_TYPE_NAME = "char";
+
+  VARCHAR_TYPE_NAME = "varchar";
+
   DATE_TYPE_NAME = "date";
 
   DATETIME_TYPE_NAME = "datetime";
@@ -88,6 +92,8 @@ serdeConstants::serdeConstants() {
   PrimitiveTypes.insert("float");
   PrimitiveTypes.insert("double");
   PrimitiveTypes.insert("string");
+  PrimitiveTypes.insert("varchar");
+  PrimitiveTypes.insert("char");
   PrimitiveTypes.insert("date");
   PrimitiveTypes.insert("datetime");
   PrimitiveTypes.insert("timestamp");

Modified: hive/branches/vectorization/serde/src/gen/thrift/gen-cpp/serde_constants.h
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/gen/thrift/gen-cpp/serde_constants.h?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/gen/thrift/gen-cpp/serde_constants.h (original)
+++ hive/branches/vectorization/serde/src/gen/thrift/gen-cpp/serde_constants.h Fri Sep 20 00:32:55 2013
@@ -38,6 +38,8 @@ class serdeConstants {
   std::string FLOAT_TYPE_NAME;
   std::string DOUBLE_TYPE_NAME;
   std::string STRING_TYPE_NAME;
+  std::string CHAR_TYPE_NAME;
+  std::string VARCHAR_TYPE_NAME;
   std::string DATE_TYPE_NAME;
   std::string DATETIME_TYPE_NAME;
   std::string TIMESTAMP_TYPE_NAME;

Modified: hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java (original)
+++ hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java Fri Sep 20 00:32:55 2013
@@ -79,6 +79,10 @@ public class serdeConstants {
 
   public static final String STRING_TYPE_NAME = "string";
 
+  public static final String CHAR_TYPE_NAME = "char";
+
+  public static final String VARCHAR_TYPE_NAME = "varchar";
+
   public static final String DATE_TYPE_NAME = "date";
 
   public static final String DATETIME_TYPE_NAME = "datetime";
@@ -112,6 +116,8 @@ public class serdeConstants {
     PrimitiveTypes.add("float");
     PrimitiveTypes.add("double");
     PrimitiveTypes.add("string");
+    PrimitiveTypes.add("varchar");
+    PrimitiveTypes.add("char");
     PrimitiveTypes.add("date");
     PrimitiveTypes.add("datetime");
     PrimitiveTypes.add("timestamp");

Modified: hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java (original)
+++ hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/test/ThriftTestObj.java Fri Sep 20 00:32:55 2013
@@ -528,7 +528,7 @@ public class ThriftTestObj implements or
                 struct.field3 = new ArrayList<InnerStruct>(_list0.size);
                 for (int _i1 = 0; _i1 < _list0.size; ++_i1)
                 {
-                  InnerStruct _elem2; // required
+                  InnerStruct _elem2; // optional
                   _elem2 = new InnerStruct();
                   _elem2.read(iprot);
                   struct.field3.add(_elem2);
@@ -636,7 +636,7 @@ public class ThriftTestObj implements or
           struct.field3 = new ArrayList<InnerStruct>(_list5.size);
           for (int _i6 = 0; _i6 < _list5.size; ++_i6)
           {
-            InnerStruct _elem7; // required
+            InnerStruct _elem7; // optional
             _elem7 = new InnerStruct();
             _elem7.read(iprot);
             struct.field3.add(_elem7);

Modified: hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java (original)
+++ hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/Complex.java Fri Sep 20 00:32:55 2013
@@ -836,7 +836,7 @@ public class Complex implements org.apac
                 struct.lint = new ArrayList<Integer>(_list0.size);
                 for (int _i1 = 0; _i1 < _list0.size; ++_i1)
                 {
-                  int _elem2; // required
+                  int _elem2; // optional
                   _elem2 = iprot.readI32();
                   struct.lint.add(_elem2);
                 }
@@ -854,7 +854,7 @@ public class Complex implements org.apac
                 struct.lString = new ArrayList<String>(_list3.size);
                 for (int _i4 = 0; _i4 < _list3.size; ++_i4)
                 {
-                  String _elem5; // required
+                  String _elem5; // optional
                   _elem5 = iprot.readString();
                   struct.lString.add(_elem5);
                 }
@@ -872,7 +872,7 @@ public class Complex implements org.apac
                 struct.lintString = new ArrayList<IntString>(_list6.size);
                 for (int _i7 = 0; _i7 < _list6.size; ++_i7)
                 {
-                  IntString _elem8; // required
+                  IntString _elem8; // optional
                   _elem8 = new IntString();
                   _elem8.read(iprot);
                   struct.lintString.add(_elem8);
@@ -1074,7 +1074,7 @@ public class Complex implements org.apac
           struct.lint = new ArrayList<Integer>(_list21.size);
           for (int _i22 = 0; _i22 < _list21.size; ++_i22)
           {
-            int _elem23; // required
+            int _elem23; // optional
             _elem23 = iprot.readI32();
             struct.lint.add(_elem23);
           }
@@ -1087,7 +1087,7 @@ public class Complex implements org.apac
           struct.lString = new ArrayList<String>(_list24.size);
           for (int _i25 = 0; _i25 < _list24.size; ++_i25)
           {
-            String _elem26; // required
+            String _elem26; // optional
             _elem26 = iprot.readString();
             struct.lString.add(_elem26);
           }
@@ -1100,7 +1100,7 @@ public class Complex implements org.apac
           struct.lintString = new ArrayList<IntString>(_list27.size);
           for (int _i28 = 0; _i28 < _list27.size; ++_i28)
           {
-            IntString _elem29; // required
+            IntString _elem29; // optional
             _elem29 = new IntString();
             _elem29.read(iprot);
             struct.lintString.add(_elem29);

Modified: hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java (original)
+++ hive/branches/vectorization/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde2/thrift/test/MegaStruct.java Fri Sep 20 00:32:55 2013
@@ -2280,7 +2280,7 @@ public class MegaStruct implements org.a
                     _val19 = new ArrayList<String>(_list20.size);
                     for (int _i21 = 0; _i21 < _list20.size; ++_i21)
                     {
-                      String _elem22; // required
+                      String _elem22; // optional
                       _elem22 = iprot.readString();
                       _val19.add(_elem22);
                     }
@@ -2310,7 +2310,7 @@ public class MegaStruct implements org.a
                     _val26 = new ArrayList<MiniStruct>(_list27.size);
                     for (int _i28 = 0; _i28 < _list27.size; ++_i28)
                     {
-                      MiniStruct _elem29; // required
+                      MiniStruct _elem29; // optional
                       _elem29 = new MiniStruct();
                       _elem29.read(iprot);
                       _val26.add(_elem29);
@@ -2333,7 +2333,7 @@ public class MegaStruct implements org.a
                 struct.my_stringlist = new ArrayList<String>(_list30.size);
                 for (int _i31 = 0; _i31 < _list30.size; ++_i31)
                 {
-                  String _elem32; // required
+                  String _elem32; // optional
                   _elem32 = iprot.readString();
                   struct.my_stringlist.add(_elem32);
                 }
@@ -2351,7 +2351,7 @@ public class MegaStruct implements org.a
                 struct.my_structlist = new ArrayList<MiniStruct>(_list33.size);
                 for (int _i34 = 0; _i34 < _list33.size; ++_i34)
                 {
-                  MiniStruct _elem35; // required
+                  MiniStruct _elem35; // optional
                   _elem35 = new MiniStruct();
                   _elem35.read(iprot);
                   struct.my_structlist.add(_elem35);
@@ -2370,7 +2370,7 @@ public class MegaStruct implements org.a
                 struct.my_enumlist = new ArrayList<MyEnum>(_list36.size);
                 for (int _i37 = 0; _i37 < _list36.size; ++_i37)
                 {
-                  MyEnum _elem38; // required
+                  MyEnum _elem38; // optional
                   _elem38 = MyEnum.findByValue(iprot.readI32());
                   struct.my_enumlist.add(_elem38);
                 }
@@ -2388,7 +2388,7 @@ public class MegaStruct implements org.a
                 struct.my_stringset = new HashSet<String>(2*_set39.size);
                 for (int _i40 = 0; _i40 < _set39.size; ++_i40)
                 {
-                  String _elem41; // required
+                  String _elem41; // optional
                   _elem41 = iprot.readString();
                   struct.my_stringset.add(_elem41);
                 }
@@ -2406,7 +2406,7 @@ public class MegaStruct implements org.a
                 struct.my_enumset = new HashSet<MyEnum>(2*_set42.size);
                 for (int _i43 = 0; _i43 < _set42.size; ++_i43)
                 {
-                  MyEnum _elem44; // required
+                  MyEnum _elem44; // optional
                   _elem44 = MyEnum.findByValue(iprot.readI32());
                   struct.my_enumset.add(_elem44);
                 }
@@ -2424,7 +2424,7 @@ public class MegaStruct implements org.a
                 struct.my_structset = new HashSet<MiniStruct>(2*_set45.size);
                 for (int _i46 = 0; _i46 < _set45.size; ++_i46)
                 {
-                  MiniStruct _elem47; // required
+                  MiniStruct _elem47; // optional
                   _elem47 = new MiniStruct();
                   _elem47.read(iprot);
                   struct.my_structset.add(_elem47);
@@ -3023,7 +3023,7 @@ public class MegaStruct implements org.a
               _val95 = new ArrayList<String>(_list96.size);
               for (int _i97 = 0; _i97 < _list96.size; ++_i97)
               {
-                String _elem98; // required
+                String _elem98; // optional
                 _elem98 = iprot.readString();
                 _val95.add(_elem98);
               }
@@ -3047,7 +3047,7 @@ public class MegaStruct implements org.a
               _val102 = new ArrayList<MiniStruct>(_list103.size);
               for (int _i104 = 0; _i104 < _list103.size; ++_i104)
               {
-                MiniStruct _elem105; // required
+                MiniStruct _elem105; // optional
                 _elem105 = new MiniStruct();
                 _elem105.read(iprot);
                 _val102.add(_elem105);
@@ -3064,7 +3064,7 @@ public class MegaStruct implements org.a
           struct.my_stringlist = new ArrayList<String>(_list106.size);
           for (int _i107 = 0; _i107 < _list106.size; ++_i107)
           {
-            String _elem108; // required
+            String _elem108; // optional
             _elem108 = iprot.readString();
             struct.my_stringlist.add(_elem108);
           }
@@ -3077,7 +3077,7 @@ public class MegaStruct implements org.a
           struct.my_structlist = new ArrayList<MiniStruct>(_list109.size);
           for (int _i110 = 0; _i110 < _list109.size; ++_i110)
           {
-            MiniStruct _elem111; // required
+            MiniStruct _elem111; // optional
             _elem111 = new MiniStruct();
             _elem111.read(iprot);
             struct.my_structlist.add(_elem111);
@@ -3091,7 +3091,7 @@ public class MegaStruct implements org.a
           struct.my_enumlist = new ArrayList<MyEnum>(_list112.size);
           for (int _i113 = 0; _i113 < _list112.size; ++_i113)
           {
-            MyEnum _elem114; // required
+            MyEnum _elem114; // optional
             _elem114 = MyEnum.findByValue(iprot.readI32());
             struct.my_enumlist.add(_elem114);
           }
@@ -3104,7 +3104,7 @@ public class MegaStruct implements org.a
           struct.my_stringset = new HashSet<String>(2*_set115.size);
           for (int _i116 = 0; _i116 < _set115.size; ++_i116)
           {
-            String _elem117; // required
+            String _elem117; // optional
             _elem117 = iprot.readString();
             struct.my_stringset.add(_elem117);
           }
@@ -3117,7 +3117,7 @@ public class MegaStruct implements org.a
           struct.my_enumset = new HashSet<MyEnum>(2*_set118.size);
           for (int _i119 = 0; _i119 < _set118.size; ++_i119)
           {
-            MyEnum _elem120; // required
+            MyEnum _elem120; // optional
             _elem120 = MyEnum.findByValue(iprot.readI32());
             struct.my_enumset.add(_elem120);
           }
@@ -3130,7 +3130,7 @@ public class MegaStruct implements org.a
           struct.my_structset = new HashSet<MiniStruct>(2*_set121.size);
           for (int _i122 = 0; _i122 < _set121.size; ++_i122)
           {
-            MiniStruct _elem123; // required
+            MiniStruct _elem123; // optional
             _elem123 = new MiniStruct();
             _elem123.read(iprot);
             struct.my_structset.add(_elem123);

Modified: hive/branches/vectorization/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php (original)
+++ hive/branches/vectorization/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php Fri Sep 20 00:32:55 2013
@@ -62,6 +62,10 @@ $GLOBALS['serde_CONSTANTS']['DOUBLE_TYPE
 
 $GLOBALS['serde_CONSTANTS']['STRING_TYPE_NAME'] = "string";
 
+$GLOBALS['serde_CONSTANTS']['CHAR_TYPE_NAME'] = "char";
+
+$GLOBALS['serde_CONSTANTS']['VARCHAR_TYPE_NAME'] = "varchar";
+
 $GLOBALS['serde_CONSTANTS']['DATE_TYPE_NAME'] = "date";
 
 $GLOBALS['serde_CONSTANTS']['DATETIME_TYPE_NAME'] = "datetime";
@@ -94,6 +98,8 @@ $GLOBALS['serde_CONSTANTS']['PrimitiveTy
   "float" => true,
   "double" => true,
   "string" => true,
+  "varchar" => true,
+  "char" => true,
   "date" => true,
   "datetime" => true,
   "timestamp" => true,

Modified: hive/branches/vectorization/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py (original)
+++ hive/branches/vectorization/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py Fri Sep 20 00:32:55 2013
@@ -32,6 +32,8 @@ BIGINT_TYPE_NAME = "bigint"
 FLOAT_TYPE_NAME = "float"
 DOUBLE_TYPE_NAME = "double"
 STRING_TYPE_NAME = "string"
+CHAR_TYPE_NAME = "char"
+VARCHAR_TYPE_NAME = "varchar"
 DATE_TYPE_NAME = "date"
 DATETIME_TYPE_NAME = "datetime"
 TIMESTAMP_TYPE_NAME = "timestamp"
@@ -53,6 +55,8 @@ PrimitiveTypes = set([
   "float",
   "double",
   "string",
+  "varchar",
+  "char",
   "date",
   "datetime",
   "timestamp",

Modified: hive/branches/vectorization/serde/src/gen/thrift/gen-rb/serde_constants.rb
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/gen/thrift/gen-rb/serde_constants.rb?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/gen/thrift/gen-rb/serde_constants.rb (original)
+++ hive/branches/vectorization/serde/src/gen/thrift/gen-rb/serde_constants.rb Fri Sep 20 00:32:55 2013
@@ -53,6 +53,10 @@ DOUBLE_TYPE_NAME = %q"double"
 
 STRING_TYPE_NAME = %q"string"
 
+CHAR_TYPE_NAME = %q"char"
+
+VARCHAR_TYPE_NAME = %q"varchar"
+
 DATE_TYPE_NAME = %q"date"
 
 DATETIME_TYPE_NAME = %q"datetime"
@@ -85,6 +89,8 @@ PrimitiveTypes = Set.new([
   %q"float",
   %q"double",
   %q"string",
+  %q"varchar",
+  %q"char",
   %q"date",
   %q"datetime",
   %q"timestamp",

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java Fri Sep 20 00:32:55 2013
@@ -30,13 +30,19 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
@@ -119,7 +125,8 @@ public class RegexSerDe extends Abstract
      */
     List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(columnNames.size());
     for (int c = 0; c < numColumns; c++) {
-      String typeName = columnTypes.get(c).getTypeName();
+      TypeInfo typeInfo = columnTypes.get(c);
+      String typeName = typeInfo.getTypeName();
       if (typeName.equals(serdeConstants.STRING_TYPE_NAME)) {
         columnOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
       } else if (typeName.equals(serdeConstants.TINYINT_TYPE_NAME)) {
@@ -142,6 +149,13 @@ public class RegexSerDe extends Abstract
         columnOIs.add(PrimitiveObjectInspectorFactory.javaDateObjectInspector);
       } else if (typeName.equals(serdeConstants.DECIMAL_TYPE_NAME)) {
         columnOIs.add(PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector);
+      }  else if (typeInfo instanceof PrimitiveTypeInfo
+          &&
+          ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory() == PrimitiveCategory.VARCHAR) {
+        VarcharTypeParams varcharParams = (VarcharTypeParams)
+            ParameterizedPrimitiveTypeUtils.getTypeParamsFromTypeInfo(typeInfo);
+        columnOIs.add(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
+            (PrimitiveTypeInfo) typeInfo));
       } else {
          throw new SerDeException(getClass().getName()
          + " doesn't allow column [" + c + "] named "
@@ -202,7 +216,8 @@ public class RegexSerDe extends Abstract
     for (int c = 0; c < numColumns; c++) {
       try {
         String t = m.group(c+1);
-        String typeName = columnTypes.get(c).getTypeName();
+        TypeInfo typeInfo = columnTypes.get(c);
+        String typeName = typeInfo.getTypeName();
 
         // Convert the column to the correct type when needed and set in row obj
         if (typeName.equals(serdeConstants.STRING_TYPE_NAME)) {
@@ -247,6 +262,13 @@ public class RegexSerDe extends Abstract
           HiveDecimal bd;
           bd = new HiveDecimal(t);
           row.set(c, bd);
+        } else if (typeInfo instanceof PrimitiveTypeInfo
+            &&
+            ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory() == PrimitiveCategory.VARCHAR) {
+          VarcharTypeParams varcharParams = (VarcharTypeParams)
+              ParameterizedPrimitiveTypeUtils.getTypeParamsFromTypeInfo(typeInfo);
+          HiveVarchar hv = new HiveVarchar(t, varcharParams != null ? varcharParams.length : -1);
+          row.set(c, hv);
         }
       } catch (RuntimeException e) {
          partialMatchedRowsCount++;

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java Fri Sep 20 00:32:55 2013
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
@@ -283,6 +284,13 @@ public final class SerDeUtils {
           sb.append('"');
           break;
         }
+        case VARCHAR: {
+          sb.append('"');
+          sb.append(escapeString(((HiveVarcharObjectInspector) poi)
+              .getPrimitiveJavaObject(o).toString()));
+          sb.append('"');
+          break;
+        }
         case DATE: {
           sb.append('"');
           sb.append(((DateObjectInspector) poi)

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorGenerator.java Fri Sep 20 00:32:55 2013
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -91,8 +92,7 @@ class AvroObjectInspectorGenerator {
     switch(ti.getCategory()) {
       case PRIMITIVE:
         PrimitiveTypeInfo pti = (PrimitiveTypeInfo)ti;
-        result = PrimitiveObjectInspectorFactory
-                .getPrimitiveJavaObjectInspector(pti.getPrimitiveCategory());
+        result = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
         break;
       case STRUCT:
         StructTypeInfo sti = (StructTypeInfo)ti;

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Fri Sep 20 00:32:55 2013
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.serde2.bi
 
 import java.io.IOException;
 import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.nio.charset.CharacterCodingException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -31,6 +33,7 @@ import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
@@ -39,6 +42,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -56,14 +60,18 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils.HiveVarcharSerDeHelper;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -276,48 +284,18 @@ public class BinarySortableSerDe extends
       }
       case STRING: {
         Text r = reuse == null ? new Text() : (Text) reuse;
-        // Get the actual length first
-        int start = buffer.tell();
-        int length = 0;
-        do {
-          byte b = buffer.read(invert);
-          if (b == 0) {
-            // end of string
-            break;
-          }
-          if (b == 1) {
-            // the last char is an escape char. read the actual char
-            buffer.read(invert);
-          }
-          length++;
-        } while (true);
+        return deserializeText(buffer, invert, r);
+      }
 
-        if (length == buffer.tell() - start) {
-          // No escaping happened, so we are already done.
-          r.set(buffer.getData(), start, length);
-        } else {
-          // Escaping happened, we need to copy byte-by-byte.
-          // 1. Set the length first.
-          r.set(buffer.getData(), start, length);
-          // 2. Reset the pointer.
-          buffer.seek(start);
-          // 3. Copy the data.
-          byte[] rdata = r.getBytes();
-          for (int i = 0; i < length; i++) {
-            byte b = buffer.read(invert);
-            if (b == 1) {
-              // The last char is an escape char, read the actual char.
-              // The serialization format escape \0 to \1, and \1 to \2,
-              // to make sure the string is null-terminated.
-              b = (byte) (buffer.read(invert) - 1);
-            }
-            rdata[i] = b;
-          }
-          // 4. Read the null terminator.
-          byte b = buffer.read(invert);
-          assert (b == 0);
-        }
-        return r;
+      case VARCHAR: {
+        HiveVarcharWritable r =
+            reuse == null ? new HiveVarcharWritable() : (HiveVarcharWritable) reuse;
+            // Use HiveVarchar's internal Text member to read the value.
+            deserializeText(buffer, invert, r.getTextValue());
+            // If we cache helper data for deserialization we could avoid having
+            // to call getVarcharMaxLength() on every deserialize call.
+            r.enforceMaxLength(getVarcharMaxLength(type));
+            return r;
       }
 
       case BINARY: {
@@ -552,6 +530,60 @@ public class BinarySortableSerDe extends
     return v;
   }
 
+  static int getVarcharMaxLength(TypeInfo type) {
+    VarcharTypeParams typeParams = (VarcharTypeParams) ((PrimitiveTypeInfo) type).getTypeParams();
+    if (typeParams != null ) {
+      return typeParams.length;
+    }
+    return -1;
+  }
+
+  static Text deserializeText(InputByteBuffer buffer, boolean invert, Text r)
+      throws IOException {
+    // Get the actual length first
+    int start = buffer.tell();
+    int length = 0;
+    do {
+      byte b = buffer.read(invert);
+      if (b == 0) {
+        // end of string
+        break;
+      }
+      if (b == 1) {
+        // the last char is an escape char. read the actual char
+        buffer.read(invert);
+      }
+      length++;
+    } while (true);
+
+    if (length == buffer.tell() - start) {
+      // No escaping happened, so we are already done.
+      r.set(buffer.getData(), start, length);
+    } else {
+      // Escaping happened, we need to copy byte-by-byte.
+      // 1. Set the length first.
+      r.set(buffer.getData(), start, length);
+      // 2. Reset the pointer.
+      buffer.seek(start);
+      // 3. Copy the data.
+      byte[] rdata = r.getBytes();
+      for (int i = 0; i < length; i++) {
+        byte b = buffer.read(invert);
+        if (b == 1) {
+          // The last char is an escape char, read the actual char.
+          // The serialization format escape \0 to \1, and \1 to \2,
+          // to make sure the string is null-terminated.
+          b = (byte) (buffer.read(invert) - 1);
+        }
+        rdata[i] = b;
+      }
+      // 4. Read the null terminator.
+      byte b = buffer.read(invert);
+      assert (b == 0);
+    }
+    return r;
+  }
+
   BytesWritable serializeBytesWritable = new BytesWritable();
   OutputByteBuffer outputByteBuffer = new OutputByteBuffer();
 
@@ -572,7 +604,7 @@ public class BinarySortableSerDe extends
   }
 
   static void serialize(OutputByteBuffer buffer, Object o, ObjectInspector oi,
-      boolean invert) {
+      boolean invert) throws SerDeException {
     // Is this field a null?
     if (o == null) {
       buffer.write((byte) 0, invert);
@@ -668,6 +700,15 @@ public class BinarySortableSerDe extends
         return;
           }
 
+      case VARCHAR: {
+        HiveVarcharObjectInspector hcoi = (HiveVarcharObjectInspector)poi;
+        HiveVarcharWritable hc = hcoi.getPrimitiveWritableObject(o);
+        // use varchar's text field directly
+        Text t = hc.getTextValue();
+        serializeBytes(buffer, t.getBytes(), t.getLength(), invert);
+        return;
+      }
+
       case BINARY: {
         BinaryObjectInspector baoi = (BinaryObjectInspector) poi;
         BytesWritable ba = baoi.getPrimitiveWritableObject(o);

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java Fri Sep 20 00:32:55 2013
@@ -35,8 +35,10 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
 import org.apache.hadoop.hive.serde2.thrift.ConfigurableTProtocol;
 import org.apache.hadoop.hive.serde2.thrift.TReflectionUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.ParameterizedPrimitiveTypeUtils;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -173,9 +175,9 @@ public class DynamicSerDe extends Abstra
           dynamicSerDeStructBaseToObjectInspector(btMap.getKeyType()),
           dynamicSerDeStructBaseToObjectInspector(btMap.getValueType()));
     } else if (bt.isPrimitive()) {
-      return PrimitiveObjectInspectorFactory
-          .getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils
-          .getTypeEntryFromPrimitiveJavaClass(bt.getRealType()).primitiveCategory);
+      PrimitiveTypeEntry pte = PrimitiveObjectInspectorUtils
+          .getTypeEntryFromPrimitiveJavaClass(bt.getRealType());
+      return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pte);
     } else {
       // Must be a struct
       DynamicSerDeStructBase btStruct = (DynamicSerDeStructBase) bt;

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java Fri Sep 20 00:32:55 2013
@@ -17,7 +17,7 @@
  */
 
 /**
- * This file is back-ported from hadoop-0.19, to make sure hive can run 
+ * This file is back-ported from hadoop-0.19, to make sure hive can run
  * with hadoop-0.17.
  */
 package org.apache.hadoop.hive.serde2.io;

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java Fri Sep 20 00:32:55 2013
@@ -27,18 +27,19 @@ import org.apache.hadoop.hive.serde2.laz
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyUnionObjectInspector;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyBinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyBooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyByteObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDateObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyFloatObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyLongObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDateObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyVoidObjectInspector;
 import org.apache.hadoop.hive.serde2.lazydio.LazyDioBoolean;
@@ -53,6 +54,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -112,6 +114,8 @@ public final class LazyFactory {
       return new LazyDouble((LazyDoubleObjectInspector) oi);
     case STRING:
       return new LazyString((LazyStringObjectInspector) oi);
+    case VARCHAR:
+      return new LazyHiveVarchar((LazyHiveVarcharObjectInspector) oi);
     case DATE:
       return new LazyDate((LazyDateObjectInspector) oi);
     case TIMESTAMP:
@@ -215,9 +219,9 @@ public final class LazyFactory {
     ObjectInspector.Category c = typeInfo.getCategory();
     switch (c) {
     case PRIMITIVE:
+      BaseTypeParams typeParams = ((PrimitiveTypeInfo)typeInfo).getTypeParams();
       return LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(
-          ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory(), escaped,
-          escapeChar);
+          (PrimitiveTypeInfo) typeInfo, escaped, escapeChar);
     case MAP:
       return LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(
           createLazyObjectInspector(((MapTypeInfo) typeInfo)

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyPrimitive.java Fri Sep 20 00:32:55 2013
@@ -69,10 +69,12 @@ public abstract class LazyPrimitive<OI e
 
   public void logExceptionMessage(ByteArrayRef bytes, int start, int length, String dataType) {
     try {
-      String byteData = Text.decode(bytes.getData(), start, length);
-      LOG.debug("Data not in the " + dataType
-          + " data type range so converted to null. Given data is :" +
-                  byteData);
+      if(LOG.isDebugEnabled()) {
+        String byteData = Text.decode(bytes.getData(), start, length);
+        LOG.debug("Data not in the " + dataType
+            + " data type range so converted to null. Given data is :" +
+                    byteData, new Exception("For debugging purposes"));
+      }
     } catch (CharacterCodingException e1) {
       LOG.debug("Data not in the " + dataType + " data type range so converted to null.", e1);
     }

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java Fri Sep 20 00:32:55 2013
@@ -27,10 +27,11 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Properties;
 
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
@@ -39,6 +40,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
@@ -225,6 +227,13 @@ public final class LazyUtils {
       break;
     }
 
+    case VARCHAR: {
+      HiveVarcharWritable hc = ((HiveVarcharObjectInspector)oi).getPrimitiveWritableObject(o);
+      Text t = hc.getTextValue();
+      writeEscaped(out, t.getBytes(), 0, t.getLength(), escaped, escapeChar,
+          needsEscape);
+      break;
+    }
     case BINARY: {
       BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
       byte[] toEncode = new byte[bw.getLength()];

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyListObjectInspector.java Fri Sep 20 00:32:55 2013
@@ -29,7 +29,7 @@ import org.apache.hadoop.io.Text;
 
 /**
  * LazyListObjectInspector works on array data that is stored in LazyArray.
- * 
+ *
  * Always use the ObjectInspectorFactory to create new ObjectInspector objects,
  * instead of directly creating an instance of this class.
  */
@@ -38,13 +38,15 @@ public class LazyListObjectInspector imp
   public static final Log LOG = LogFactory.getLog(LazyListObjectInspector.class
       .getName());
 
-  ObjectInspector listElementObjectInspector;
-
-  byte separator;
-  Text nullSequence;
-  boolean escaped;
-  byte escapeChar;
+  private ObjectInspector listElementObjectInspector;
+  private byte separator;
+  private Text nullSequence;
+  private boolean escaped;
+  private byte escapeChar;
 
+  protected LazyListObjectInspector() {
+    super();
+  }
   /**
    * Call ObjectInspectorFactory.getLazySimpleListObjectInspector instead.
    */

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyMapObjectInspector.java Fri Sep 20 00:32:55 2013
@@ -29,7 +29,7 @@ import org.apache.hadoop.io.Text;
 
 /**
  * LazyMapObjectInspector works on struct data that is stored in LazyStruct.
- * 
+ *
  * Always use the ObjectInspectorFactory to create new ObjectInspector objects,
  * instead of directly creating an instance of this class.
  */
@@ -38,15 +38,17 @@ public class LazyMapObjectInspector impl
   public static final Log LOG = LogFactory.getLog(LazyMapObjectInspector.class
       .getName());
 
-  ObjectInspector mapKeyObjectInspector;
-  ObjectInspector mapValueObjectInspector;
-
-  byte itemSeparator;
-  byte keyValueSeparator;
-  Text nullSequence;
-  boolean escaped;
-  byte escapeChar;
+  private ObjectInspector mapKeyObjectInspector;
+  private ObjectInspector mapValueObjectInspector;
+  private byte itemSeparator;
+  private byte keyValueSeparator;
+  private Text nullSequence;
+  private boolean escaped;
+  private byte escapeChar;
 
+  protected LazyMapObjectInspector() {
+    super();
+  }
   /**
    * Call ObjectInspectorFactory.getStandardListObjectInspector instead.
    */

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazySimpleStructObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazySimpleStructObjectInspector.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazySimpleStructObjectInspector.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazySimpleStructObjectInspector.java Fri Sep 20 00:32:55 2013
@@ -33,10 +33,10 @@ import org.apache.hadoop.io.Text;
 /**
  * LazySimpleStructObjectInspector works on struct data that is stored in
  * LazyStruct.
- * 
+ *
  * The names of the struct fields and the internal structure of the struct
  * fields are specified in the ctor of the LazySimpleStructObjectInspector.
- * 
+ *
  * Always use the ObjectInspectorFactory to create new ObjectInspector objects,
  * instead of directly creating an instance of this class.
  */
@@ -51,6 +51,9 @@ public class LazySimpleStructObjectInspe
     protected ObjectInspector fieldObjectInspector;
     protected String fieldComment;
 
+    protected MyField() {
+      super();
+    }
     public MyField(int fieldID, String fieldName,
         ObjectInspector fieldObjectInspector) {
       this.fieldID = fieldID;
@@ -85,19 +88,16 @@ public class LazySimpleStructObjectInspe
     }
   }
 
-  protected List<MyField> fields;
+  private List<MyField> fields;
+  private byte separator;
+  private Text nullSequence;
+  private boolean lastColumnTakesRest;
+  private boolean escaped;
+  private byte escapeChar;
 
-  @Override
-  public String getTypeName() {
-    return ObjectInspectorUtils.getStandardStructTypeName(this);
+  protected LazySimpleStructObjectInspector() {
+    super();
   }
-
-  byte separator;
-  Text nullSequence;
-  boolean lastColumnTakesRest;
-  boolean escaped;
-  byte escapeChar;
-
   /**
    * Call ObjectInspectorFactory.getLazySimpleStructObjectInspector instead.
    */
@@ -158,6 +158,11 @@ public class LazySimpleStructObjectInspe
   }
 
   @Override
+  public String getTypeName() {
+    return ObjectInspectorUtils.getStandardStructTypeName(this);
+  }
+
+  @Override
   public final Category getCategory() {
     return Category.STRUCT;
   }

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyUnionObjectInspector.java Fri Sep 20 00:32:55 2013
@@ -41,18 +41,16 @@ public class LazyUnionObjectInspector im
   public static final Log LOG = LogFactory
       .getLog(LazyUnionObjectInspector.class.getName());
 
-  protected List<ObjectInspector> ois;
 
-  @Override
-  public String getTypeName() {
-    return ObjectInspectorUtils.getStandardUnionTypeName(this);
-  }
-
-  byte separator;
-  Text nullSequence;
-  boolean escaped;
-  byte escapeChar;
+  private List<ObjectInspector> ois;
+  private byte separator;
+  private Text nullSequence;
+  private boolean escaped;
+  private byte escapeChar;
 
+  protected LazyUnionObjectInspector() {
+    super();
+  }
   protected LazyUnionObjectInspector(
       List<ObjectInspector> ois, byte separator,
       Text nullSequence, boolean escaped,
@@ -61,6 +59,11 @@ public class LazyUnionObjectInspector im
         nullSequence, escaped, escapeChar);
   }
 
+  @Override
+  public String getTypeName() {
+    return ObjectInspectorUtils.getStandardUnionTypeName(this);
+  }
+
   protected void init(
       List<ObjectInspector> ois, byte separator,
       Text nullSequence, boolean escaped,

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java Fri Sep 20 00:32:55 2013
@@ -28,6 +28,9 @@ import org.apache.hadoop.io.Writable;
 public abstract class AbstractPrimitiveLazyObjectInspector<T extends Writable>
     extends AbstractPrimitiveObjectInspector {
 
+  protected AbstractPrimitiveLazyObjectInspector() {
+    super();
+  }
   protected AbstractPrimitiveLazyObjectInspector(PrimitiveTypeEntry typeEntry) {
     super(typeEntry);
   }

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBinaryObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBinaryObjectInspector.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBinaryObjectInspector.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyBinaryObjectInspector.java Fri Sep 20 00:32:55 2013
@@ -28,7 +28,7 @@ public class LazyBinaryObjectInspector e
   AbstractPrimitiveLazyObjectInspector<BytesWritable> implements
     BinaryObjectInspector {
 
-  protected LazyBinaryObjectInspector() {
+  public LazyBinaryObjectInspector() {
     super(PrimitiveObjectInspectorUtils.binaryTypeEntry);
   }
 

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java Fri Sep 20 00:32:55 2013
@@ -21,7 +21,13 @@ package org.apache.hadoop.hive.serde2.la
 import java.util.ArrayList;
 import java.util.HashMap;
 
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseTypeParams;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeSpec;
 
 /**
  * LazyPrimitiveObjectInspectorFactory is the primary way to create new
@@ -61,6 +67,8 @@ public final class LazyPrimitiveObjectIn
       new LazyBinaryObjectInspector();
   public static final LazyHiveDecimalObjectInspector LAZY_BIG_DECIMAL_OBJECT_INSPECTOR =
       new LazyHiveDecimalObjectInspector();
+  public static final LazyHiveVarcharObjectInspector LAZY_VARCHAR_OBJECT_INSPECTOR =
+      new LazyHiveVarcharObjectInspector(PrimitiveObjectInspectorUtils.varcharTypeEntry);
 
   static HashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector =
       new HashMap<ArrayList<Object>, LazyStringObjectInspector>();
@@ -79,9 +87,38 @@ public final class LazyPrimitiveObjectIn
     return result;
   }
 
+  static PrimitiveObjectInspectorUtils.ParameterizedObjectInspectorMap
+    cachedParameterizedLazyObjectInspectors =
+      new PrimitiveObjectInspectorUtils.ParameterizedObjectInspectorMap();
+
+  public static PrimitiveObjectInspector getParameterizedObjectInspector(
+      PrimitiveTypeSpec typeSpec) {
+    PrimitiveCategory primitiveCategory = typeSpec.getPrimitiveCategory();
+    BaseTypeParams typeParams = typeSpec.getTypeParams();
+    PrimitiveObjectInspector poi =
+        cachedParameterizedLazyObjectInspectors.getObjectInspector(typeSpec);
+    if (poi == null) {
+      // Object inspector hasn't been cached for this type/params yet, create now
+      switch (primitiveCategory) {
+        case VARCHAR:
+          PrimitiveTypeEntry typeEntry = PrimitiveObjectInspectorUtils.getTypeEntryFromTypeSpecs(
+              primitiveCategory,
+              typeParams);
+          poi = new LazyHiveVarcharObjectInspector(typeEntry);
+          poi.setTypeParams(typeParams);
+          cachedParameterizedLazyObjectInspectors.setObjectInspector(poi);
+          break;
+
+        default:
+          throw new RuntimeException(
+              "Primitive type " + primitiveCategory + " should not take parameters");
+      }
+    }
+
+    return poi;
+  }
   public static AbstractPrimitiveLazyObjectInspector<?> getLazyObjectInspector(
       PrimitiveCategory primitiveCategory, boolean escaped, byte escapeChar) {
-
     switch (primitiveCategory) {
     case BOOLEAN:
       return LAZY_BOOLEAN_OBJECT_INSPECTOR;
@@ -99,6 +136,8 @@ public final class LazyPrimitiveObjectIn
       return LAZY_DOUBLE_OBJECT_INSPECTOR;
     case STRING:
       return getLazyStringObjectInspector(escaped, escapeChar);
+    case VARCHAR:
+      return LAZY_VARCHAR_OBJECT_INSPECTOR;
     case BINARY:
       return LAZY_BINARY_OBJECT_INSPECTOR;
     case VOID:
@@ -115,6 +154,26 @@ public final class LazyPrimitiveObjectIn
     }
   }
 
+  public static AbstractPrimitiveLazyObjectInspector<?> getLazyObjectInspector(
+      PrimitiveTypeSpec typeSpec, boolean escaped, byte escapeChar) {
+    PrimitiveCategory primitiveCategory = typeSpec.getPrimitiveCategory();
+    BaseTypeParams typeParams = typeSpec.getTypeParams();
+
+    if (typeParams == null) {
+      return getLazyObjectInspector(primitiveCategory, escaped, escapeChar);
+    } else {
+      switch(primitiveCategory) {
+        case VARCHAR:
+          LazyHiveVarcharObjectInspector oi = (LazyHiveVarcharObjectInspector)
+            getParameterizedObjectInspector(typeSpec);
+          return oi;
+
+        default:
+          throw new RuntimeException("Type " + primitiveCategory + " does not take parameters");
+      }
+    }
+  }
+
   private LazyPrimitiveObjectInspectorFactory() {
     // prevent instantiation
   }

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyStringObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyStringObjectInspector.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyStringObjectInspector.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyStringObjectInspector.java Fri Sep 20 00:32:55 2013
@@ -28,8 +28,12 @@ import org.apache.hadoop.io.Text;
 public class LazyStringObjectInspector extends
     AbstractPrimitiveLazyObjectInspector<Text> implements StringObjectInspector {
 
-  boolean escaped;
-  byte escapeChar;
+  private boolean escaped;
+  private byte escapeChar;
+
+  protected LazyStringObjectInspector() {
+    super();
+  }
 
   LazyStringObjectInspector(boolean escaped, byte escapeChar) {
     super(PrimitiveObjectInspectorUtils.stringTypeEntry);

Modified: hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java?rev=1524874&r1=1524873&r2=1524874&view=diff
==============================================================================
--- hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java (original)
+++ hive/branches/vectorization/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java Fri Sep 20 00:32:55 2013
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBooleanObjectInspector;
@@ -71,6 +72,8 @@ public final class LazyBinaryFactory {
       return new LazyBinaryDouble((WritableDoubleObjectInspector) oi);
     case STRING:
       return new LazyBinaryString((WritableStringObjectInspector) oi);
+    case VARCHAR:
+      return new LazyBinaryHiveVarchar((WritableHiveVarcharObjectInspector) oi);
     case VOID: // for NULL
       return new LazyBinaryVoid((WritableVoidObjectInspector) oi);
     case DATE:



Mime
View raw message