hive-commits mailing list archives

From: rhbut...@apache.org
Subject: svn commit: r1548312 [2/2] - in /hive/trunk: data/files/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src/java/org/apache/hadoop/hive/ql/io/orc/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/
Date: Thu, 05 Dec 2013 21:42:20 GMT
Modified: hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out?rev=1548312&r1=1548311&r2=1548312&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/annotate_stats_select.q.out Thu Dec  5 21:42:20 2013
@@ -10,6 +10,7 @@ PREHOOK: query: create table if not exis
  ts1 timestamp,
  da1 timestamp,
  s1 string,
+ vc1 varchar(5),
  m1 map<string, string>,
  l1 array<int>,
  st1 struct<c1:int, c2:string>
@@ -29,6 +30,7 @@ POSTHOOK: query: create table if not exi
  ts1 timestamp,
  da1 timestamp,
  s1 string,
+ vc1 varchar(5),
  m1 map<string, string>,
  l1 array<int>,
  st1 struct<c1:int, c2:string>
@@ -78,6 +80,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 PREHOOK: query: -- basicStatState: COMPLETE colStatState: NONE numRows: 2 rawDataSize: 1514
 explain extended select * from alltypes_orc
 PREHOOK: type: QUERY
@@ -98,6 +101,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
 
@@ -112,7 +116,7 @@ STAGE PLANS:
         TableScan
           alias: alltypes_orc
           Statistics:
-              numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: NONE
+              numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: NONE
           GatherStats: false
           Select Operator
             expressions:
@@ -138,25 +142,27 @@ STAGE PLANS:
                   type: timestamp
                   expr: s1
                   type: string
+                  expr: vc1
+                  type: varchar(5)
                   expr: m1
                   type: map<string,string>
                   expr: l1
                   type: array<int>
                   expr: st1
                   type: struct<c1:int,c2:string>
-            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: NONE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: NONE
             ListSink
 
 
 PREHOOK: query: -- statistics for complex types are not supported yet
-analyze table alltypes_orc compute statistics for columns bo1, ti1, si1, i1, bi1, f1, d1,s1
+analyze table alltypes_orc compute statistics for columns bo1, ti1, si1, i1, bi1, f1, d1, s1, vc1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@alltypes_orc
 #### A masked pattern was here ####
 POSTHOOK: query: -- statistics for complex types are not supported yet
-analyze table alltypes_orc compute statistics for columns bo1, ti1, si1, i1, bi1, f1, d1,s1
+analyze table alltypes_orc compute statistics for columns bo1, ti1, si1, i1, bi1, f1, d1, s1, vc1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypes_orc
 #### A masked pattern was here ####
@@ -174,6 +180,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 PREHOOK: query: -- numRows: 2 rawDataSize: 1514
 explain extended select * from alltypes_orc
 PREHOOK: type: QUERY
@@ -194,6 +201,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
 
@@ -208,7 +216,7 @@ STAGE PLANS:
         TableScan
           alias: alltypes_orc
           Statistics:
-              numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: PARTIAL
+              numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: PARTIAL
           GatherStats: false
           Select Operator
             expressions:
@@ -234,15 +242,17 @@ STAGE PLANS:
                   type: timestamp
                   expr: s1
                   type: string
+                  expr: vc1
+                  type: varchar(5)
                   expr: m1
                   type: map<string,string>
                   expr: l1
                   type: array<int>
                   expr: st1
                   type: struct<c1:int,c2:string>
-            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: PARTIAL
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: PARTIAL
             ListSink
 
 
@@ -266,6 +276,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL bo1)))))
 
@@ -281,7 +292,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -324,19 +335,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -346,19 +357,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -393,6 +404,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL i1) int1))))
 
@@ -408,7 +420,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -451,19 +463,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -473,19 +485,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -518,6 +530,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL s1)))))
 
@@ -533,7 +546,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -576,19 +589,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -598,19 +611,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -645,6 +658,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL m1)))))
 
@@ -660,7 +674,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: NONE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: NONE
             GatherStats: false
             Select Operator
               expressions:
@@ -668,14 +682,14 @@ STAGE PLANS:
                     type: map<string,string>
               outputColumnNames: _col0
               Statistics:
-                  numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: NONE
+                  numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: NONE
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
 #### A masked pattern was here ####
                 NumFilesPerFileSink: 1
                 Statistics:
-                    numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: NONE
+                    numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: NONE
 #### A masked pattern was here ####
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
@@ -703,19 +717,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -725,19 +739,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -770,6 +784,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL bo1)) (TOK_SELEXPR (TOK_TABLE_OR_COL ti1)) (TOK_SELEXPR (TOK_TABLE_OR_COL si1)) (TOK_SELEXPR (TOK_TABLE_OR_COL i1)) (TOK_SELEXPR (TOK_TABLE_OR_COL bi1)) (TOK_SELEXPR (TOK_TABLE_OR_COL f1)) (TOK_SELEXPR (TOK_TABLE_OR_COL d1)) (TOK_SELEXPR (TOK_TABLE_OR_COL s1)))))
 
@@ -785,7 +800,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -842,19 +857,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -864,19 +879,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -909,6 +924,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_NULL))))
 
@@ -924,7 +940,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -967,19 +983,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -989,19 +1005,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -1034,6 +1050,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 11))))
 
@@ -1049,7 +1066,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -1092,19 +1109,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -1114,19 +1131,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -1159,6 +1176,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 11L))))
 
@@ -1174,7 +1192,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -1217,19 +1235,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -1239,19 +1257,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -1284,6 +1302,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 11.0))))
 
@@ -1299,7 +1318,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -1342,19 +1361,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -1364,19 +1383,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -1409,6 +1428,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR "hello"))))
 
@@ -1424,7 +1444,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -1467,19 +1487,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -1489,19 +1509,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -1532,6 +1552,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION (TOK_CHAR 5) "hello")))))
 
@@ -1547,7 +1568,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -1590,19 +1611,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -1612,19 +1633,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -1655,6 +1676,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION (TOK_VARCHAR 5) "hello")))))
 
@@ -1670,7 +1692,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -1713,19 +1735,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -1735,19 +1757,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -1780,6 +1802,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION unbase64 "0xe23")))))
 
@@ -1795,7 +1818,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -1838,19 +1861,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -1860,19 +1883,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -1905,6 +1928,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION TOK_TINYINT "1")) (TOK_SELEXPR (TOK_FUNCTION TOK_SMALLINT "20")))))
 
@@ -1920,7 +1944,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -1965,19 +1989,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -1987,19 +2011,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -2032,6 +2056,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION TOK_TIMESTAMP "1970-12-31 15:59:58.174")))))
 
@@ -2047,7 +2072,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -2090,19 +2115,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -2112,19 +2137,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -2157,6 +2182,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION TOK_DATE "1970-12-31 15:59:58.174")))))
 
@@ -2172,7 +2198,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -2215,19 +2241,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -2237,19 +2263,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -2282,6 +2308,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION TOK_DECIMAL "58.174")))))
 
@@ -2297,7 +2324,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -2340,19 +2367,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -2362,19 +2389,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
                 field.delim |
 #### A masked pattern was here ####
                 mapkey.delim :
                 name default.alltypes_orc
                 numFiles 1
                 numRows 2
-                rawDataSize 1514
-                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+                rawDataSize 1686
+                serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
                 serialization.format |
                 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                totalSize 1409
+                totalSize 1475
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
               name: default.alltypes_orc
@@ -2407,6 +2434,7 @@ POSTHOOK: Lineage: alltypes_orc.si1 SIMP
 POSTHOOK: Lineage: alltypes_orc.st1 SIMPLE [(alltypes)alltypes.FieldSchema(name:st1, type:struct<c1:int,c2:string>, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ti1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ti1, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: alltypes_orc.ts1 SIMPLE [(alltypes)alltypes.FieldSchema(name:ts1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: alltypes_orc.vc1 SIMPLE [(alltypes)alltypes.FieldSchema(name:vc1, type:varchar(5), comment:null), ]
 ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypes_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION array 1 2 3)))))
 
@@ -2422,7 +2450,7 @@ STAGE PLANS:
           TableScan
             alias: alltypes_orc
             Statistics:
-                numRows: 2 dataSize: 1514 basicStatsState: COMPLETE colStatsState: COMPLETE
+                numRows: 2 dataSize: 1686 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
               expressions:
@@ -2465,19 +2493,19 @@ STAGE PLANS:
               COLUMN_STATS_ACCURATE true
               bucket_count -1
               colelction.delim ,
-              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+              columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1
+              columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:varchar(5):map<string,string>:array<int>:struct<c1:int,c2:string>
               field.delim |
 #### A masked pattern was here ####
               mapkey.delim :
               name default.alltypes_orc
               numFiles 1
               numRows 2
-              rawDataSize 1514
-              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
+              rawDataSize 1686
+              serialization.ddl struct alltypes_orc { bool bo1, byte ti1, i16 si1, i32 i1, i64 bi1, float f1, double d1, decimal(10,0) de1, timestamp ts1, timestamp da1, string s1, varchar(5) vc1, map<string,string> m1, list<i32> l1, struct<c1:i32,c2:string> st1}
               serialization.format |
               serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              totalSize 1409
+              totalSize 1475
 #### A masked pattern was here ####
             serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           
@@ -2487,19 +2515,19 @@ STAGE PLANS:
                 COLUMN_STATS_ACCURATE true
                 bucket_count -1
                 colelction.delim ,
-                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,m1,l1,st1
-                columns.types boolean:tinyint:smallint:int:bigint:float:double:decimal(10,0):timestamp:timestamp:string:map<string,string>:array<int>:struct<c1:int,c2:string>
+                columns bo1,ti1,si1,i1,bi1,f1,d1,de1,ts1,da1,s1,vc1,m1,l1,st1

[... 1005 lines stripped ...]

