hive-commits mailing list archives

From: sze...@apache.org
Subject: svn commit: r1673583 [13/27] - in /hive/branches/spark: ./ beeline/src/java/org/apache/hive/beeline/ bin/ cli/src/java/org/apache/hadoop/hive/cli/ cli/src/test/org/apache/hadoop/hive/cli/ common/ common/src/java/org/apache/hadoop/hive/common/ common/sr...
Date: Tue, 14 Apr 2015 23:36:09 GMT
Modified: hive/branches/spark/ql/src/test/results/clientpositive/authorization_update.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/authorization_update.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/authorization_update.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/authorization_update.q.out Tue Apr 14 23:36:02 2015
@@ -1,12 +1,12 @@
 PREHOOK: query: -- current user has been set (comment line before the set cmd is resulting in parse error!!)
 
-CREATE TABLE t_auth_up(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+CREATE TABLE t_auth_up(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@t_auth_up
 POSTHOOK: query: -- current user has been set (comment line before the set cmd is resulting in parse error!!)
 
-CREATE TABLE t_auth_up(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+CREATE TABLE t_auth_up(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t_auth_up
@@ -52,11 +52,11 @@ default	t_auth_up			user1	USER	SELECT	tr
 default	t_auth_up			user1	USER	UPDATE	true	-1	user1
 default	t_auth_up			userWIns	USER	SELECT	false	-1	user1
 default	t_auth_up			userWIns	USER	UPDATE	false	-1	user1
-PREHOOK: query: update t_auth_up set i = 0 where i > 0
+PREHOOK: query: update t_auth_up set j = 0 where i > 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t_auth_up
 PREHOOK: Output: default@t_auth_up
-POSTHOOK: query: update t_auth_up set i = 0 where i > 0
+POSTHOOK: query: update t_auth_up set j = 0 where i > 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t_auth_up
 POSTHOOK: Output: default@t_auth_up

Modified: hive/branches/spark/ql/src/test/results/clientpositive/authorization_update_own_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/authorization_update_own_table.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/authorization_update_own_table.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/authorization_update_own_table.q.out Tue Apr 14 23:36:02 2015
@@ -1,16 +1,16 @@
-PREHOOK: query: create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: query: create table auth_noupd(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@auth_noupd
-POSTHOOK: query: create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: query: create table auth_noupd(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@auth_noupd
-PREHOOK: query: update auth_noupd set i = 0 where i > 0
+PREHOOK: query: update auth_noupd set j = 0 where i > 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@auth_noupd
 PREHOOK: Output: default@auth_noupd
-POSTHOOK: query: update auth_noupd set i = 0 where i > 0
+POSTHOOK: query: update auth_noupd set j = 0 where i > 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@auth_noupd
 POSTHOOK: Output: default@auth_noupd

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join1.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join1.q.out Tue Apr 14 23:36:02 2015
@@ -24,11 +24,11 @@ STAGE PLANS:
   Stage: Stage-5
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:src1 
+        $hdt$_0:src1 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:src1 
+        $hdt$_0:src1 
           TableScan
             alias: src1
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join10.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join10.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join10.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join10.q.out Tue Apr 14 23:36:02 2015
@@ -23,11 +23,11 @@ STAGE PLANS:
   Stage: Stage-5
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:$hdt$_0:src 
+        $hdt$_0:$hdt$_0:src 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:$hdt$_0:src 
+        $hdt$_0:$hdt$_0:src 
           TableScan
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join11.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join11.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join11.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join11.q.out Tue Apr 14 23:36:02 2015
@@ -23,11 +23,11 @@ STAGE PLANS:
   Stage: Stage-5
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:$hdt$_0:src 
+        $hdt$_0:$hdt$_0:src 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:$hdt$_0:src 
+        $hdt$_0:$hdt$_0:src 
           TableScan
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join12.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join12.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join12.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join12.q.out Tue Apr 14 23:36:02 2015
@@ -29,14 +29,14 @@ STAGE PLANS:
   Stage: Stage-7
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:src 
+        $hdt$_0:$hdt$_0:$hdt$_0:src 
           Fetch Operator
             limit: -1
-        $hdt$_0:$hdt$_0:$hdt$_1:$hdt$_1:$hdt$_1:src 
+        $hdt$_0:$hdt$_1:$hdt$_1:$hdt$_1:src 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:src 
+        $hdt$_0:$hdt$_0:$hdt$_0:src 
           TableScan
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
@@ -51,7 +51,7 @@ STAGE PLANS:
                   keys:
                     0 _col0 (type: string)
                     1 _col0 (type: string)
-        $hdt$_0:$hdt$_0:$hdt$_1:$hdt$_1:$hdt$_1:src 
+        $hdt$_0:$hdt$_1:$hdt$_1:$hdt$_1:src 
           TableScan
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join13.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join13.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join13.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join13.q.out Tue Apr 14 23:36:02 2015
@@ -29,14 +29,14 @@ STAGE PLANS:
   Stage: Stage-7
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:$hdt$_0:src 
+        $hdt$_0:$hdt$_0:src 
           Fetch Operator
             limit: -1
-        $hdt$_0:$hdt$_0:$hdt$_1:$hdt$_1:src 
+        $hdt$_0:$hdt$_1:$hdt$_1:src 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:$hdt$_0:src 
+        $hdt$_0:$hdt$_0:src 
           TableScan
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
@@ -51,7 +51,7 @@ STAGE PLANS:
                   keys:
                     0 UDFToDouble(_col0) (type: double)
                     1 (UDFToDouble(_col2) + UDFToDouble(_col0)) (type: double)
-        $hdt$_0:$hdt$_0:$hdt$_1:$hdt$_1:src 
+        $hdt$_0:$hdt$_1:$hdt$_1:src 
           TableScan
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join14.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join14.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join14.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join14.q.out Tue Apr 14 23:36:02 2015
@@ -28,11 +28,11 @@ STAGE PLANS:
   Stage: Stage-5
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_1:src 
+        $hdt$_1:src 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_1:src 
+        $hdt$_1:src 
           TableScan
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join22.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join22.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join22.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join22.q.out Tue Apr 14 23:36:02 2015
@@ -13,14 +13,14 @@ STAGE PLANS:
   Stage: Stage-8
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:src4 
+        $hdt$_0:$hdt$_0:$hdt$_0:src4 
           Fetch Operator
             limit: -1
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:src4 
+        $hdt$_0:$hdt$_1:src4 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:src4 
+        $hdt$_0:$hdt$_0:$hdt$_0:src4 
           TableScan
             alias: src4
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
@@ -35,7 +35,7 @@ STAGE PLANS:
                   keys:
                     0 _col0 (type: string)
                     1 _col0 (type: string)
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:src4 
+        $hdt$_0:$hdt$_1:src4 
           TableScan
             alias: src4
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join26.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join26.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join26.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join26.q.out Tue Apr 14 23:36:02 2015
@@ -28,11 +28,11 @@ STAGE PLANS:
   Stage: Stage-6
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:$hdt$_1:x 
+        $hdt$_0:$hdt$_1:x 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:$hdt$_1:x 
+        $hdt$_0:$hdt$_1:x 
           TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join_nulls.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join_nulls.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join_nulls.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join_nulls.q.out Tue Apr 14 23:36:02 2015
@@ -34,7 +34,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@myinput1
 #### A masked pattern was here ####
 13630578
-Warning: Map Join MAPJOIN[18][bigTable=?] in task 'Stage-2:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[17][bigTable=?] in task 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1 a RIGHT OUTER JOIN myinput1 b
 PREHOOK: type: QUERY
 PREHOOK: Input: default@myinput1

Modified: hive/branches/spark/ql/src/test/results/clientpositive/auto_join_without_localtask.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/auto_join_without_localtask.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/auto_join_without_localtask.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/auto_join_without_localtask.q.out Tue Apr 14 23:36:02 2015
@@ -270,11 +270,11 @@ STAGE PLANS:
   Stage: Stage-14
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_1:a 
+        $hdt$_1:a 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_1:a 
+        $hdt$_1:a 
           TableScan
             alias: a
             Filter Operator
@@ -319,11 +319,11 @@ STAGE PLANS:
   Stage: Stage-12
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_2:a 
+        $hdt$_2:a 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_2:a 
+        $hdt$_2:a 
           TableScan
             alias: a
             Filter Operator
@@ -406,11 +406,11 @@ STAGE PLANS:
       Local Work:
         Map Reduce Local Work
           Alias -> Map Local Tables:
-            $hdt$_0:$INTNAME 
+            $INTNAME 
               Fetch Operator
                 limit: -1
           Alias -> Map Local Operator Tree:
-            $hdt$_0:$INTNAME 
+            $INTNAME 
               TableScan
 
   Stage: Stage-2
@@ -457,11 +457,11 @@ STAGE PLANS:
   Stage: Stage-15
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:a 
+        $hdt$_0:a 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:a 
+        $hdt$_0:a 
           TableScan
             alias: a
             Filter Operator
@@ -639,11 +639,11 @@ STAGE PLANS:
   Stage: Stage-13
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_1:$hdt$_2:a 
+        $hdt$_1:$hdt$_2:a 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_1:$hdt$_2:a 
+        $hdt$_1:$hdt$_2:a 
           TableScan
             alias: a
             Filter Operator
@@ -714,11 +714,11 @@ STAGE PLANS:
       Local Work:
         Map Reduce Local Work
           Alias -> Map Local Tables:
-            $hdt$_0:$INTNAME 
+            $INTNAME 
               Fetch Operator
                 limit: -1
           Alias -> Map Local Operator Tree:
-            $hdt$_0:$INTNAME 
+            $INTNAME 
               TableScan
 
   Stage: Stage-2
@@ -748,11 +748,11 @@ STAGE PLANS:
   Stage: Stage-12
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:a 
+        $hdt$_0:a 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:a 
+        $hdt$_0:a 
           TableScan
             alias: a
             Filter Operator
@@ -836,11 +836,11 @@ STAGE PLANS:
   Stage: Stage-14
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_1:$hdt$_1:a 
+        $hdt$_1:$hdt$_1:a 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_1:$hdt$_1:a 
+        $hdt$_1:$hdt$_1:a 
           TableScan
             alias: a
             Filter Operator

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column.q.out Tue Apr 14 23:36:02 2015
@@ -24,8 +24,8 @@ PREHOOK: Input: default@doctors
 POSTHOOK: query: DESCRIBE doctors
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
+number              	int                 	                    
+first_name          	string              	                    
 PREHOOK: query: ALTER TABLE doctors ADD COLUMNS (last_name string)
 PREHOOK: type: ALTERTABLE_ADDCOLS
 PREHOOK: Input: default@doctors
@@ -40,9 +40,9 @@ PREHOOK: Input: default@doctors
 POSTHOOK: query: DESCRIBE doctors
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
+number              	int                 	                    
+first_name          	string              	                    
+last_name           	string              	                    
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors
 PREHOOK: type: LOAD
 #### A masked pattern was here ####

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column2.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column2.q.out Tue Apr 14 23:36:02 2015
@@ -50,8 +50,8 @@ POSTHOOK: query: INSERT INTO TABLE docto
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@doctors
 POSTHOOK: Output: default@doctors_copy
-POSTHOOK: Lineage: doctors_copy.first_name SIMPLE [(doctors)doctors.FieldSchema(name:first_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: doctors_copy.number SIMPLE [(doctors)doctors.FieldSchema(name:number, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: doctors_copy.first_name SIMPLE [(doctors)doctors.FieldSchema(name:first_name, type:string, comment:), ]
+POSTHOOK: Lineage: doctors_copy.number SIMPLE [(doctors)doctors.FieldSchema(name:number, type:int, comment:), ]
 PREHOOK: query: ALTER TABLE doctors_copy ADD COLUMNS (last_name string)
 PREHOOK: type: ALTERTABLE_ADDCOLS
 PREHOOK: Input: default@doctors_copy
@@ -74,9 +74,9 @@ PREHOOK: Input: default@doctors_copy
 POSTHOOK: query: DESCRIBE doctors_copy
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors_copy
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
+number              	int                 	                    
+first_name          	string              	                    
+last_name           	string              	                    
 PREHOOK: query: SELECT * FROM doctors_copy
 PREHOOK: type: QUERY
 PREHOOK: Input: default@doctors_copy

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column3.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_add_column3.q.out Tue Apr 14 23:36:02 2015
@@ -52,8 +52,8 @@ POSTHOOK: query: INSERT INTO TABLE docto
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@doctors
 POSTHOOK: Output: default@doctors_copy@part=1
-POSTHOOK: Lineage: doctors_copy PARTITION(part=1).first_name SIMPLE [(doctors)doctors.FieldSchema(name:first_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: doctors_copy PARTITION(part=1).number SIMPLE [(doctors)doctors.FieldSchema(name:number, type:int, comment:from deserializer), ]
+POSTHOOK: Lineage: doctors_copy PARTITION(part=1).first_name SIMPLE [(doctors)doctors.FieldSchema(name:first_name, type:string, comment:), ]
+POSTHOOK: Lineage: doctors_copy PARTITION(part=1).number SIMPLE [(doctors)doctors.FieldSchema(name:number, type:int, comment:), ]
 PREHOOK: query: ALTER TABLE doctors_copy ADD COLUMNS (last_name string)
 PREHOOK: type: ALTERTABLE_ADDCOLS
 PREHOOK: Input: default@doctors_copy
@@ -68,9 +68,9 @@ PREHOOK: Input: default@doctors_copy
 POSTHOOK: query: DESCRIBE doctors_copy
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors_copy
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
+number              	int                 	                    
+first_name          	string              	                    
+last_name           	string              	                    
 part                	int                 	                    
 	 	 
 # Partition Information	 	 

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_change_schema.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_change_schema.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_change_schema.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_change_schema.q.out Tue Apr 14 23:36:02 2015
@@ -38,8 +38,8 @@ PREHOOK: Input: default@avro2
 POSTHOOK: query: DESCRIBE avro2
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@avro2
-string1             	string              	from deserializer   
-string2             	string              	from deserializer   
+string1             	string              	                    
+string2             	string              	                    
 PREHOOK: query: ALTER TABLE avro2 SET TBLPROPERTIES ('avro.schema.literal'='{ "namespace": "org.apache.hive",
   "name": "second_schema",
   "type": "record",
@@ -68,6 +68,6 @@ PREHOOK: Input: default@avro2
 POSTHOOK: query: DESCRIBE avro2
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@avro2
-int1                	int                 	from deserializer   
-float1              	float               	from deserializer   
-double1             	double              	from deserializer   
+int1                	int                 	                    
+float1              	float               	                    
+double1             	double              	                    

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_compression_enabled.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_compression_enabled.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_compression_enabled.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_compression_enabled.q.out Tue Apr 14 23:36:02 2015
@@ -29,7 +29,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
     {
       "name":"extra_field",
       "type":"string",
-      "doc:":"an extra field not in the original file",
+      "doc":"an extra field not in the original file",
       "default":"fishfingers and custard"
     }
   ]
@@ -68,7 +68,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
     {
       "name":"extra_field",
       "type":"string",
-      "doc:":"an extra field not in the original file",
+      "doc":"an extra field not in the original file",
       "default":"fishfingers and custard"
     }
   ]

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal.q.out Tue Apr 14 23:36:02 2015
@@ -79,8 +79,8 @@ PREHOOK: Input: default@avro_dec
 POSTHOOK: query: DESC avro_dec
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@avro_dec
-name                	string              	from deserializer   
-value               	decimal(5,2)        	from deserializer   
+name                	string              	                    
+value               	decimal(5,2)        	                    
 PREHOOK: query: INSERT OVERWRITE TABLE avro_dec select name, value from dec
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dec
@@ -153,8 +153,8 @@ PREHOOK: Input: default@avro_dec1
 POSTHOOK: query: DESC avro_dec1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@avro_dec1
-name                	string              	from deserializer   
-value               	decimal(4,1)        	from deserializer   
+name                	string              	                    
+value               	decimal(4,1)        	                    
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dec.avro' into TABLE avro_dec1
 PREHOOK: type: LOAD
 #### A masked pattern was here ####

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal_native.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal_native.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal_native.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_decimal_native.q.out Tue Apr 14 23:36:02 2015
@@ -65,8 +65,8 @@ PREHOOK: Input: default@avro_dec
 POSTHOOK: query: DESC avro_dec
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@avro_dec
-name                	string              	from deserializer   
-value               	decimal(5,2)        	from deserializer   
+name                	string              	                    
+value               	decimal(5,2)        	                    
 PREHOOK: query: INSERT OVERWRITE TABLE avro_dec SELECT name, value FROM dec
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dec
@@ -121,8 +121,8 @@ PREHOOK: Input: default@avro_dec1
 POSTHOOK: query: DESC avro_dec1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@avro_dec1
-name                	string              	from deserializer   
-value               	decimal(4,1)        	from deserializer   
+name                	string              	                    
+value               	decimal(4,1)        	                    
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/dec.avro' INTO TABLE avro_dec1
 PREHOOK: type: LOAD
 #### A masked pattern was here ####

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_evolved_schemas.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_evolved_schemas.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_evolved_schemas.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_evolved_schemas.q.out Tue Apr 14 23:36:02 2015
@@ -30,7 +30,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
     {
       "name":"extra_field",
       "type":"string",
-      "doc:":"an extra field not in the original file",
+      "doc":"an extra field not in the original file",
       "default":"fishfingers and custard"
     }
   ]
@@ -70,7 +70,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
     {
       "name":"extra_field",
       "type":"string",
-      "doc:":"an extra field not in the original file",
+      "doc":"an extra field not in the original file",
       "default":"fishfingers and custard"
     }
   ]
@@ -84,10 +84,10 @@ PREHOOK: Input: default@doctors_with_new
 POSTHOOK: query: DESCRIBE doctors_with_new_field
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors_with_new_field
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
-extra_field         	string              	from deserializer   
+number              	int                 	Order of playing the role
+first_name          	string              	first name of actor playing role
+last_name           	string              	last name of actor playing role
+extra_field         	string              	an extra field not in the original file
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors_with_new_field
 PREHOOK: type: LOAD
 #### A masked pattern was here ####

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_joins.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_joins.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_joins.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_joins.q.out Tue Apr 14 23:36:02 2015
@@ -31,7 +31,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
     {
       "name":"extra_field",
       "type":"string",
-      "doc:":"an extra field not in the original file",
+      "doc":"an extra field not in the original file",
       "default":"fishfingers and custard"
     }
   ]
@@ -72,7 +72,7 @@ TBLPROPERTIES ('avro.schema.literal'='{
     {
       "name":"extra_field",
       "type":"string",
-      "doc:":"an extra field not in the original file",
+      "doc":"an extra field not in the original file",
       "default":"fishfingers and custard"
     }
   ]
@@ -86,10 +86,10 @@ PREHOOK: Input: default@doctors4
 POSTHOOK: query: DESCRIBE doctors4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors4
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
-extra_field         	string              	from deserializer   
+number              	int                 	Order of playing the role
+first_name          	string              	first name of actor playing role
+last_name           	string              	last name of actor playing role
+extra_field         	string              	an extra field not in the original file
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors4
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -166,9 +166,9 @@ PREHOOK: Input: default@episodes
 POSTHOOK: query: DESCRIBE episodes
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@episodes
-title               	string              	from deserializer   
-air_date            	string              	from deserializer   
-doctor              	int                 	from deserializer   
+title               	string              	episode title       
+air_date            	string              	initial date        
+doctor              	int                 	main actor playing the Doctor in episode
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/episodes.avro' INTO TABLE episodes
 PREHOOK: type: LOAD
 #### A masked pattern was here ####

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_joins_native.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_joins_native.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_joins_native.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_joins_native.q.out Tue Apr 14 23:36:02 2015
@@ -28,9 +28,9 @@ PREHOOK: Input: default@doctors4
 POSTHOOK: query: DESCRIBE doctors4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors4
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
+number              	int                 	Order of playing the role
+first_name          	string              	first name of actor playing role
+last_name           	string              	last name of actor playing role
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors4
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
@@ -61,9 +61,9 @@ PREHOOK: Input: default@episodes
 POSTHOOK: query: DESCRIBE episodes
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@episodes
-title               	string              	from deserializer   
-air_date            	string              	from deserializer   
-doctor              	int                 	from deserializer   
+title               	string              	episode title       
+air_date            	string              	initial date        
+doctor              	int                 	main actor playing the Doctor in episode
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/episodes.avro' INTO TABLE episodes
 PREHOOK: type: LOAD
 #### A masked pattern was here ####

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_native.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_native.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_native.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_native.q.out Tue Apr 14 23:36:02 2015
@@ -26,9 +26,9 @@ PREHOOK: Input: default@doctors
 POSTHOOK: query: DESCRIBE doctors
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
+number              	int                 	                    
+first_name          	string              	                    
+last_name           	string              	                    
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors
 PREHOOK: type: LOAD
 #### A masked pattern was here ####

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_partitioned.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_partitioned.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_partitioned.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_partitioned.q.out Tue Apr 14 23:36:02 2015
@@ -150,27 +150,27 @@ POSTHOOK: Output: default@episodes_parti
 POSTHOOK: Output: default@episodes_partitioned@doctor_pt=5
 POSTHOOK: Output: default@episodes_partitioned@doctor_pt=6
 POSTHOOK: Output: default@episodes_partitioned@doctor_pt=9
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
 PREHOOK: query: SELECT * FROM episodes_partitioned WHERE doctor_pt > 6
 PREHOOK: type: QUERY
 PREHOOK: Input: default@episodes_partitioned
@@ -360,27 +360,27 @@ POSTHOOK: Output: default@episodes_parti
 POSTHOOK: Output: default@episodes_partitioned_serdeproperties@doctor_pt=5
 POSTHOOK: Output: default@episodes_partitioned_serdeproperties@doctor_pt=6
 POSTHOOK: Output: default@episodes_partitioned_serdeproperties@doctor_pt=9
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=11).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=11).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=11).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=1).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=1).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=1).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=2).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=2).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=2).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=4).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=4).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=4).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=5).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=5).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=5).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=6).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=6).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=6).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=9).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=9).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=9).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=11).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=11).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=11).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=1).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=1).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=1).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=2).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=2).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=2).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=4).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=4).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=4).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=5).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=5).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=5).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=6).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=6).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=6).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=9).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=9).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned_serdeproperties PARTITION(doctor_pt=9).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
 PREHOOK: query: -- Evolve the table schema by adding new array field "cast_and_crew"
 ALTER TABLE episodes_partitioned_serdeproperties
 SET SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_partitioned_native.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_partitioned_native.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_partitioned_native.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_partitioned_native.q.out Tue Apr 14 23:36:02 2015
@@ -60,27 +60,27 @@ POSTHOOK: Output: default@episodes_parti
 POSTHOOK: Output: default@episodes_partitioned@doctor_pt=5
 POSTHOOK: Output: default@episodes_partitioned@doctor_pt=6
 POSTHOOK: Output: default@episodes_partitioned@doctor_pt=9
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
 PREHOOK: query: SELECT * FROM episodes_partitioned WHERE doctor_pt > 6
 PREHOOK: type: QUERY
 PREHOOK: Input: default@episodes_partitioned

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_sanity_test.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_sanity_test.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_sanity_test.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_sanity_test.q.out Tue Apr 14 23:36:02 2015
@@ -72,9 +72,9 @@ PREHOOK: Input: default@doctors
 POSTHOOK: query: DESCRIBE doctors
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
+number              	int                 	Order of playing the role
+first_name          	string              	first name of actor playing role
+last_name           	string              	last name of actor playing role
 PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/doctors.avro' INTO TABLE doctors
 PREHOOK: type: LOAD
 #### A masked pattern was here ####
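
The comment changes above (both in the lineage lines and in DESCRIBE doctors) reflect the AvroSerDe now surfacing each Avro field's "doc" string as the Hive column comment instead of the old "from deserializer" placeholder. A hedged sketch of a table definition that would produce that kind of DESCRIBE output follows; the column names and doc strings are taken from the output above, but the exact literal and storage clauses used by avro_sanity_test.q are assumed.

-- Declare the table against an Avro schema literal whose fields carry "doc" strings.
CREATE TABLE doctors
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
TBLPROPERTIES ('avro.schema.literal'='{
  "type": "record", "name": "doctors", "namespace": "testing.hive.avro.serde",
  "fields": [
    {"name": "number",     "type": "int",    "doc": "Order of playing the role"},
    {"name": "first_name", "type": "string", "doc": "first name of actor playing role"},
    {"name": "last_name",  "type": "string", "doc": "last name of actor playing role"}
  ]
}');

-- The doc strings now appear in the comment column:
DESCRIBE doctors;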

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out Tue Apr 14 23:36:02 2015
@@ -60,27 +60,27 @@ POSTHOOK: Output: default@episodes_parti
 POSTHOOK: Output: default@episodes_partitioned@doctor_pt=5
 POSTHOOK: Output: default@episodes_partitioned@doctor_pt=6
 POSTHOOK: Output: default@episodes_partitioned@doctor_pt=9
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=11).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=1).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=2).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=4).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=5).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=6).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).air_date SIMPLE [(episodes)episodes.FieldSchema(name:air_date, type:string, comment:initial date), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).doctor SIMPLE [(episodes)episodes.FieldSchema(name:doctor, type:int, comment:main actor playing the Doctor in episode), ]
+POSTHOOK: Lineage: episodes_partitioned PARTITION(doctor_pt=9).title SIMPLE [(episodes)episodes.FieldSchema(name:title, type:string, comment:episode title), ]
 PREHOOK: query: ALTER TABLE episodes_partitioned
 SET SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
 WITH

Modified: hive/branches/spark/ql/src/test/results/clientpositive/avro_schema_literal.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/avro_schema_literal.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/avro_schema_literal.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/avro_schema_literal.q.out Tue Apr 14 23:36:02 2015
@@ -70,20 +70,20 @@ PREHOOK: Input: default@avro1
 POSTHOOK: query: DESCRIBE avro1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@avro1
-string1             	string              	from deserializer   
-int1                	int                 	from deserializer   
-tinyint1            	int                 	from deserializer   
-smallint1           	int                 	from deserializer   
-bigint1             	bigint              	from deserializer   
-boolean1            	boolean             	from deserializer   
-float1              	float               	from deserializer   
-double1             	double              	from deserializer   
-list1               	array<string>       	from deserializer   
-map1                	map<string,int>     	from deserializer   
-struct1             	struct<sint:int,sboolean:boolean,sstring:string>	from deserializer   
-union1              	uniontype<float,boolean,string>	from deserializer   
-enum1               	string              	from deserializer   
-nullableint         	int                 	from deserializer   
-bytes1              	binary              	from deserializer   
-fixed1              	binary              	from deserializer   
-dec1                	decimal(5,2)        	from deserializer   
+string1             	string              	                    
+int1                	int                 	                    
+tinyint1            	int                 	                    
+smallint1           	int                 	                    
+bigint1             	bigint              	                    
+boolean1            	boolean             	                    
+float1              	float               	                    
+double1             	double              	                    
+list1               	array<string>       	                    
+map1                	map<string,int>     	                    
+struct1             	struct<sint:int,sboolean:boolean,sstring:string>	                    
+union1              	uniontype<float,boolean,string>	                    
+enum1               	string              	                    
+nullableint         	int                 	                    
+bytes1              	binary              	                    
+fixed1              	binary              	                    
+dec1                	decimal(5,2)        	                    
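
The avro_schema_literal rows above also double as a reference for the Avro-to-Hive type mapping (tinyint/smallint come back as int, bytes and fixed as binary, enum as string, a ["null", T] union as a nullable T, and a decimal-annotated bytes field as decimal(5,2)); the comment column is blank here simply because this schema carries no "doc" attributes. The fragment below is a hedged sketch of the kind of schema literal behind a few of those rows; the table name, record name, namespace, and enum symbols are illustrative, not the test's.

CREATE TABLE avro1_sketch
STORED AS AVRO
TBLPROPERTIES ('avro.schema.literal'='{
  "type": "record", "name": "avro1", "namespace": "org.example",
  "fields": [
    {"name": "string1",     "type": "string"},
    {"name": "enum1",       "type": {"type": "enum", "name": "colors",
                                     "symbols": ["BLUE", "RED", "GREEN"]}},
    {"name": "nullableint", "type": ["null", "int"]},
    {"name": "fixed1",      "type": {"type": "fixed", "name": "threebytes", "size": 3}},
    {"name": "dec1",        "type": {"type": "bytes", "logicalType": "decimal",
                                     "precision": 5, "scale": 2}}
  ]
}');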

Modified: hive/branches/spark/ql/src/test/results/clientpositive/combine2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/combine2.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/combine2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/combine2.q.out Tue Apr 14 23:36:02 2015
@@ -564,14 +564,14 @@ STAGE PLANS:
               name: default.combine2
             name: default.combine2
       Truncated Path -> Alias:
-        /combine2/value=2010-04-21 09%3A45%3A00 [$hdt$_0:$hdt$_0:combine2]
-        /combine2/value=val_0 [$hdt$_0:$hdt$_0:combine2]
-        /combine2/value=val_2 [$hdt$_0:$hdt$_0:combine2]
-        /combine2/value=val_4 [$hdt$_0:$hdt$_0:combine2]
-        /combine2/value=val_5 [$hdt$_0:$hdt$_0:combine2]
-        /combine2/value=val_8 [$hdt$_0:$hdt$_0:combine2]
-        /combine2/value=val_9 [$hdt$_0:$hdt$_0:combine2]
-        /combine2/value=| [$hdt$_0:$hdt$_0:combine2]
+        /combine2/value=2010-04-21 09%3A45%3A00 [$hdt$_0:combine2]
+        /combine2/value=val_0 [$hdt$_0:combine2]
+        /combine2/value=val_2 [$hdt$_0:combine2]
+        /combine2/value=val_4 [$hdt$_0:combine2]
+        /combine2/value=val_5 [$hdt$_0:combine2]
+        /combine2/value=val_8 [$hdt$_0:combine2]
+        /combine2/value=val_9 [$hdt$_0:combine2]
+        /combine2/value=| [$hdt$_0:combine2]
       Needs Tagging: false
       Reduce Operator Tree:
         Group By Operator

Modified: hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer1.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer1.q.out Tue Apr 14 23:36:02 2015
@@ -329,11 +329,11 @@ STAGE PLANS:
   Stage: Stage-6
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:x 
+        $hdt$_0:$hdt$_0:$hdt$_1:x 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:x 
+        $hdt$_0:$hdt$_0:$hdt$_1:x 
           TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer12.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer12.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer12.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer12.q.out Tue Apr 14 23:36:02 2015
@@ -27,16 +27,12 @@ STAGE PLANS:
           TableScan
             alias: x
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
+            Reduce Output Operator
+              key expressions: key (type: string)
+              sort order: +
+              Map-reduce partition columns: key (type: string)
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string)
-                sort order: +
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                value expressions: _col1 (type: string)
+              value expressions: value (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
@@ -116,16 +112,12 @@ STAGE PLANS:
           TableScan
             alias: y
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
+            Reduce Output Operator
+              key expressions: key (type: string)
+              sort order: +
+              Map-reduce partition columns: key (type: string)
               Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string)
-                sort order: +
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-                value expressions: _col1 (type: string)
+              value expressions: value (type: string)
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer3.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/correlationoptimizer3.q.out Tue Apr 14 23:36:02 2015
@@ -504,14 +504,14 @@ STAGE PLANS:
   Stage: Stage-9
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:x 
+        $hdt$_0:$hdt$_0:$hdt$_1:x 
           Fetch Operator
             limit: -1
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:$hdt$_1:$hdt$_1:$hdt$_2:x 
+        $hdt$_0:$hdt$_1:$hdt$_1:$hdt$_2:x 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:x 
+        $hdt$_0:$hdt$_0:$hdt$_1:x 
           TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
@@ -526,7 +526,7 @@ STAGE PLANS:
                   keys:
                     0 _col0 (type: string)
                     1 _col0 (type: string)
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:$hdt$_1:$hdt$_1:$hdt$_2:x 
+        $hdt$_0:$hdt$_1:$hdt$_1:$hdt$_2:x 
           TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
@@ -1208,14 +1208,14 @@ STAGE PLANS:
   Stage: Stage-9
     Map Reduce Local Work
       Alias -> Map Local Tables:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:x 
+        $hdt$_0:$hdt$_0:$hdt$_1:x 
           Fetch Operator
             limit: -1
-        $hdt$_0:$hdt$_0:$hdt$_1:$hdt$_1:$hdt$_1:$hdt$_2:x 
+        $hdt$_0:$hdt$_1:$hdt$_1:$hdt$_2:x 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
-        $hdt$_0:$hdt$_0:$hdt$_0:$hdt$_0:$hdt$_1:x 
+        $hdt$_0:$hdt$_0:$hdt$_1:x 
           TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
@@ -1230,7 +1230,7 @@ STAGE PLANS:
                   keys:
                     0 _col0 (type: string)
                     1 _col0 (type: string)
-        $hdt$_0:$hdt$_0:$hdt$_1:$hdt$_1:$hdt$_1:$hdt$_2:x 
+        $hdt$_0:$hdt$_1:$hdt$_1:$hdt$_2:x 
           TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/create_like.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/create_like.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/create_like.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/create_like.q.out Tue Apr 14 23:36:02 2015
@@ -331,9 +331,9 @@ POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors
 # col_name            	data_type           	comment             
 	 	 
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
+number              	int                 	Order of playing the role
+first_name          	string              	first name of actor playing role
+last_name           	string              	last name of actor playing role
 	 	 
 # Detailed Table Information	 	 
 Database:           	default             	 
@@ -380,9 +380,9 @@ POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@doctors2
 # col_name            	data_type           	comment             
 	 	 
-number              	int                 	from deserializer   
-first_name          	string              	from deserializer   
-last_name           	string              	from deserializer   
+number              	int                 	Order of playing the role
+first_name          	string              	first name of actor playing role
+last_name           	string              	last name of actor playing role
 	 	 
 # Detailed Table Information	 	 
 Database:           	default             	 
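
The create_like hunk shows the same comment propagation carrying through CREATE TABLE LIKE: the copy inherits the source table's Avro SerDe properties, so DESCRIBE on the copy reports the same doc-derived comments as the original. A minimal hedged sketch, with table names taken from the output above:

CREATE TABLE doctors2 LIKE doctors;
-- Expected to list the same Avro "doc"-derived comments as the source table.
DESCRIBE doctors2;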

Modified: hive/branches/spark/ql/src/test/results/clientpositive/ctas_colname.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/ctas_colname.q.out?rev=1673583&r1=1673582&r2=1673583&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/ctas_colname.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/ctas_colname.q.out Tue Apr 14 23:36:02 2015
@@ -174,15 +174,11 @@ STAGE PLANS:
           TableScan
             alias: src1
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
+            Reduce Output Operator
+              key expressions: key (type: string), value (type: string)
+              sort order: ++
+              Map-reduce partition columns: key (type: string)
               Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string), _col1 (type: string)
-                sort order: ++
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
@@ -340,15 +336,11 @@ STAGE PLANS:
           TableScan
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
+            Reduce Output Operator
+              key expressions: key (type: string), value (type: string)
+              sort order: ++
+              Map-reduce partition columns: key (type: string)
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string), _col1 (type: string)
-                sort order: ++
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
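
The ctas_colname plan diffs, like the correlationoptimizer and combine2 ones above, all show the same shape of change: the identity Select Operator that used to sit between the TableScan and the Reduce Output Operator is gone, so the reduce sink keys directly on key/value instead of the renamed _col0/_col1 (and the $hdt$ alias chains shrink accordingly). The exact queries in ctas_colname.q are not reproduced in these hunks, so the statement below is only an illustrative, hedged way to get a plan of this shape; the table and alias names are made up.

-- EXPLAIN a CTAS over a windowed select; the partition/order columns become the
-- reduce sink keys seen in the plan above.
EXPLAIN
CREATE TABLE ctas_colname_sketch AS
SELECT key, value, rank() OVER (PARTITION BY key ORDER BY value) AS rk
FROM src1;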


