hive-commits mailing list archives

From: mmccl...@apache.org
Subject: [01/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)
Date: Tue, 19 Apr 2016 10:12:55 GMT
Repository: hive
Updated Branches:
  refs/heads/branch-1 f42b984bd -> 130293e56


http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/vector_udf1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_udf1.q.out b/ql/src/test/results/clientpositive/vector_udf1.q.out
index bb02ea7..748276f 100644
--- a/ql/src/test/results/clientpositive/vector_udf1.q.out
+++ b/ql/src/test/results/clientpositive/vector_udf1.q.out
@@ -62,10 +62,9 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-      Execution mode: vectorized
 
   Stage: Stage-0
     Fetch Operator
@@ -126,10 +125,9 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-      Execution mode: vectorized
 
   Stage: Stage-0
     Fetch Operator
@@ -190,10 +188,9 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-      Execution mode: vectorized
 
   Stage: Stage-0
     Fetch Operator
@@ -256,8 +253,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -319,8 +316,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -382,8 +379,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -445,8 +442,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -508,8 +505,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -572,8 +569,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -636,8 +633,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -699,8 +696,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -763,8 +760,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -826,8 +823,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -890,8 +887,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -954,8 +951,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1017,8 +1014,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1080,8 +1077,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -1142,8 +1139,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1201,8 +1198,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1260,8 +1257,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 1508 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1321,8 +1318,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -1385,8 +1382,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Execution mode: vectorized
 
@@ -1461,8 +1458,8 @@ STAGE PLANS:
             compressed: false
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1485,7 +1482,7 @@ from varchar_udf_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@varchar_udf_1
 #### A masked pattern was here ####
-{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}
+{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1}
 PREHOOK: query: explain
 select
   min(c2),
@@ -1511,10 +1508,10 @@ STAGE PLANS:
             Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: c2 (type: string), c4 (type: varchar(20))
-              outputColumnNames: c2, c4
+              outputColumnNames: _col0, _col1
               Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                aggregations: min(c2), min(c4)
+                aggregations: min(_col0), min(_col1)
                 mode: hash
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE
@@ -1533,8 +1530,8 @@ STAGE PLANS:
             compressed: false
             Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE
             table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -1583,10 +1580,10 @@ STAGE PLANS:
             Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: c2 (type: string), c4 (type: varchar(20))
-              outputColumnNames: c2, c4
+              outputColumnNames: _col0, _col1
               Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                aggregations: max(c2), max(c4)
+                aggregations: max(_col0), max(_col1)
                 mode: hash
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE
@@ -1605,8 +1602,8 @@ STAGE PLANS:
             compressed: false
             Statistics: Num rows: 1 Data size: 168 Basic stats: COMPLETE Column stats: NONE
             table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/vectorized_casts.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_casts.q.out b/ql/src/test/results/clientpositive/vectorized_casts.q.out
index 1113453..630be88 100644
--- a/ql/src/test/results/clientpositive/vectorized_casts.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_casts.q.out
@@ -340,18 +340,18 @@ true	NULL	true	true	true	NULL	true	false	true	true	11	NULL	-64615982	1803053750
 true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	890988972	-1862301000	8	NULL	1	15	NULL	NULL	8	8	8	8.0	NULL	8.90988972E8	-1.862301E9	8.0	NULL	1.0	15.892	NULL	NULL	8.9098899E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-10 23:29:48.972	1969-12-10 02:41:39	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	890988972	-1862301000	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	XylAH4	8.0	1.781977944E9	0.9893582466233818	8.90988973E8
 true	NULL	true	true	true	NULL	true	false	true	true	8	NULL	930867246	1205399250	8	NULL	1	15	NULL	NULL	8	8	8	8.0	NULL	9.30867246E8	1.20539925E9	8.0	NULL	1.0	15.892	NULL	NULL	9.3086726E8	NULL	1969-12-31 16:00:00.008	NULL	1970-01-11 10:34:27.246	1970-01-14 14:49:59.25	1969-12-31 16:00:08	NULL	1969-12-31 16:00:00.001	1969-12-31 16:00:00	1969-12-31 16:00:15.892	NULL	NULL	8	NULL	930867246	1205399250	8.0	NULL	TRUE	0	1969-12-31 16:00:15.892	c1V8o1A	8.0	1.861734492E9	0.9893582466233818	9.30867247E8
 true	true	NULL	true	true	true	NULL	false	true	NULL	-14	-7196	NULL	-1552199500	-14	-7196	NULL	11	NULL	NULL	-14	-14	-14	-14.0	-7196.0	NULL	-1.5521995E9	-14.0	-7196.0	NULL	11.065	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.986	1969-12-31 15:59:52.804	NULL	1969-12-13 16:50:00.5	1969-12-31 15:59:46	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:11.065	NULL	NULL	-14	-7196	NULL	-1552199500	-14.0	-7196.0	NULL	0	1969-12-31 16:00:11.065	NULL	-14.0	NULL	-0.9906073556948704	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-21	-7196	NULL	1542429000	-21	-7196	NULL	-4	NULL	NULL	-21	-21	-21	-21.0	-7196.0	NULL	1.542429E9	-21.0	-7196.0	NULL	-4.1	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.979	1969-12-31 15:59:52.804	NULL	1970-01-18 12:27:09	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL	-21	-7196	NULL	1542429000	-21.0	-7196.0	NULL	0	1969-12-31 15:59:55.9	NULL	-21.0	NULL	-0.8366556385360561	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-24	-7196	NULL	829111000	-24	-7196	NULL	-6	NULL	NULL	-24	-24	-24	-24.0	-7196.0	NULL	8.29111E8	-24.0	-7196.0	NULL	-6.855	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.976	1969-12-31 15:59:52.804	NULL	1970-01-10 06:18:31	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL	-24	-7196	NULL	829111000	-24.0	-7196.0	NULL	0	1969-12-31 15:59:53.145	NULL	-24.0	NULL	0.9055783620066238	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-21	-7196	NULL	1542429000	-21	-7196	NULL	-5	NULL	NULL	-21	-21	-21	-21.0	-7196.0	NULL	1.542429E9	-21.0	-7196.0	NULL	-4.1	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.979	1969-12-31 15:59:52.804	NULL	1970-01-18 12:27:09	1969-12-31 15:59:39	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:55.9	NULL	NULL	-21	-7196	NULL	1542429000	-21.0	-7196.0	NULL	0	1969-12-31 15:59:55.9	NULL	-21.0	NULL	-0.8366556385360561	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-24	-7196	NULL	829111000	-24	-7196	NULL	-7	NULL	NULL	-24	-24	-24	-24.0	-7196.0	NULL	8.29111E8	-24.0	-7196.0	NULL	-6.855	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.976	1969-12-31 15:59:52.804	NULL	1970-01-10 06:18:31	1969-12-31 15:59:36	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.145	NULL	NULL	-24	-7196	NULL	829111000	-24.0	-7196.0	NULL	0	1969-12-31 15:59:53.145	NULL	-24.0	NULL	0.9055783620066238	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	-30	-200	NULL	1429852250	-30	-200	NULL	12	NULL	NULL	-30	-30	-30	-30.0	-200.0	NULL	1.42985225E9	-30.0	-200.0	NULL	12.935	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.97	1969-12-31 15:59:59.8	NULL	1970-01-17 05:10:52.25	1969-12-31 15:59:30	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 16:00:12.935	NULL	NULL	-30	-200	NULL	1429852250	-30.0	-200.0	NULL	0	1969-12-31 16:00:12.935	NULL	-30.0	NULL	0.9880316240928618	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	-2006216750	-36	-200	NULL	-14	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	-2.00621675E9	-36.0	-200.0	NULL	-14.252	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1969-12-08 10:43:03.25	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL	-36	-200	NULL	-2006216750	-36.0	-200.0	NULL	0	1969-12-31 15:59:45.748	NULL	-36.0	NULL	0.9917788534431158	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	1599879000	-36	-200	NULL	-6	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	1.599879E9	-36.0	-200.0	NULL	-6.183	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1970-01-19 04:24:39	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL	-36	-200	NULL	1599879000	-36.0	-200.0	NULL	0	1969-12-31 15:59:53.817	NULL	-36.0	NULL	0.9917788534431158	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-38	15601	NULL	-1858689000	-38	15601	NULL	-1	NULL	NULL	-38	-38	-38	-38.0	15601.0	NULL	-1.858689E9	-38.0	15601.0	NULL	-1.386	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.962	1969-12-31 16:00:15.601	NULL	1969-12-10 03:41:51	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL	-38	15601	NULL	-1858689000	-38.0	15601.0	NULL	0	1969-12-31 15:59:58.614	NULL	-38.0	NULL	-0.2963685787093853	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	-2006216750	-36	-200	NULL	-15	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	-2.00621675E9	-36.0	-200.0	NULL	-14.252	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1969-12-08 10:43:03.25	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.748	NULL	NULL	-36	-200	NULL	-2006216750	-36.0	-200.0	NULL	0	1969-12-31 15:59:45.748	NULL	-36.0	NULL	0.9917788534431158	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-36	-200	NULL	1599879000	-36	-200	NULL	-7	NULL	NULL	-36	-36	-36	-36.0	-200.0	NULL	1.599879E9	-36.0	-200.0	NULL	-6.183	NULL	NULL	NULL	-200.0	1969-12-31 15:59:59.964	1969-12-31 15:59:59.8	NULL	1970-01-19 04:24:39	1969-12-31 15:59:24	1969-12-31 15:56:40	NULL	1969-12-31 16:00:00	1969-12-31 15:59:53.817	NULL	NULL	-36	-200	NULL	1599879000	-36.0	-200.0	NULL	0	1969-12-31 15:59:53.817	NULL	-36.0	NULL	0.9917788534431158	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-38	15601	NULL	-1858689000	-38	15601	NULL	-2	NULL	NULL	-38	-38	-38	-38.0	15601.0	NULL	-1.858689E9	-38.0	15601.0	NULL	-1.3860000000000001	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.962	1969-12-31 16:00:15.601	NULL	1969-12-10 03:41:51	1969-12-31 15:59:22	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:58.614	NULL	NULL	-38	15601	NULL	-1858689000	-38.0	15601.0	NULL	0	1969-12-31 15:59:58.614	NULL	-38.0	NULL	-0.2963685787093853	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	-5	15601	NULL	612416000	-5	15601	NULL	4	NULL	NULL	-5	-5	-5	-5.0	15601.0	NULL	6.12416E8	-5.0	15601.0	NULL	4.679	NULL	NULL	NULL	15601.0	1969-12-31 15:59:59.995	1969-12-31 16:00:15.601	NULL	1970-01-07 18:06:56	1969-12-31 15:59:55	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 16:00:04.679	NULL	NULL	-5	15601	NULL	612416000	-5.0	15601.0	NULL	0	1969-12-31 16:00:04.679	NULL	-5.0	NULL	0.9589242746631385	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-50	-7196	NULL	-1031187250	-50	-7196	NULL	-5	NULL	NULL	-50	-50	-50	-50.0	-7196.0	NULL	-1.03118725E9	-50.0	-7196.0	NULL	-5.267	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.95	1969-12-31 15:59:52.804	NULL	1969-12-19 17:33:32.75	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL	-50	-7196	NULL	-1031187250	-50.0	-7196.0	NULL	0	1969-12-31 15:59:54.733	NULL	-50.0	NULL	0.26237485370392877	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-50	-7196	NULL	-1031187250	-50	-7196	NULL	-6	NULL	NULL	-50	-50	-50	-50.0	-7196.0	NULL	-1.03118725E9	-50.0	-7196.0	NULL	-5.267	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.95	1969-12-31 15:59:52.804	NULL	1969-12-19 17:33:32.75	1969-12-31 15:59:10	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:54.733	NULL	NULL	-50	-7196	NULL	-1031187250	-50.0	-7196.0	NULL	0	1969-12-31 15:59:54.733	NULL	-50.0	NULL	0.26237485370392877	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	-59	-7196	NULL	-1604890000	-59	-7196	NULL	13	NULL	NULL	-59	-59	-59	-59.0	-7196.0	NULL	-1.60489E9	-59.0	-7196.0	NULL	13.15	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.941	1969-12-31 15:59:52.804	NULL	1969-12-13 02:11:50	1969-12-31 15:59:01	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:13.15	NULL	NULL	-59	-7196	NULL	-1604890000	-59.0	-7196.0	NULL	0	1969-12-31 16:00:13.15	NULL	-59.0	NULL	-0.6367380071391379	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	-60	-7196	NULL	1516314750	-60	-7196	NULL	-7	NULL	NULL	-60	-60	-60	-60.0	-7196.0	NULL	1.51631475E9	-60.0	-7196.0	NULL	-7.592	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.94	1969-12-31 15:59:52.804	NULL	1970-01-18 05:11:54.75	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL	-60	-7196	NULL	1516314750	-60.0	-7196.0	NULL	0	1969-12-31 15:59:52.408	NULL	-60.0	NULL	0.3048106211022167	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	-60	-7196	NULL	1516314750	-60	-7196	NULL	-8	NULL	NULL	-60	-60	-60	-60.0	-7196.0	NULL	1.51631475E9	-60.0	-7196.0	NULL	-7.592	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.94	1969-12-31 15:59:52.804	NULL	1970-01-18 05:11:54.75	1969-12-31 15:59:00	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 15:59:52.408	NULL	NULL	-60	-7196	NULL	1516314750	-60.0	-7196.0	NULL	0	1969-12-31 15:59:52.408	NULL	-60.0	NULL	0.3048106211022167	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	-8	-7196	NULL	-1849991500	-8	-7196	NULL	3	NULL	NULL	-8	-8	-8	-8.0	-7196.0	NULL	-1.8499915E9	-8.0	-7196.0	NULL	3.136	NULL	NULL	NULL	-7196.0	1969-12-31 15:59:59.992	1969-12-31 15:59:52.804	NULL	1969-12-10 06:06:48.5	1969-12-31 15:59:52	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:03.136	NULL	NULL	-8	-7196	NULL	-1849991500	-8.0	-7196.0	NULL	0	1969-12-31 16:00:03.136	NULL	-8.0	NULL	-0.9893582466233818	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	20	15601	NULL	-362433250	20	15601	NULL	-14	NULL	NULL	20	20	20	20.0	15601.0	NULL	-3.6243325E8	20.0	15601.0	NULL	-14.871	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.02	1969-12-31 16:00:15.601	NULL	1969-12-27 11:19:26.75	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL	20	15601	NULL	-362433250	20.0	15601.0	NULL	0	1969-12-31 15:59:45.129	NULL	20.0	NULL	0.9129452507276277	NULL
-true	true	NULL	true	true	true	NULL	false	true	NULL	48	15601	NULL	-795361000	48	15601	NULL	-9	NULL	NULL	48	48	48	48.0	15601.0	NULL	-7.95361E8	48.0	15601.0	NULL	-9.765	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.048	1969-12-31 16:00:15.601	NULL	1969-12-22 11:03:59	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL	48	15601	NULL	-795361000	48.0	15601.0	NULL	0	1969-12-31 15:59:50.235	NULL	48.0	NULL	-0.7682546613236668	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	20	15601	NULL	-362433250	20	15601	NULL	-15	NULL	NULL	20	20	20	20.0	15601.0	NULL	-3.6243325E8	20.0	15601.0	NULL	-14.871	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.02	1969-12-31 16:00:15.601	NULL	1969-12-27 11:19:26.75	1969-12-31 16:00:20	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:45.129	NULL	NULL	20	15601	NULL	-362433250	20.0	15601.0	NULL	0	1969-12-31 15:59:45.129	NULL	20.0	NULL	0.9129452507276277	NULL
+true	true	NULL	true	true	true	NULL	false	true	NULL	48	15601	NULL	-795361000	48	15601	NULL	-10	NULL	NULL	48	48	48	48.0	15601.0	NULL	-7.95361E8	48.0	15601.0	NULL	-9.765	NULL	NULL	NULL	15601.0	1969-12-31 16:00:00.048	1969-12-31 16:00:15.601	NULL	1969-12-22 11:03:59	1969-12-31 16:00:48	1969-12-31 20:20:01	NULL	1969-12-31 16:00:00	1969-12-31 15:59:50.235	NULL	NULL	48	15601	NULL	-795361000	48.0	15601.0	NULL	0	1969-12-31 15:59:50.235	NULL	48.0	NULL	-0.7682546613236668	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	5	-7196	NULL	-1015607500	5	-7196	NULL	10	NULL	NULL	5	5	5	5.0	-7196.0	NULL	-1.0156075E9	5.0	-7196.0	NULL	10.973	NULL	NULL	NULL	-7196.0	1969-12-31 16:00:00.005	1969-12-31 15:59:52.804	NULL	1969-12-19 21:53:12.5	1969-12-31 16:00:05	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.973	NULL	NULL	5	-7196	NULL	-1015607500	5.0	-7196.0	NULL	0	1969-12-31 16:00:10.973	NULL	5.0	NULL	-0.9589242746631385	NULL
 true	true	NULL	true	true	true	NULL	false	true	NULL	59	-7196	NULL	-1137754500	59	-7196	NULL	10	NULL	NULL	59	59	59	59.0	-7196.0	NULL	-1.1377545E9	59.0	-7196.0	NULL	10.956	NULL	NULL	NULL	-7196.0	1969-12-31 16:00:00.059	1969-12-31 15:59:52.804	NULL	1969-12-18 11:57:25.5	1969-12-31 16:00:59	1969-12-31 14:00:04	NULL	1969-12-31 16:00:00	1969-12-31 16:00:10.956	NULL	NULL	59	-7196	NULL	-1137754500	59.0	-7196.0	NULL	0	1969-12-31 16:00:10.956	NULL	59.0	NULL	0.6367380071391379	NULL

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
new file mode 100644
index 0000000..c04428e
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
@@ -0,0 +1,239 @@
+PREHOOK: query: DROP TABLE IF EXISTS test
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS test
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE test(ts TIMESTAMP) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test
+POSTHOOK: query: CREATE TABLE test(ts TIMESTAMP) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test
+PREHOOK: query: INSERT INTO TABLE test VALUES ('0001-01-01 00:00:00.000000000'), ('9999-12-31 23:59:59.999999999')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__1
+PREHOOK: Output: default@test
+POSTHOOK: query: INSERT INTO TABLE test VALUES ('0001-01-01 00:00:00.000000000'), ('9999-12-31 23:59:59.999999999')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__1
+POSTHOOK: Output: default@test
+POSTHOOK: Lineage: test.ts EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: EXPLAIN
+SELECT ts FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT ts FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test
+            Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ts (type: timestamp)
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT ts FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT ts FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00
+9999-12-31 23:59:59.999999999
+PREHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test
+            Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ts (type: timestamp)
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: min(_col0), max(_col0)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: timestamp), _col1 (type: timestamp)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: min(VALUE._col0), max(VALUE._col1)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: timestamp), _col1 (type: timestamp), (_col1 - _col0) (type: interval_day_time)
+            outputColumnNames: _col0, _col1, _col2
+            Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999
+PREHOOK: query: EXPLAIN
+SELECT ts FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT ts FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test
+            Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ts (type: timestamp)
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT ts FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT ts FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00
+9999-12-31 23:59:59.999999999
+PREHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test
+            Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ts (type: timestamp)
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: min(_col0), max(_col0)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: timestamp), _col1 (type: timestamp)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: min(VALUE._col0), max(VALUE._col1)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: timestamp), _col1 (type: timestamp), (_col1 - _col0) (type: interval_day_time)
+            outputColumnNames: _col0, _col1, _col2
+            Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test
+#### A masked pattern was here ####
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
index 3779f1a..5a7feb5 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
@@ -349,19 +349,6 @@ public class BinarySortableSerializeWrite implements SerializeWrite {
     BinarySortableSerDe.serializeHiveIntervalDayTime(output, vidt, invert);
   }
 
-  @Override
-  public void writeHiveIntervalDayTime(long totalNanos) throws IOException {
-    final boolean invert = columnSortOrderIsDesc[++index];
-
-    // This field is not a null.
-    BinarySortableSerDe.writeByte(output, (byte) 1, invert);
-
-    long totalSecs = DateUtils.getIntervalDayTimeTotalSecondsFromTotalNanos(totalNanos);
-    int nanos = DateUtils.getIntervalDayTimeNanosFromTotalNanos(totalNanos);
-    BinarySortableSerDe.serializeLong(output, totalSecs, invert);
-    BinarySortableSerDe.serializeInt(output, nanos, invert);
-  }
-
   /*
    * DECIMAL.
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
index e6fb8b6..0c70fda 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
@@ -145,9 +145,6 @@ public interface SerializeWrite {
    */
   void writeHiveIntervalDayTime(HiveIntervalDayTime vidt) throws IOException;
 
-  // We provide a faster way to write a hive interval day time without a HiveIntervalDayTime object.
-  void writeHiveIntervalDayTime(long totalNanos) throws IOException;
-
   /*
    * DECIMAL.
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
index a2a6c79..f0201d8 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
@@ -25,6 +25,7 @@ import java.math.BigDecimal;
 import java.sql.Timestamp;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.Date;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
@@ -150,6 +151,21 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
     }
   }
 
+  public static void updateTimestamp(Timestamp timestamp, long secondsAsMillis, int nanos) {
+    ((Date) timestamp).setTime(secondsAsMillis);
+    timestamp.setNanos(nanos);
+  }
+
+  public void setInternal(long secondsAsMillis, int nanos) {
+
+    // This is our way of documenting that we are MUTATING the contents of
+    // this writable's internal timestamp.
+    updateTimestamp(timestamp, secondsAsMillis, nanos);
+
+    bytesEmpty = true;
+    timestampEmpty = false;
+  }
+
   private void clearTimestamp() {
     timestampEmpty = true;
   }
@@ -306,7 +322,20 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
     return seconds + nanos / 1000000000;
   }
 
+  public static long getLong(Timestamp timestamp) {
+    return timestamp.getTime() / 1000;
+  }
 
+  /**
+  *
+  * @return double representation of the timestamp, accurate to nanoseconds
+  */
+ public static double getDouble(Timestamp timestamp) {
+   double seconds, nanos;
+   seconds = millisToSeconds(timestamp.getTime());
+   nanos = timestamp.getNanos();
+   return seconds + nanos / 1000000000;
+ }
 
   public void readFields(DataInput in) throws IOException {
     in.readFully(internalBytes, 0, 4);
@@ -526,6 +555,21 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
     return t;
   }
 
+  public HiveDecimal getHiveDecimal() {
+    if (timestampEmpty) {
+      populateTimestamp();
+    }
+    return getHiveDecimal(timestamp);
+  }
+
+  public static HiveDecimal getHiveDecimal(Timestamp timestamp) {
+    // The BigDecimal class recommends not converting directly from double to BigDecimal,
+    // so we convert through a string...
+    Double timestampDouble = TimestampWritable.getDouble(timestamp);
+    HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
+    return result;
+  }
+
   /**
    * Converts the time in seconds or milliseconds to a timestamp.
    * @param time time in seconds or in milliseconds
@@ -536,6 +580,17 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
       return new Timestamp(intToTimestampInSeconds ?  time * 1000 : time);
   }
 
+  /**
+   * Converts the time in seconds or milliseconds to a timestamp.
+   * @param time time in seconds or in milliseconds
+   * @return the timestamp
+   */
+  public static void setTimestampFromLong(Timestamp timestamp, long time,
+      boolean intToTimestampInSeconds) {
+      // If the time is in seconds, converts it to milliseconds first.
+    timestamp.setTime(intToTimestampInSeconds ?  time * 1000 : time);
+  }
+
   public static Timestamp doubleToTimestamp(double f) {
     long seconds = (long) f;
 
@@ -559,6 +614,37 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
     return t;
   }
 
+  public static void setTimestampFromDouble(Timestamp timestamp, double f) {
+    // Otherwise, BigDecimal throws an exception.  (Support vector operations that sometimes
+    // do work on double Not-a-Number NaN values).
+    if (Double.isNaN(f)) {
+      timestamp.setTime(0);
+      return;
+    }
+    // Algorithm used by TimestampWritable.doubleToTimestamp method.
+    // Allocates a BigDecimal object!
+
+    long seconds = (long) f;
+
+    // We must ensure the exactness of the double's fractional portion.
+    // 0.6 as the fraction part will be converted to 0.59999... and
+    // significantly reduce the savings from binary serialization
+    BigDecimal bd = new BigDecimal(String.valueOf(f));
+    bd = bd.subtract(new BigDecimal(seconds)).multiply(new BigDecimal(1000000000));
+    int nanos = bd.intValue();
+
+    // Convert to millis
+    long millis = seconds * 1000;
+    if (nanos < 0) {
+      millis -= 1000;
+      nanos += 1000000000;
+    }
+    timestamp.setTime(millis);
+
+    // Set remaining fractional portion to nanos
+    timestamp.setNanos(nanos);
+  }
+
   public static void setTimestamp(Timestamp t, byte[] bytes, int offset) {
     boolean hasDecimalOrSecondVInt = hasDecimalOrSecondVInt(bytes[offset]);
     long seconds = (long) TimestampWritable.getSeconds(bytes, offset);
@@ -656,7 +742,7 @@ public class TimestampWritable implements WritableComparable<TimestampWritable>
    * Rounds the number of milliseconds relative to the epoch down to the nearest whole number of
    * seconds. 500 would round to 0, -500 would round to -1.
    */
-  static long millisToSeconds(long millis) {
+  public static long millisToSeconds(long millis) {
     if (millis >= 0) {
       return millis / 1000;
     } else {

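The javadoc in the hunk above pins down the conversion semantics the new TimestampWritable helpers rely on: millisToSeconds floors (500 ms rounds to 0 s, -500 ms to -1 s), and getDouble combines the floored seconds with the nanosecond fraction. A minimal standalone sketch of that arithmetic, using only java.sql.Timestamp; the class and method bodies below are illustrative, not part of this patch:

import java.sql.Timestamp;

public class TimestampConversionSketch {

  // Same flooring contract the javadoc above describes:
  // 500 ms rounds to 0 seconds, -500 ms rounds to -1 second.
  static long millisToSeconds(long millis) {
    if (millis >= 0) {
      return millis / 1000;
    }
    return (millis - 999) / 1000;
  }

  // Seconds since the epoch as a double, accurate to nanoseconds:
  // whole (floored) seconds plus the fractional nanosecond part.
  static double toDouble(Timestamp ts) {
    double seconds = millisToSeconds(ts.getTime());
    double nanos = ts.getNanos();
    return seconds + nanos / 1000000000d;
  }

  public static void main(String[] args) {
    System.out.println(millisToSeconds(500));   // 0
    System.out.println(millisToSeconds(-500));  // -1

    Timestamp ts = new Timestamp(-500);         // 1969-12-31 23:59:59.5 UTC
    System.out.println(toDouble(ts));           // -0.5
  }
}

Run with any JDK; the printed values match the rounding examples given in the javadoc.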
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
index 46f37eb..0f6c6a6 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
@@ -473,26 +473,6 @@ public class LazySimpleSerializeWrite implements SerializeWrite {
     index++;
   }
 
-  @Override
-  public void writeHiveIntervalDayTime(long totalNanos) throws IOException {
-
-    if (index > 0) {
-      output.write(separator);
-    }
-
-    if (hiveIntervalDayTime == null) {
-      hiveIntervalDayTime = new HiveIntervalDayTime();
-    }
-    if (hiveIntervalDayTimeWritable == null) {
-      hiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
-    }
-    DateUtils.setIntervalDayTimeTotalNanos(hiveIntervalDayTime, totalNanos);
-    hiveIntervalDayTimeWritable.set(hiveIntervalDayTime);
-    LazyHiveIntervalDayTime.writeUTF8(output, hiveIntervalDayTimeWritable);
-
-    index++;
-  }
-
   /*
    * DECIMAL.
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
index 2d201ec..56134d7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
@@ -673,42 +673,6 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
     }
   }
 
-  @Override
-  public void writeHiveIntervalDayTime(long totalNanos) throws IOException {
-
-    // Every 8 fields we write a NULL byte.
-    if ((fieldIndex % 8) == 0) {
-      if (fieldIndex > 0) {
-        // Write back previous 8 field's NULL byte.
-        output.writeByte(nullOffset, nullByte);
-        nullByte = 0;
-        nullOffset = output.getLength();
-      }
-      // Allocate next NULL byte.
-      output.reserve(1);
-    }
-
-    // Set bit in NULL byte when a field is NOT NULL.
-    nullByte |= 1 << (fieldIndex % 8);
-
-    if (hiveIntervalDayTime == null) {
-      hiveIntervalDayTime = new HiveIntervalDayTime();
-    }
-    if (hiveIntervalDayTimeWritable == null) {
-      hiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
-    }
-    DateUtils.setIntervalDayTimeTotalNanos(hiveIntervalDayTime, totalNanos);
-    hiveIntervalDayTimeWritable.set(hiveIntervalDayTime);
-    hiveIntervalDayTimeWritable.writeToByteStream(output);
-
-    fieldIndex++;
-
-    if (fieldIndex == fieldCount) {
-      // Write back the final NULL byte before the last fields.
-      output.writeByte(nullOffset, nullByte);
-    }
-  }
-
   /*
    * DECIMAL.
    */

