hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mmccl...@apache.org
Subject [07/12] hive git commit: HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)
Date Sun, 10 Apr 2016 06:59:39 GMT
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
index 3cdf405..7e34965 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
@@ -49,7 +50,7 @@ public class <ClassName> extends VectorAggregateExpression {
 
       private static final long serialVersionUID = 1L;
 
-      transient private final PisaTimestamp value;
+      transient private final Timestamp value;
 
       /**
       * Value is explicitly (re)initialized in reset()
@@ -57,15 +58,15 @@ public class <ClassName> extends VectorAggregateExpression {
       transient private boolean isNull = true;
 
       public Aggregation() {
-        value = new PisaTimestamp();
+        value = new Timestamp(0);
       }
 
       public void checkValue(TimestampColumnVector colVector, int index) {
         if (isNull) {
           isNull = false;
-          colVector.pisaTimestampUpdate(this.value, index);
+          colVector.timestampUpdate(this.value, index);
         } else if (colVector.compareTo(this.value, index) <OperatorSymbol> 0) {
-          colVector.pisaTimestampUpdate(this.value, index);
+          colVector.timestampUpdate(this.value, index);
         }
       }
 
@@ -77,7 +78,7 @@ public class <ClassName> extends VectorAggregateExpression {
       @Override
       public void reset () {
         isNull = true;
-        this.value.reset();
+        this.value.setTime(0);
       }
     }
 
@@ -311,7 +312,7 @@ public class <ClassName> extends VectorAggregateExpression {
           if (inputColVector.noNulls &&
             (myagg.isNull || (inputColVector.compareTo(myagg.value, 0) <OperatorSymbol> 0))) {
             myagg.isNull = false;
-            inputColVector.pisaTimestampUpdate(myagg.value, 0);
+            inputColVector.timestampUpdate(myagg.value, 0);
           }
           return;
         }
@@ -344,10 +345,10 @@ public class <ClassName> extends VectorAggregateExpression {
         if (!isNull[i]) {
           if (myagg.isNull) {
             myagg.isNull = false;
-            inputColVector.pisaTimestampUpdate(myagg.value, i);
+            inputColVector.timestampUpdate(myagg.value, i);
           }
           else if (inputColVector.compareTo(myagg.value, i) <OperatorSymbol> 0) {
-            inputColVector.pisaTimestampUpdate(myagg.value, i);
+            inputColVector.timestampUpdate(myagg.value, i);
           }
         }
       }
@@ -360,14 +361,14 @@ public class <ClassName> extends VectorAggregateExpression {
         int[] selected) {
 
       if (myagg.isNull) {
-        inputColVector.pisaTimestampUpdate(myagg.value, selected[0]);
+        inputColVector.timestampUpdate(myagg.value, selected[0]);
         myagg.isNull = false;
       }
 
       for (int i=0; i< batchSize; ++i) {
         int sel = selected[i];
         if (inputColVector.compareTo(myagg.value, sel) <OperatorSymbol> 0) {
-          inputColVector.pisaTimestampUpdate(myagg.value, sel);
+          inputColVector.timestampUpdate(myagg.value, sel);
         }
       }
     }
@@ -381,11 +382,11 @@ public class <ClassName> extends VectorAggregateExpression {
       for(int i=0;i<batchSize;++i) {
         if (!isNull[i]) {
           if (myagg.isNull) {
-            inputColVector.pisaTimestampUpdate(myagg.value, i);
+            inputColVector.timestampUpdate(myagg.value, i);
             myagg.isNull = false;
           }
           else if (inputColVector.compareTo(myagg.value, i) <OperatorSymbol> 0) {
-            inputColVector.pisaTimestampUpdate(myagg.value, i);
+            inputColVector.timestampUpdate(myagg.value, i);
           }
         }
       }
@@ -396,13 +397,13 @@ public class <ClassName> extends VectorAggregateExpression {
         TimestampColumnVector inputColVector,
         int batchSize) {
       if (myagg.isNull) {
-        inputColVector.pisaTimestampUpdate(myagg.value, 0);
+        inputColVector.timestampUpdate(myagg.value, 0);
         myagg.isNull = false;
       }
 
       for (int i=0;i<batchSize;++i) {
         if (inputColVector.compareTo(myagg.value, i) <OperatorSymbol> 0) {
-          inputColVector.pisaTimestampUpdate(myagg.value, i);
+          inputColVector.timestampUpdate(myagg.value, i);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
index 5de055c..bb795fa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec.vector;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public final class TimestampUtils {
@@ -38,4 +39,11 @@ public final class TimestampUtils {
     timestampWritable.set(timestampColVector.asScratchTimestamp(elementNum));
     return timestampWritable;
   }
+
+  public static HiveIntervalDayTimeWritable intervalDayTimeColumnVectorWritable(
+      IntervalDayTimeColumnVector intervalDayTimeColVector, int elementNum,
+      HiveIntervalDayTimeWritable intervalDayTimeWritable) {
+    intervalDayTimeWritable.set(intervalDayTimeColVector.asScratchIntervalDayTime(elementNum));
+    return intervalDayTimeWritable;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
index 965c027..de0300a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
@@ -288,7 +288,26 @@ public abstract class VectorAssignRow {
     }
   }
 
-  private class IntervalDayTimeAssigner extends AbstractTimestampAssigner {
+  private abstract class AbstractIntervalDayTimeAssigner extends Assigner {
+
+    protected IntervalDayTimeColumnVector colVector;
+
+    AbstractIntervalDayTimeAssigner(int columnIndex) {
+      super(columnIndex);
+    }
+
+    @Override
+    void setColumnVector(VectorizedRowBatch batch) {
+      colVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
+    }
+
+    @Override
+    void forgetColumnVector() {
+      colVector = null;
+    }
+  }
+
+  private class IntervalDayTimeAssigner extends AbstractIntervalDayTimeAssigner {
 
     IntervalDayTimeAssigner(int columnIndex) {
       super(columnIndex);
@@ -301,7 +320,7 @@ public abstract class VectorAssignRow {
       } else {
         HiveIntervalDayTimeWritable idtw = (HiveIntervalDayTimeWritable) object;
         HiveIntervalDayTime idt = idtw.getHiveIntervalDayTime();
-        colVector.set(batchIndex, idt.pisaTimestampUpdate(colVector.useScratchPisaTimestamp()));
+        colVector.set(batchIndex, idt);
         colVector.isNull[batchIndex] = false;
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
index 463c8a6..96b8f78 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
@@ -25,6 +25,7 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -176,6 +177,16 @@ public class VectorColumnAssignFactory {
     }
   }
 
+  private static abstract class VectorIntervalDayTimeColumnAssign
+  extends VectorColumnAssignVectorBase<IntervalDayTimeColumnVector> {
+
+    protected void assignIntervalDayTime(HiveIntervalDayTime value, int index) {
+      outCol.set(index, value);
+    }
+    protected void assignIntervalDayTime(HiveIntervalDayTimeWritable tw, int index) {
+      outCol.set(index, tw.getHiveIntervalDayTime());
+    }
+  }
 
   public static VectorColumnAssign[] buildAssigners(VectorizedRowBatch outputBatch)
       throws HiveException {
@@ -364,7 +375,7 @@ public class VectorColumnAssignFactory {
           }
         }.init(outputBatch, (LongColumnVector) destCol);
         break;
-      case INTERVAL_DAY_TIME:outVCA = new VectorLongColumnAssign() {
+      case INTERVAL_DAY_TIME:outVCA = new VectorIntervalDayTimeColumnAssign() {
         @Override
         public void assignObjectValue(Object val, int destIndex) throws HiveException {
           if (val == null) {
@@ -372,12 +383,12 @@ public class VectorColumnAssignFactory {
           }
           else {
             HiveIntervalDayTimeWritable bw = (HiveIntervalDayTimeWritable) val;
-            assignLong(
-                DateUtils.getIntervalDayTimeTotalNanos(bw.getHiveIntervalDayTime()),
+            assignIntervalDayTime(
+                bw.getHiveIntervalDayTime(),
                 destIndex);
           }
         }
-      }.init(outputBatch, (LongColumnVector) destCol);
+      }.init(outputBatch, (IntervalDayTimeColumnVector) destCol);
       break;
       default:
         throw new HiveException("Incompatible Long vector column and primitive category " +

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java
index 0949145..935b47b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnSetInfo.java
@@ -60,6 +60,11 @@ public class VectorColumnSetInfo {
   protected int[] timestampIndices;
 
   /**
+   * indices of INTERVAL_DAY_TIME primitive keys.
+   */
+  protected int[] intervalDayTimeIndices;
+
+  /**
    * Helper class for looking up a key value based on key index.
    */
   public class KeyLookupHelper {
@@ -68,12 +73,13 @@ public class VectorColumnSetInfo {
     public int stringIndex;
     public int decimalIndex;
     public int timestampIndex;
+    public int intervalDayTimeIndex;
 
     private static final int INDEX_UNUSED = -1;
 
     private void resetIndices() {
         this.longIndex = this.doubleIndex = this.stringIndex = this.decimalIndex =
-            timestampIndex = INDEX_UNUSED;
+            timestampIndex = intervalDayTimeIndex = INDEX_UNUSED;
     }
     public void setLong(int index) {
       resetIndices();
@@ -99,6 +105,11 @@ public class VectorColumnSetInfo {
       resetIndices();
       this.timestampIndex= index;
     }
+
+    public void setIntervalDayTime(int index) {
+      resetIndices();
+      this.intervalDayTimeIndex= index;
+    }
   }
 
   /**
@@ -114,6 +125,7 @@ public class VectorColumnSetInfo {
   protected int stringIndicesIndex;
   protected int decimalIndicesIndex;
   protected int timestampIndicesIndex;
+  protected int intervalDayTimeIndicesIndex;
 
   protected VectorColumnSetInfo(int keyCount) {
     this.keyCount = keyCount;
@@ -130,6 +142,8 @@ public class VectorColumnSetInfo {
     decimalIndicesIndex = 0;
     timestampIndices = new int[this.keyCount];
     timestampIndicesIndex = 0;
+    intervalDayTimeIndices = new int[this.keyCount];
+    intervalDayTimeIndicesIndex = 0;
     indexLookup = new KeyLookupHelper[this.keyCount];
   }
 
@@ -172,6 +186,12 @@ public class VectorColumnSetInfo {
       ++timestampIndicesIndex;
       break;
 
+    case INTERVAL_DAY_TIME:
+      intervalDayTimeIndices[intervalDayTimeIndicesIndex] = addIndex;
+      indexLookup[addIndex].setIntervalDayTime(intervalDayTimeIndicesIndex);
+      ++intervalDayTimeIndicesIndex;
+      break;
+
     default:
       throw new HiveException("Unexpected column vector type " + columnVectorType);
     }
@@ -185,5 +205,6 @@ public class VectorColumnSetInfo {
     stringIndices = Arrays.copyOf(stringIndices, stringIndicesIndex);
     decimalIndices = Arrays.copyOf(decimalIndices, decimalIndicesIndex);
     timestampIndices = Arrays.copyOf(timestampIndices, timestampIndicesIndex);
+    intervalDayTimeIndices = Arrays.copyOf(intervalDayTimeIndices, intervalDayTimeIndicesIndex);
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
index 73476a3..c8e0284 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
@@ -223,6 +223,34 @@ public class VectorCopyRow {
       }
     }
   }
+
+  private class IntervalDayTimeCopyRow extends CopyRow {
+
+    IntervalDayTimeCopyRow(int inColumnIndex, int outColumnIndex) {
+      super(inColumnIndex, outColumnIndex);
+    }
+
+    @Override
+    void copy(VectorizedRowBatch inBatch, int inBatchIndex, VectorizedRowBatch outBatch, int outBatchIndex) {
+      IntervalDayTimeColumnVector inColVector = (IntervalDayTimeColumnVector) inBatch.cols[inColumnIndex];
+      IntervalDayTimeColumnVector outColVector = (IntervalDayTimeColumnVector) outBatch.cols[outColumnIndex];
+
+      if (inColVector.isRepeating) {
+        if (inColVector.noNulls || !inColVector.isNull[0]) {
+          outColVector.setElement(outBatchIndex, 0, inColVector);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      } else {
+        if (inColVector.noNulls || !inColVector.isNull[inBatchIndex]) {
+          outColVector.setElement(outBatchIndex, inBatchIndex, inColVector);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      }
+    }
+  }
+
   private CopyRow[] subRowToBatchCopiersByValue;
   private CopyRow[] subRowToBatchCopiersByReference;
 
@@ -250,6 +278,10 @@ public class VectorCopyRow {
         copyRowByValue = new TimestampCopyRow(inputColumn, outputColumn);
         break;
 
+      case INTERVAL_DAY_TIME:
+        copyRowByValue = new IntervalDayTimeCopyRow(inputColumn, outputColumn);
+        break;
+
       case DOUBLE:
         copyRowByValue = new DoubleCopyRow(inputColumn, outputColumn);
         break;

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
index 50881e7..3eadc12 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
@@ -264,7 +264,14 @@ public final class VectorDeserializeRow<T extends DeserializeRead> {
     }
   }
 
-  private class IntervalDayTimeReader extends AbstractTimestampReader {
+  private abstract class AbstractIntervalDayTimeReader extends Reader<T> {
+
+    AbstractIntervalDayTimeReader(int columnIndex) {
+      super(columnIndex);
+    }
+  }
+
+  private class IntervalDayTimeReader extends AbstractIntervalDayTimeReader {
 
     DeserializeRead.ReadIntervalDayTimeResults readIntervalDayTimeResults;
 
@@ -275,14 +282,14 @@ public final class VectorDeserializeRow<T extends DeserializeRead> {
 
     @Override
     void apply(VectorizedRowBatch batch, int batchIndex) throws IOException {
-      TimestampColumnVector colVector = (TimestampColumnVector) batch.cols[columnIndex];
+      IntervalDayTimeColumnVector colVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
 
       if (deserializeRead.readCheckNull()) {
         VectorizedBatchUtil.setNullColIsNullValue(colVector, batchIndex);
       } else {
         deserializeRead.readIntervalDayTime(readIntervalDayTimeResults);
         HiveIntervalDayTime idt = readIntervalDayTimeResults.getHiveIntervalDayTime();
-        colVector.set(batchIndex, idt.pisaTimestampUpdate(colVector.useScratchPisaTimestamp()));
+        colVector.set(batchIndex, idt);
         colVector.isNull[batchIndex] = false;
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
index 0b9ad55..7b3f781 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
@@ -59,6 +59,9 @@ public class VectorExpressionDescriptor {
   // TimestampColumnVector -->
   //    TIMESTAMP
   //
+  // IntervalDayTimeColumnVector -->
+  //    INTERVAL_DAY_TIME
+  //
   public enum ArgumentType {
     NONE                    (0x000),
     INT_FAMILY              (0x001),
@@ -76,7 +79,6 @@ public class VectorExpressionDescriptor {
     INTERVAL_FAMILY         (INTERVAL_YEAR_MONTH.value | INTERVAL_DAY_TIME.value),
     INT_INTERVAL_YEAR_MONTH     (INT_FAMILY.value | INTERVAL_YEAR_MONTH.value),
     INT_DATE_INTERVAL_YEAR_MONTH  (INT_FAMILY.value | DATE.value | INTERVAL_YEAR_MONTH.value),
-    TIMESTAMP_INTERVAL_DAY_TIME (TIMESTAMP.value | INTERVAL_DAY_TIME.value),
     STRING_DATETIME_FAMILY  (STRING_FAMILY.value | DATETIME_FAMILY.value),
     ALL_FAMILY              (0xFFF);
 
@@ -346,7 +348,7 @@ public class VectorExpressionDescriptor {
           return ve;
         }
       } catch (Exception ex) {
-        throw new HiveException(ex);
+        throw new HiveException("Could not instantiate VectorExpression class " + ve.getSimpleName(), ex);
       }
     }
     if (LOG.isDebugEnabled()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
index 622f4a3..e883f38 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -323,7 +322,26 @@ public abstract class VectorExtractRow {
     }
   }
 
-  private class IntervalDayTimeExtractor extends AbstractTimestampExtractor {
+  private abstract class AbstractIntervalDayTimeExtractor extends Extractor {
+
+    protected IntervalDayTimeColumnVector colVector;
+
+    AbstractIntervalDayTimeExtractor(int columnIndex) {
+      super(columnIndex);
+    }
+
+    @Override
+    void setColumnVector(VectorizedRowBatch batch) {
+      colVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
+    }
+
+    @Override
+    void forgetColumnVector() {
+      colVector = null;
+    }
+  }
+
+  private class IntervalDayTimeExtractor extends AbstractIntervalDayTimeExtractor {
 
     private HiveIntervalDayTime hiveIntervalDayTime;
 
@@ -337,7 +355,7 @@ public abstract class VectorExtractRow {
     Object extract(int batchIndex) {
       int adjustedIndex = (colVector.isRepeating ? 0 : batchIndex);
       if (colVector.noNulls || !colVector.isNull[adjustedIndex]) {
-        hiveIntervalDayTime.set(colVector.asScratchPisaTimestamp(adjustedIndex));
+        hiveIntervalDayTime.set(colVector.asScratchIntervalDayTime(adjustedIndex));
         PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector.set(object, hiveIntervalDayTime);
         return object;
       } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java
index 9f0ac11..50d0452 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupKeyHelper.java
@@ -131,5 +131,17 @@ public class VectorGroupKeyHelper extends VectorColumnSetInfo {
         outputColumnVector.isNull[outputBatch.size] = true;
       }
     }
+    for(int i=0;i<intervalDayTimeIndices.length; ++i) {
+      int keyIndex = intervalDayTimeIndices[i];
+      IntervalDayTimeColumnVector inputColumnVector = (IntervalDayTimeColumnVector) inputBatch.cols[keyIndex];
+      IntervalDayTimeColumnVector outputColumnVector = (IntervalDayTimeColumnVector) outputBatch.cols[keyIndex];
+      if (inputColumnVector.noNulls || !inputColumnVector.isNull[0]) {
+
+        outputColumnVector.setElement(outputBatch.size, 0, inputColumnVector);
+      } else {
+        outputColumnVector.noNulls = false;
+        outputColumnVector.isNull[outputBatch.size] = true;
+      }
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
index b5d8164..8a101a6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
@@ -18,16 +18,16 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import java.sql.Timestamp;
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.KeyWrapper;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 /**
@@ -44,7 +44,8 @@ public class VectorHashKeyWrapper extends KeyWrapper {
   private static final double[] EMPTY_DOUBLE_ARRAY = new double[0];
   private static final byte[][] EMPTY_BYTES_ARRAY = new byte[0][];
   private static final HiveDecimalWritable[] EMPTY_DECIMAL_ARRAY = new HiveDecimalWritable[0];
-  private static final PisaTimestamp[] EMPTY_TIMESTAMP_ARRAY = new PisaTimestamp[0];
+  private static final Timestamp[] EMPTY_TIMESTAMP_ARRAY = new Timestamp[0];
+  private static final HiveIntervalDayTime[] EMPTY_INTERVAL_DAY_TIME_ARRAY = new HiveIntervalDayTime[0];
 
   private long[] longValues;
   private double[] doubleValues;
@@ -55,17 +56,21 @@ public class VectorHashKeyWrapper extends KeyWrapper {
 
   private HiveDecimalWritable[] decimalValues;
 
-  private PisaTimestamp[] timestampValues;
+  private Timestamp[] timestampValues;
+
+  private HiveIntervalDayTime[] intervalDayTimeValues;
 
   private boolean[] isNull;
   private int hashcode;
 
   public VectorHashKeyWrapper(int longValuesCount, int doubleValuesCount,
-          int byteValuesCount, int decimalValuesCount, int timestampValuesCount) {
+          int byteValuesCount, int decimalValuesCount, int timestampValuesCount,
+          int intervalDayTimeValuesCount) {
     longValues = longValuesCount > 0 ? new long[longValuesCount] : EMPTY_LONG_ARRAY;
     doubleValues = doubleValuesCount > 0 ? new double[doubleValuesCount] : EMPTY_DOUBLE_ARRAY;
     decimalValues = decimalValuesCount > 0 ? new HiveDecimalWritable[decimalValuesCount] : EMPTY_DECIMAL_ARRAY;
-    timestampValues = timestampValuesCount > 0 ? new PisaTimestamp[timestampValuesCount] : EMPTY_TIMESTAMP_ARRAY;
+    timestampValues = timestampValuesCount > 0 ? new Timestamp[timestampValuesCount] : EMPTY_TIMESTAMP_ARRAY;
+    intervalDayTimeValues = intervalDayTimeValuesCount > 0 ? new HiveIntervalDayTime[intervalDayTimeValuesCount] : EMPTY_INTERVAL_DAY_TIME_ARRAY;
     for(int i = 0; i < decimalValuesCount; ++i) {
       decimalValues[i] = new HiveDecimalWritable(HiveDecimal.ZERO);
     }
@@ -79,10 +84,13 @@ public class VectorHashKeyWrapper extends KeyWrapper {
       byteLengths = EMPTY_INT_ARRAY;
     }
     for(int i = 0; i < timestampValuesCount; ++i) {
-      timestampValues[i] = new PisaTimestamp();
+      timestampValues[i] = new Timestamp(0);
+    }
+    for(int i = 0; i < intervalDayTimeValuesCount; ++i) {
+      intervalDayTimeValues[i] = new HiveIntervalDayTime();
     }
     isNull = new boolean[longValuesCount + doubleValuesCount + byteValuesCount +
-                         decimalValuesCount + timestampValuesCount];
+                         decimalValuesCount + timestampValuesCount + intervalDayTimeValuesCount];
     hashcode = 0;
   }
 
@@ -108,6 +116,10 @@ public class VectorHashKeyWrapper extends KeyWrapper {
       hashcode ^= timestampValues[i].hashCode();
     }
 
+    for (int i = 0; i < intervalDayTimeValues.length; i++) {
+      hashcode ^= intervalDayTimeValues[i].hashCode();
+    }
+
     // This code, with branches and all, is not executed if there are no string keys
     for (int i = 0; i < byteValues.length; ++i) {
       /*
@@ -146,6 +158,7 @@ public class VectorHashKeyWrapper extends KeyWrapper {
           Arrays.equals(doubleValues, keyThat.doubleValues) &&
           Arrays.equals(decimalValues,  keyThat.decimalValues) &&
           Arrays.equals(timestampValues,  keyThat.timestampValues) &&
+          Arrays.equals(intervalDayTimeValues,  keyThat.intervalDayTimeValues) &&
           Arrays.equals(isNull, keyThat.isNull) &&
           byteValues.length == keyThat.byteValues.length &&
           (0 == byteValues.length || bytesEquals(keyThat));
@@ -212,14 +225,21 @@ public class VectorHashKeyWrapper extends KeyWrapper {
       clone.byteLengths = EMPTY_INT_ARRAY;
     }
     if (timestampValues.length > 0) {
-      clone.timestampValues = new PisaTimestamp[timestampValues.length];
+      clone.timestampValues = new Timestamp[timestampValues.length];
       for(int i = 0; i < timestampValues.length; ++i) {
-        clone.timestampValues[i] = new PisaTimestamp();
-        clone.timestampValues[i].update(timestampValues[i]);
+        clone.timestampValues[i] = (Timestamp) timestampValues[i].clone();
       }
     } else {
       clone.timestampValues = EMPTY_TIMESTAMP_ARRAY;
     }
+    if (intervalDayTimeValues.length > 0) {
+      clone.intervalDayTimeValues = new HiveIntervalDayTime[intervalDayTimeValues.length];
+      for(int i = 0; i < intervalDayTimeValues.length; ++i) {
+        clone.intervalDayTimeValues[i] = (HiveIntervalDayTime) intervalDayTimeValues[i].clone();
+      }
+    } else {
+      clone.intervalDayTimeValues = EMPTY_INTERVAL_DAY_TIME_ARRAY;
+    }
 
     clone.hashcode = hashcode;
     assert clone.equals(this);
@@ -281,14 +301,14 @@ public class VectorHashKeyWrapper extends KeyWrapper {
       isNull[longValues.length + doubleValues.length + byteValues.length + index] = true;
   }
 
-  public void assignTimestamp(int index, PisaTimestamp value) {
-    timestampValues[index].update(value);
+  public void assignTimestamp(int index, Timestamp value) {
+    timestampValues[index] = value;
     isNull[longValues.length + doubleValues.length + byteValues.length +
            decimalValues.length + index] = false;
   }
 
   public void assignTimestamp(int index, TimestampColumnVector colVector, int elementNum) {
-    colVector.pisaTimestampUpdate(timestampValues[index], elementNum);
+    colVector.timestampUpdate(timestampValues[index], elementNum);
     isNull[longValues.length + doubleValues.length + byteValues.length +
            decimalValues.length + index] = false;
   }
@@ -298,15 +318,33 @@ public class VectorHashKeyWrapper extends KeyWrapper {
              decimalValues.length + index] = true;
   }
 
+  public void assignIntervalDayTime(int index, HiveIntervalDayTime value) {
+    intervalDayTimeValues[index].set(value);
+    isNull[longValues.length + doubleValues.length + byteValues.length +
+           decimalValues.length + timestampValues.length + index] = false;
+  }
+
+  public void assignIntervalDayTime(int index, IntervalDayTimeColumnVector colVector, int elementNum) {
+    intervalDayTimeValues[index].set(colVector.asScratchIntervalDayTime(elementNum));
+    isNull[longValues.length + doubleValues.length + byteValues.length +
+           decimalValues.length + timestampValues.length + index] = false;
+  }
+
+  public void assignNullIntervalDayTime(int index) {
+      isNull[longValues.length + doubleValues.length + byteValues.length +
+             decimalValues.length + timestampValues.length + index] = true;
+  }
+
   @Override
   public String toString()
   {
-    return String.format("%d[%s] %d[%s] %d[%s] %d[%s] %d[%s]",
+    return String.format("%d[%s] %d[%s] %d[%s] %d[%s] %d[%s] %d[%s]",
         longValues.length, Arrays.toString(longValues),
         doubleValues.length, Arrays.toString(doubleValues),
         byteValues.length, Arrays.toString(byteValues),
         decimalValues.length, Arrays.toString(decimalValues),
-        timestampValues.length, Arrays.toString(timestampValues));
+        timestampValues.length, Arrays.toString(timestampValues),
+        intervalDayTimeValues.length, Arrays.toString(intervalDayTimeValues));
   }
 
   public boolean getIsLongNull(int i) {
@@ -364,9 +402,17 @@ public class VectorHashKeyWrapper extends KeyWrapper {
                   decimalValues.length + i];
   }
 
-  public PisaTimestamp getTimestamp(int i) {
+  public Timestamp getTimestamp(int i) {
     return timestampValues[i];
   }
 
+  public boolean getIsIntervalDayTimeNull(int i) {
+    return isNull[longValues.length + doubleValues.length + byteValues.length +
+                  decimalValues.length + timestampValues.length + i];
+  }
+
+  public HiveIntervalDayTime getIntervalDayTime(int i) {
+    return intervalDayTimeValues[i];
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
index 1c34124..bfd26ae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
@@ -198,6 +198,28 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
             columnVector.noNulls, columnVector.isRepeating, batch.selectedInUse));
       }
     }
+    for(int i=0;i<intervalDayTimeIndices.length; ++i) {
+      int keyIndex = intervalDayTimeIndices[i];
+      int columnIndex = keyExpressions[keyIndex].getOutputColumn();
+      IntervalDayTimeColumnVector columnVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
+      if (columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
+        assignIntervalDayTimeNoNullsNoRepeatingNoSelection(i, batch.size, columnVector);
+      } else if (columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
+        assignIntervalDayTimeNoNullsNoRepeatingSelection(i, batch.size, columnVector, batch.selected);
+      } else if (columnVector.noNulls && columnVector.isRepeating) {
+        assignIntervalDayTimeNoNullsRepeating(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && !columnVector.isRepeating && !batch.selectedInUse) {
+        assignIntervalDayTimeNullsNoRepeatingNoSelection(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && columnVector.isRepeating) {
+        assignIntervalDayTimeNullsRepeating(i, batch.size, columnVector);
+      } else if (!columnVector.noNulls && !columnVector.isRepeating && batch.selectedInUse) {
+        assignIntervalDayTimeNullsNoRepeatingSelection (i, batch.size, columnVector, batch.selected);
+      } else {
+        throw new HiveException (String.format(
+            "Unimplemented intervalDayTime null/repeat/selected combination %b/%b/%b",
+            columnVector.noNulls, columnVector.isRepeating, batch.selectedInUse));
+      }
+    }
     for(int i=0;i<batch.size;++i) {
       vectorHashKeyWrappers[i].setHashKey();
     }
@@ -596,6 +618,81 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
       vectorHashKeyWrappers[r].assignTimestamp(index, columnVector, r);
     }
   }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, possible nulls, no repeat values, batch selection vector.
+   */
+  private void assignIntervalDayTimeNullsNoRepeatingSelection(int index, int size,
+      IntervalDayTimeColumnVector columnVector, int[] selected) {
+    for(int i = 0; i < size; ++i) {
+      int row = selected[i];
+      if (!columnVector.isNull[row]) {
+        vectorHashKeyWrappers[i].assignIntervalDayTime(index, columnVector, row);
+      } else {
+        vectorHashKeyWrappers[i].assignNullIntervalDayTime(index);
+      }
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, repeat null values.
+   */
+  private void assignIntervalDayTimeNullsRepeating(int index, int size,
+      IntervalDayTimeColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignNullIntervalDayTime(index);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, possible nulls, no repeat values, no selection vector.
+   */
+  private void assignIntervalDayTimeNullsNoRepeatingNoSelection(int index, int size,
+      IntervalDayTimeColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      if (!columnVector.isNull[r]) {
+        vectorHashKeyWrappers[r].assignIntervalDayTime(index, columnVector, r);
+      } else {
+        vectorHashKeyWrappers[r].assignNullIntervalDayTime(index);
+      }
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, no nulls, repeat values, no selection vector.
+   */
+  private void assignIntervalDayTimeNoNullsRepeating(int index, int size, IntervalDayTimeColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignIntervalDayTime(index, columnVector, 0);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, no nulls, no repeat values, batch selection vector.
+   */
+  private void assignIntervalDayTimeNoNullsNoRepeatingSelection(int index, int size,
+      IntervalDayTimeColumnVector columnVector, int[] selected) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignIntervalDayTime(index, columnVector, selected[r]);
+    }
+  }
+
+  /**
+   * Helper method to assign values from a vector column into the key wrapper.
+   * Optimized for IntervalDayTime type, no nulls, no repeat values, no selection vector.
+   */
+  private void assignIntervalDayTimeNoNullsNoRepeatingNoSelection(int index, int size,
+      IntervalDayTimeColumnVector columnVector) {
+    for(int r = 0; r < size; ++r) {
+      vectorHashKeyWrappers[r].assignIntervalDayTime(index, columnVector, r);
+    }
+  }
+
   /**
    * Prepares a VectorHashKeyWrapperBatch to work for a specific set of keys.
    * Computes the fast access lookup indices, preallocates all needed internal arrays.
@@ -638,6 +735,7 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
     compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.stringIndices.length);
     compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.decimalIndices.length);
     compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.timestampIndices.length);
+    compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.intervalDayTimeIndices.length);
     compiledKeyWrapperBatch.keysFixedSize += model.lengthForIntArrayOfSize(compiledKeyWrapperBatch.longIndices.length) * 2;
     compiledKeyWrapperBatch.keysFixedSize +=
         model.lengthForBooleanArrayOfSize(keyExpressions.length);
@@ -647,7 +745,8 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
 
   public VectorHashKeyWrapper allocateKeyWrapper() {
     return new VectorHashKeyWrapper(longIndices.length, doubleIndices.length,
-        stringIndices.length, decimalIndices.length, timestampIndices.length);
+        stringIndices.length, decimalIndices.length, timestampIndices.length,
+        intervalDayTimeIndices.length);
   }
 
   /**
@@ -679,12 +778,15 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
       return kw.getIsTimestampNull(klh.timestampIndex)? null :
           keyOutputWriter.writeValue(
                 kw.getTimestamp(klh.timestampIndex));
-    }
-    else {
+    } else if (klh.intervalDayTimeIndex >= 0) {
+      return kw.getIsIntervalDayTimeNull(klh.intervalDayTimeIndex)? null :
+        keyOutputWriter.writeValue(
+              kw.getIntervalDayTime(klh.intervalDayTimeIndex));
+    } else {
       throw new HiveException(String.format(
-          "Internal inconsistent KeyLookupHelper at index [%d]:%d %d %d %d %d",
+          "Internal inconsistent KeyLookupHelper at index [%d]:%d %d %d %d %d %d",
           i, klh.longIndex, klh.doubleIndex, klh.stringIndex, klh.decimalIndex,
-          klh.timestampIndex));
+          klh.timestampIndex, klh.intervalDayTimeIndex));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
index dea38e8..6af3d99 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
@@ -23,7 +23,6 @@ import java.sql.Timestamp;
 import java.util.List;
 
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -332,11 +331,11 @@ public final class VectorSerializeRow<T extends SerializeWrite> {
 
     @Override
     boolean apply(VectorizedRowBatch batch, int batchIndex) throws IOException {
-      TimestampColumnVector colVector = (TimestampColumnVector) batch.cols[columnIndex];
+      IntervalDayTimeColumnVector colVector = (IntervalDayTimeColumnVector) batch.cols[columnIndex];
 
       if (colVector.isRepeating) {
         if (colVector.noNulls || !colVector.isNull[0]) {
-          hiveIntervalDayTime.set(colVector.asScratchPisaTimestamp(0));
+          hiveIntervalDayTime.set(colVector.asScratchIntervalDayTime(0));
           serializeWrite.writeHiveIntervalDayTime(hiveIntervalDayTime);
           return true;
         } else {
@@ -345,7 +344,7 @@ public final class VectorSerializeRow<T extends SerializeWrite> {
         }
       } else {
         if (colVector.noNulls || !colVector.isNull[batchIndex]) {
-          hiveIntervalDayTime.set(colVector.asScratchPisaTimestamp(batchIndex));
+          hiveIntervalDayTime.set(colVector.asScratchIntervalDayTime(batchIndex));
           serializeWrite.writeHiveIntervalDayTime(hiveIntervalDayTime);
           return true;
         } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 3f95be2..0552f9d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -68,11 +68,13 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUD
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMaxIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFMinIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFStdPopDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFStdPopDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.VectorUDAFStdPopLong;
@@ -2333,10 +2335,12 @@ public class VectorizationContext {
           case INTERVAL_YEAR_MONTH:
             return ColumnVector.Type.LONG;
 
-          case INTERVAL_DAY_TIME:
           case TIMESTAMP:
             return ColumnVector.Type.TIMESTAMP;
 
+          case INTERVAL_DAY_TIME:
+            return ColumnVector.Type.INTERVAL_DAY_TIME;
+
           case FLOAT:
           case DOUBLE:
             return ColumnVector.Type.DOUBLE;
@@ -2369,19 +2373,20 @@ public class VectorizationContext {
     add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           null,                          VectorUDAFMinDouble.class));
     add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.STRING_FAMILY,          null,                          VectorUDAFMinString.class));
     add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.DECIMAL,                null,                          VectorUDAFMinDecimal.class));
-    add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.TIMESTAMP_INTERVAL_DAY_TIME,     null,                          VectorUDAFMinTimestamp.class));
+    add(new AggregateDefinition("min",         VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              null,                          VectorUDAFMinTimestamp.class));
     add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.INT_DATE_INTERVAL_YEAR_MONTH,    null,                          VectorUDAFMaxLong.class));
     add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           null,                          VectorUDAFMaxDouble.class));
     add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.STRING_FAMILY,          null,                          VectorUDAFMaxString.class));
     add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.DECIMAL,                null,                          VectorUDAFMaxDecimal.class));
-    add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.TIMESTAMP_INTERVAL_DAY_TIME,     null,                          VectorUDAFMaxTimestamp.class));
+    add(new AggregateDefinition("max",         VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              null,                          VectorUDAFMaxTimestamp.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.NONE,                   GroupByDesc.Mode.HASH,         VectorUDAFCountStar.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.INT_DATE_INTERVAL_YEAR_MONTH,    GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             GroupByDesc.Mode.MERGEPARTIAL, VectorUDAFCountMerge.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.STRING_FAMILY,          GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
     add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.DECIMAL,                GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
-    add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.TIMESTAMP_INTERVAL_DAY_TIME,     GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
+    add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.TIMESTAMP,              GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
+    add(new AggregateDefinition("count",       VectorExpressionDescriptor.ArgumentType.INTERVAL_DAY_TIME,      GroupByDesc.Mode.HASH,         VectorUDAFCount.class));
     add(new AggregateDefinition("sum",         VectorExpressionDescriptor.ArgumentType.INT_FAMILY,             null,                          VectorUDAFSumLong.class));
     add(new AggregateDefinition("sum",         VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY,           null,                          VectorUDAFSumDouble.class));
     add(new AggregateDefinition("sum",         VectorExpressionDescriptor.ArgumentType.DECIMAL,                null,                          VectorUDAFSumDecimal.class));

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index a68d0cc..be04da8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -144,9 +144,10 @@ public class VectorizedBatchUtil {
           case DATE:
           case INTERVAL_YEAR_MONTH:
             return new LongColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
-          case INTERVAL_DAY_TIME:
           case TIMESTAMP:
             return new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
+          case INTERVAL_DAY_TIME:
+            return new IntervalDayTimeColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
           case FLOAT:
           case DOUBLE:
             return new DoubleColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
@@ -417,14 +418,14 @@ public class VectorizedBatchUtil {
     }
       break;
     case INTERVAL_DAY_TIME: {
-      LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
+      IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) batch.cols[offset + colIndex];
       if (writableCol != null) {
-        HiveIntervalDayTime i = ((HiveIntervalDayTimeWritable) writableCol).getHiveIntervalDayTime();
-        lcv.vector[rowIndex] = DateUtils.getIntervalDayTimeTotalNanos(i);
-        lcv.isNull[rowIndex] = false;
+        HiveIntervalDayTime idt = ((HiveIntervalDayTimeWritable) writableCol).getHiveIntervalDayTime();
+        icv.set(rowIndex, idt);
+        icv.isNull[rowIndex] = false;
       } else {
-        lcv.vector[rowIndex] = 1;
-        setNullColIsNullValue(lcv, rowIndex);
+        icv.setNullValue(rowIndex);
+        setNullColIsNullValue(icv, rowIndex);
       }
     }
       break;
@@ -585,6 +586,8 @@ public class VectorizedBatchUtil {
           decColVector.scale);
     } else if (source instanceof TimestampColumnVector) {
       return new TimestampColumnVector(((TimestampColumnVector) source).getLength());
+    } else if (source instanceof IntervalDayTimeColumnVector) {
+      return new IntervalDayTimeColumnVector(((IntervalDayTimeColumnVector) source).getLength());
     } else if (source instanceof ListColumnVector) {
       ListColumnVector src = (ListColumnVector) source;
       ColumnVector child = cloneColumnVector(src.child);
@@ -688,6 +691,9 @@ public class VectorizedBatchUtil {
             Timestamp timestamp = new Timestamp(0);
             ((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
             sb.append(timestamp.toString());
+          } else if (colVector instanceof IntervalDayTimeColumnVector) {
+            HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
+            sb.append(intervalDayTime.toString());
           } else {
             sb.append("Unknown");
           }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
index 7e79e1e..0724191 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
@@ -400,14 +400,14 @@ public class VectorizedRowBatchCtx {
         }
 
         case INTERVAL_DAY_TIME: {
-          TimestampColumnVector tcv = (TimestampColumnVector) batch.cols[colIndex];
+          IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) batch.cols[colIndex];
           if (value == null) {
-            tcv.noNulls = false;
-            tcv.isNull[0] = true;
-            tcv.isRepeating = true;
+            icv.noNulls = false;
+            icv.isNull[0] = true;
+            icv.isRepeating = true;
           } else {
-            tcv.fill(((HiveIntervalDayTime) value).pisaTimestampUpdate(tcv.useScratchPisaTimestamp()));
-            tcv.isNull[0] = false;
+            icv.fill(((HiveIntervalDayTime) value));
+            icv.isNull[0] = false;
           }
         }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
index 2b0068d..6225ade 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
@@ -44,7 +44,6 @@ public class CastDecimalToTimestamp extends FuncDecimalToTimestamp {
 
   @Override
   protected void func(TimestampColumnVector outV, DecimalColumnVector inV,  int i) {
-    Timestamp timestamp = TimestampWritable.decimalToTimestamp(inV.vector[i].getHiveDecimal());
-    outV.set(i, timestamp);
+    outV.set(i, TimestampWritable.decimalToTimestamp(inV.vector[i].getHiveDecimal()));
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
index 39823fe..31d2f78 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastDoubleToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -37,9 +38,11 @@ public class CastDoubleToTimestamp extends VectorExpression {
     super();
   }
 
-  private void setSecondsWithFractionalNanoseconds(TimestampColumnVector timestampColVector,
+  private void setDouble(TimestampColumnVector timestampColVector,
       double[] vector, int elementNum) {
-    timestampColVector.setTimestampSecondsWithFractionalNanoseconds(elementNum, vector[elementNum]);
+    TimestampWritable.setTimestampFromDouble(
+        timestampColVector.getScratchTimestamp(), vector[elementNum]);
+    timestampColVector.setFromScratchTimestamp(elementNum);
   }
 
   @Override
@@ -66,7 +69,7 @@ public class CastDoubleToTimestamp extends VectorExpression {
     if (inputColVector.isRepeating) {
       //All must be selected otherwise size would be zero
       //Repeating property will not change.
-      setSecondsWithFractionalNanoseconds(outputColVector, vector, 0);
+      setDouble(outputColVector, vector, 0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
       outputIsNull[0] = inputIsNull[0];
       outputColVector.isRepeating = true;
@@ -74,11 +77,11 @@ public class CastDoubleToTimestamp extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          setSecondsWithFractionalNanoseconds(outputColVector, vector, i);
+          setDouble(outputColVector, vector, i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          setSecondsWithFractionalNanoseconds(outputColVector, vector, i);
+          setDouble(outputColVector, vector, i);
         }
       }
       outputColVector.isRepeating = false;
@@ -86,12 +89,12 @@ public class CastDoubleToTimestamp extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          setSecondsWithFractionalNanoseconds(outputColVector, vector, i);
+          setDouble(outputColVector, vector, i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          setSecondsWithFractionalNanoseconds(outputColVector, vector, i);
+          setDouble(outputColVector, vector, i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
index d344d4d..a2ee52d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
@@ -20,8 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastLongToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -40,7 +39,10 @@ public class CastLongToTimestamp extends VectorExpression {
   }
 
   private void setSeconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) {
-    timestampColVector.setTimestampSeconds(elementNum, vector[elementNum]);
+    TimestampWritable.setTimestampFromLong(
+        timestampColVector.getScratchTimestamp(), vector[elementNum],
+        /* intToTimestampInSeconds */ true);
+    timestampColVector.setFromScratchTimestamp(elementNum);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
index a0c947f..01c8810 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
@@ -20,8 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastMillisecondsLongToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -39,6 +38,13 @@ public class CastMillisecondsLongToTimestamp extends VectorExpression {
     super();
   }
 
+  private void setMilliseconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) {
+    TimestampWritable.setTimestampFromLong(
+        timestampColVector.getScratchTimestamp(), vector[elementNum],
+        /* intToTimestampInSeconds */ false);
+    timestampColVector.setFromScratchTimestamp(elementNum);
+  }
+
   @Override
   public void evaluate(VectorizedRowBatch batch) {
 
@@ -63,19 +69,19 @@ public class CastMillisecondsLongToTimestamp extends VectorExpression {
     if (inputColVector.isRepeating) {
       //All must be selected otherwise size would be zero
       //Repeating property will not change.
-      outputColVector.setTimestampMilliseconds(0, vector[0]);
+      setMilliseconds(outputColVector, vector, 0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
-      outputIsNull[0] = inputIsNull[0]; 
+      outputIsNull[0] = inputIsNull[0];
       outputColVector.isRepeating = true;
     } else if (inputColVector.noNulls) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.setTimestampMilliseconds(i, vector[i]);
+          setMilliseconds(outputColVector, vector, i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.setTimestampMilliseconds(i, vector[i]);
+          setMilliseconds(outputColVector, vector, i);
         }
       }
       outputColVector.isRepeating = false;
@@ -83,12 +89,12 @@ public class CastMillisecondsLongToTimestamp extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.setTimestampMilliseconds(i, vector[i]);
+          setMilliseconds(outputColVector, vector, i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.setTimestampMilliseconds(i, vector[i]);
+          setMilliseconds(outputColVector, vector, i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
index a3ddf9f..c8844c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -56,7 +56,7 @@ public class CastStringToIntervalDayTime extends VectorExpression {
     BytesColumnVector inV = (BytesColumnVector) batch.cols[inputColumn];
     int[] sel = batch.selected;
     int n = batch.size;
-    TimestampColumnVector outV = (TimestampColumnVector) batch.cols[outputColumn];
+    IntervalDayTimeColumnVector outV = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
 
     if (n == 0) {
 
@@ -113,11 +113,11 @@ public class CastStringToIntervalDayTime extends VectorExpression {
     }
   }
 
-  private void evaluate(TimestampColumnVector outV, BytesColumnVector inV, int i) {
+  private void evaluate(IntervalDayTimeColumnVector outV, BytesColumnVector inV, int i) {
     try {
       HiveIntervalDayTime interval = HiveIntervalDayTime.valueOf(
           new String(inV.vector[i], inV.start[i], inV.length[i], "UTF-8"));
-      outV.setEpochSecondsAndSignedNanos(i, interval.getTotalSeconds(), interval.getNanos());
+      outV.set(i, interval);
     } catch (Exception e) {
       outV.setNullValue(i);
       outV.isNull[i] = true;

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
index 55b84b1..b8a58cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
@@ -41,8 +41,8 @@ public class CastTimestampToBoolean extends VectorExpression {
   }
 
   private int toBool(TimestampColumnVector timestampColVector, int index) {
-    return (timestampColVector.getEpochDay(index) != 0 ||
-            timestampColVector.getNanoOfDay(index) != 0) ? 1 : 0;
+    return (timestampColVector.getTime(index) != 0 ||
+            timestampColVector.getNanos(index) != 0) ? 1 : 0;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
index 00790b9..4e3e62c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
@@ -44,6 +44,6 @@ public class CastTimestampToDate extends FuncTimestampToLong {
   @Override
   protected void func(LongColumnVector outV, TimestampColumnVector inV, int i) {
 
-    outV.vector[i] = DateWritable.millisToDays(inV.getTimestampMilliseconds(i));
+    outV.vector[i] = DateWritable.millisToDays(inV.getTime(i));
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
index aec104e..e5bfb15 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 /**
  * To be used to cast timestamp to decimal.
@@ -39,11 +39,6 @@ public class CastTimestampToDecimal extends FuncTimestampToDecimal {
 
   @Override
   protected void func(DecimalColumnVector outV, TimestampColumnVector inV, int i) {
-
-    // The BigDecimal class recommends not converting directly from double to BigDecimal,
-    // so we convert like the non-vectorized case and got through a string...
-    Double timestampDouble = inV.getTimestampSecondsWithFractionalNanos(i);
-    HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
-    outV.set(i, result);
+    outV.set(i, TimestampWritable.getHiveDecimal(inV.asScratchTimestamp(i)));
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
index f8737f9..a955d79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
@@ -20,8 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastTimestampToDouble extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -63,7 +62,7 @@ public class CastTimestampToDouble extends VectorExpression {
     if (inputColVector.isRepeating) {
       //All must be selected otherwise size would be zero
       //Repeating property will not change.
-      outputVector[0] =  inputColVector.getTimestampSecondsWithFractionalNanos(0);
+      outputVector[0] = inputColVector.getDouble(0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
       outputIsNull[0] = inputIsNull[0];
       outputColVector.isRepeating = true;
@@ -71,11 +70,11 @@ public class CastTimestampToDouble extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] =  inputColVector.getTimestampSecondsWithFractionalNanos(i);
+          outputVector[i] =  inputColVector.getDouble(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] =  inputColVector.getTimestampSecondsWithFractionalNanos(i);
+          outputVector[i] =  inputColVector.getDouble(i);
         }
       }
       outputColVector.isRepeating = false;
@@ -83,12 +82,12 @@ public class CastTimestampToDouble extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] =  inputColVector.getTimestampSecondsWithFractionalNanos(i);
+          outputVector[i] = inputColVector.getDouble(i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] =  inputColVector.getTimestampSecondsWithFractionalNanos(i);
+          outputVector[i] = inputColVector.getDouble(i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
index 4f53f5c..ba2e823 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
@@ -64,19 +64,19 @@ public class CastTimestampToLong extends VectorExpression {
     if (inputColVector.isRepeating) {
       //All must be selected otherwise size would be zero
       //Repeating property will not change.
-      outputVector[0] =  inputColVector.getEpochSeconds(0);
+      outputVector[0] =  inputColVector.getTimestampAsLong(0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
-      outputIsNull[0] = inputIsNull[0]; 
+      outputIsNull[0] = inputIsNull[0];
       outputColVector.isRepeating = true;
     } else if (inputColVector.noNulls) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] =  inputColVector.getEpochSeconds(i);
+          outputVector[i] =  inputColVector.getTimestampAsLong(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] =  inputColVector.getEpochSeconds(i);
+          outputVector[i] =  inputColVector.getTimestampAsLong(i);
         }
       }
       outputColVector.isRepeating = false;
@@ -84,12 +84,12 @@ public class CastTimestampToLong extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] =  inputColVector.getEpochSeconds(i);
+          outputVector[i] =  inputColVector.getTimestampAsLong(i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] =  inputColVector.getEpochSeconds(i);
+          outputVector[i] =  inputColVector.getTimestampAsLong(i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
index 24ee9bc..8a743f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
@@ -24,13 +24,9 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hive.common.util.DateUtils;
 
 /**
  * Constant is represented as a vector with repeating values.
@@ -44,7 +40,8 @@ public class ConstantVectorExpression extends VectorExpression {
   private double doubleValue = 0;
   private byte[] bytesValue = null;
   private HiveDecimal decimalValue = null;
-  private PisaTimestamp timestampValue = null;
+  private Timestamp timestampValue = null;
+  private HiveIntervalDayTime intervalDayTimeValue = null;
   private boolean isNullValue = false;
 
   private ColumnVector.Type type;
@@ -97,7 +94,7 @@ public class ConstantVectorExpression extends VectorExpression {
   }
 
   public ConstantVectorExpression(int outputColumn, HiveIntervalDayTime value) {
-    this(outputColumn, "timestamp");
+    this(outputColumn, "interval_day_time");
     setIntervalDayTimeValue(value);
   }
 
@@ -165,6 +162,17 @@ public class ConstantVectorExpression extends VectorExpression {
     }
   }
 
+  private void evaluateIntervalDayTime(VectorizedRowBatch vrg) {
+    IntervalDayTimeColumnVector dcv = (IntervalDayTimeColumnVector) vrg.cols[outputColumn];
+    dcv.isRepeating = true;
+    dcv.noNulls = !isNullValue;
+    if (!isNullValue) {
+      dcv.set(0, intervalDayTimeValue);
+    } else {
+      dcv.isNull[0] = true;
+    }
+  }
+
   @Override
   public void evaluate(VectorizedRowBatch vrg) {
     switch (type) {
@@ -183,6 +191,9 @@ public class ConstantVectorExpression extends VectorExpression {
     case TIMESTAMP:
       evaluateTimestamp(vrg);
       break;
+    case INTERVAL_DAY_TIME:
+      evaluateIntervalDayTime(vrg);
+      break;
     }
   }
 
@@ -225,16 +236,19 @@ public class ConstantVectorExpression extends VectorExpression {
   }
 
   public void setTimestampValue(Timestamp timestampValue) {
-    this.timestampValue = new PisaTimestamp(timestampValue);
+    this.timestampValue = timestampValue;
   }
 
-  public void setIntervalDayTimeValue(HiveIntervalDayTime intervalDayTimeValue) {
-    this.timestampValue = intervalDayTimeValue.pisaTimestampUpdate(new PisaTimestamp());
+  public Timestamp getTimestampValue() {
+    return timestampValue;
   }
 
+  public void setIntervalDayTimeValue(HiveIntervalDayTime intervalDayTimeValue) {
+    this.intervalDayTimeValue = intervalDayTimeValue;
+  }
 
-  public PisaTimestamp getTimestampValue() {
-    return timestampValue;
+  public HiveIntervalDayTime getIntervalDayTimeValue() {
+    return intervalDayTimeValue;
   }
 
   public String getTypeString() {

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
index 8d2a186..fafacce 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.*;
@@ -26,7 +28,7 @@ import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 // A type date (LongColumnVector storing epoch days) minus a type date produces a
-// type interval_day_time (TimestampColumnVector storing nanosecond interval in 2 longs).
+// type interval_day_time (IntervalDayTimeColumnVector storing nanosecond interval in 2 longs).
 public class DateColSubtractDateColumn extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
@@ -34,16 +36,16 @@ public class DateColSubtractDateColumn extends VectorExpression {
   private int colNum1;
   private int colNum2;
   private int outputColumn;
-  private PisaTimestamp scratchPisaTimestamp1;
-  private PisaTimestamp scratchPisaTimestamp2;
+  private Timestamp scratchTimestamp1;
+  private Timestamp scratchTimestamp2;
   private DateTimeMath dtm = new DateTimeMath();
 
   public DateColSubtractDateColumn(int colNum1, int colNum2, int outputColumn) {
     this.colNum1 = colNum1;
     this.colNum2 = colNum2;
     this.outputColumn = outputColumn;
-    scratchPisaTimestamp1 = new PisaTimestamp();
-    scratchPisaTimestamp2 = new PisaTimestamp();
+    scratchTimestamp1 = new Timestamp(0);
+    scratchTimestamp2 = new Timestamp(0);
   }
 
   public DateColSubtractDateColumn() {
@@ -63,7 +65,7 @@ public class DateColSubtractDateColumn extends VectorExpression {
     LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
 
     // Output is type interval_day_time.
-    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
 
     int[] sel = batch.selected;
     int n = batch.size;
@@ -80,73 +82,69 @@ public class DateColSubtractDateColumn extends VectorExpression {
       || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
       || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
 
-    // Handle nulls first  
+    // Handle nulls first
     NullUtil.propagateNullsColCol(
       inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
 
+    HiveIntervalDayTime resultIntervalDayTime = outputColVector.getScratchIntervalDayTime();
+
     /* Disregard nulls for processing. In other words,
      * the arithmetic operation is performed even if one or
      * more inputs are null. This is to improve speed by avoiding
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      outputColVector.subtract(
-          scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0])),
-          scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[0])),
-          0);
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+      outputColVector.setFromScratchIntervalDayTime(0);
     } else if (inputColVector1.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
       if (batch.selectedInUse) {
-        scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0]));
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              scratchPisaTimestamp1,
-              scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
-        scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0]));
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              scratchPisaTimestamp1,
-              scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       }
     } else if (inputColVector2.isRepeating) {
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
       if (batch.selectedInUse) {
-        scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[0]));
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              scratchPisaTimestamp2,
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
-        scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[0]));
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              scratchPisaTimestamp2,
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       }
     } else {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       }
     }
@@ -157,7 +155,7 @@ public class DateColSubtractDateColumn extends VectorExpression {
      * in complex arithmetic expressions like col2 / (col1 - 1)
      * in the case when some col1 entries are null.
      */
-    NullUtil.setNullDataEntriesTimestamp(outputColVector, batch.selectedInUse, sel, n);
+    NullUtil.setNullDataEntriesIntervalDayTime(outputColVector, batch.selectedInUse, sel, n);
   }
 
   @Override


Mime
View raw message