hive-commits mailing list archives

From jcama...@apache.org
Subject [2/2] hive git commit: Revert "HIVE-19498: Vectorization: CAST expressions produce wrong results (Matt McCline, reviewed by Teddy Choi)"
Date Thu, 24 May 2018 22:48:11 GMT
Revert "HIVE-19498: Vectorization: CAST expressions produce wrong results (Matt McCline, reviewed by Teddy Choi)"

This reverts commit 25aaf7db0d62d6007c79213a33dae0fb8ac9a7be.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fc040d52
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fc040d52
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fc040d52

Branch: refs/heads/master
Commit: fc040d52c051e4655b2cb80d047a7a18a7f08e7a
Parents: 25aaf7d
Author: Jesus Camacho Rodriguez <jcamacho@apache.org>
Authored: Thu May 24 15:48:04 2018 -0700
Committer: Jesus Camacho Rodriguez <jcamacho@apache.org>
Committed: Thu May 24 15:48:04 2018 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   4 -
 .../UDAFTemplates/VectorUDAFAvgDecimal.txt      |   2 +-
 .../UDAFTemplates/VectorUDAFAvgDecimalMerge.txt |   2 +-
 .../UDAFTemplates/VectorUDAFMinMaxDecimal.txt   |   2 +-
 .../exec/vector/VectorExpressionDescriptor.java |  72 ++-
 .../exec/vector/VectorHashKeyWrapperBatch.java  |   2 +-
 .../ql/exec/vector/VectorizationContext.java    |  26 +-
 .../ql/exec/vector/VectorizedBatchUtil.java     |   4 -
 .../vector/expressions/CastDateToBoolean.java   |  61 ---
 .../expressions/CastDecimalToDecimal.java       |   2 +-
 .../vector/expressions/CastDoubleToDecimal.java |  15 +-
 .../vector/expressions/CastFloatToDecimal.java  |  65 ---
 .../vector/expressions/CastLongToDecimal.java   |   2 +-
 .../vector/expressions/CastStringToDecimal.java |   2 +-
 .../vector/expressions/CastTimestampToLong.java |  60 +--
 .../expressions/NullVectorExpression.java       |  56 ---
 .../aggregates/VectorUDAFSumDecimal.java        |   2 +-
 .../VectorUDAFSumDecimal64ToDecimal.java        |   2 +-
 .../VectorPTFEvaluatorDecimalFirstValue.java    |   2 +-
 .../exec/vector/ptf/VectorPTFGroupBatches.java  |   2 +-
 .../hive/ql/plan/ExprNodeGenericFuncDesc.java   |   5 +-
 .../apache/hadoop/hive/ql/udf/UDFToBoolean.java |   4 +-
 .../apache/hadoop/hive/ql/udf/UDFToByte.java    |   7 +-
 .../apache/hadoop/hive/ql/udf/UDFToInteger.java |   7 +-
 .../apache/hadoop/hive/ql/udf/UDFToShort.java   |   7 +-
 .../ql/exec/vector/VectorRandomRowSource.java   |  68 +--
 .../expressions/TestVectorCastStatement.java    | 502 -------------------
 .../vector/expressions/TestVectorTypeCasts.java |   4 -
 .../llap/vector_decimal_aggregate.q.out         |   4 +-
 .../clientpositive/spark/timestamp_1.q.out      |  24 +-
 .../clientpositive/spark/timestamp_2.q.out      |  24 +-
 .../clientpositive/spark/timestamp_3.q.out      |   4 +-
 .../spark/vector_decimal_aggregate.q.out        |  36 +-
 .../results/clientpositive/timestamp_1.q.out    |  24 +-
 .../results/clientpositive/timestamp_2.q.out    |  24 +-
 .../results/clientpositive/timestamp_3.q.out    |   4 +-
 .../vector_decimal_aggregate.q.out              |  32 +-
 .../apache/hadoop/hive/tools/GenVectorCode.java |   2 +
 38 files changed, 197 insertions(+), 970 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7ed3a9c..931533a 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3554,10 +3554,6 @@ public class HiveConf extends Configuration {
         "1. chosen : use VectorUDFAdaptor for a small set of UDFs that were chosen for good performance\n" +
         "2. all    : use VectorUDFAdaptor for all UDFs"
     ),
-    HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE("hive.test.vectorized.adaptor.override", false,
-        "internal use only, used to force always using the VectorUDFAdaptor.\n" +
-        "The default is false, of course",
-        true),
     HIVE_VECTORIZATION_PTF_ENABLED("hive.vectorized.execution.ptf.enabled", true,
         "This flag should be set to true to enable vectorized mode of the PTF of query execution.\n" +
         "The default value is true."),

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimal.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimal.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimal.txt
index f512639..fa72171 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimal.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimal.txt
@@ -522,7 +522,7 @@ public class <ClassName> extends VectorAggregateExpression {
     fields[AVERAGE_COUNT_FIELD_INDEX].isNull[batchIndex] = false;
     ((LongColumnVector) fields[AVERAGE_COUNT_FIELD_INDEX]).vector[batchIndex] = myagg.count;
     fields[AVERAGE_SUM_FIELD_INDEX].isNull[batchIndex] = false;
-    ((DecimalColumnVector) fields[AVERAGE_SUM_FIELD_INDEX]).set(batchIndex, myagg.sum);
+    ((DecimalColumnVector) fields[AVERAGE_SUM_FIELD_INDEX]).vector[batchIndex].set(myagg.sum);
 
     // NULL out useless source field.
     ColumnVector sourceColVector = (ColumnVector) fields[AVERAGE_SOURCE_FIELD_INDEX];
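
This one-line change is the heart of the revert and recurs in the decimal hunks below. DecimalColumnVector.set(batchIndex, value) routes the write through the column vector, which (per the comment in CastDecimalToDecimal below) enforces the output precision and scale, whereas vector[batchIndex].set(value) copies the decimal as-is. A minimal sketch contrasting the two, assuming the storage-api classes used throughout this diff:

    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DecimalWriteSketch {
      // HIVE-19498 direction: the column vector applies its own
      // precision/scale to the incoming value on write.
      static void writeEnforced(DecimalColumnVector out, int i, HiveDecimalWritable v) {
        out.set(i, v);
      }

      // Reverted direction: raw copy into the backing array; no
      // precision/scale enforcement happens at this point.
      static void writeRaw(DecimalColumnVector out, int i, HiveDecimalWritable v) {
        out.vector[i].set(v);
      }
    }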

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimalMerge.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimalMerge.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimalMerge.txt
index 5fe9256..e273d07 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimalMerge.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimalMerge.txt
@@ -532,7 +532,7 @@ public class <ClassName> extends VectorAggregateExpression {
     fields[AVERAGE_COUNT_FIELD_INDEX].isNull[batchIndex] = false;
     ((LongColumnVector) fields[AVERAGE_COUNT_FIELD_INDEX]).vector[batchIndex] = myagg.mergeCount;
     fields[AVERAGE_SUM_FIELD_INDEX].isNull[batchIndex] = false;
-    ((DecimalColumnVector) fields[AVERAGE_SUM_FIELD_INDEX]).set(batchIndex, myagg.mergeSum);
+    ((DecimalColumnVector) fields[AVERAGE_SUM_FIELD_INDEX]).vector[batchIndex].set(myagg.mergeSum);
 
     // NULL out useless source field.
     ColumnVector sourceColVector = (ColumnVector) fields[AVERAGE_SOURCE_FIELD_INDEX];

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
index 9c8ebcc..9fe85d3 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
@@ -471,6 +471,6 @@ public class <ClassName> extends VectorAggregateExpression {
       return;
     }
     outputColVector.isNull[batchIndex] = false;
-    outputColVector.set(batchIndex, myagg.value);
+    outputColVector.vector[batchIndex].set(myagg.value);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
index 2f8a419..2d73ab4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
@@ -63,41 +63,39 @@ public class VectorExpressionDescriptor {
   //    INTERVAL_DAY_TIME
   //
   public enum ArgumentType {
-    NONE                    (0x000000L),
-    INT_FAMILY              (0x000001L),
-    FLOAT                   (0x000002L),
-    DOUBLE                  (0x000004L),
-    FLOAT_FAMILY            (FLOAT.value | DOUBLE.value),
-    DECIMAL                 (0x000008L),
-    STRING                  (0x000010L),
-    CHAR                    (0x000020L),
-    VARCHAR                 (0x000040L),
+    NONE                    (0x000),
+    INT_FAMILY              (0x001),
+    FLOAT_FAMILY            (0x002),
+    DECIMAL                 (0x004),
+    STRING                  (0x008),
+    CHAR                    (0x010),
+    VARCHAR                 (0x020),
     STRING_FAMILY           (STRING.value | CHAR.value | VARCHAR.value),
-    DATE                    (0x000080L),
-    TIMESTAMP               (0x000100L),
-    INTERVAL_YEAR_MONTH     (0x000200L),
-    INTERVAL_DAY_TIME       (0x000400L),
-    BINARY                  (0x000800L),
-    STRUCT                  (0x001000L),
-    DECIMAL_64              (0x002000L),
-    LIST                    (0x004000L),
-    MAP                     (0x008000L),
-    VOID                    (0x010000L),
+    DATE                    (0x040),
+    TIMESTAMP               (0x080),
+    INTERVAL_YEAR_MONTH     (0x100),
+    INTERVAL_DAY_TIME       (0x200),
+    BINARY                  (0x400),
+    STRUCT                  (0x800),
+    DECIMAL_64              (0x1000),
+    LIST                    (0x2000),
+    MAP                     (0x4000),
+    VOID                    (0x8000),
     INT_DECIMAL_64_FAMILY   (INT_FAMILY.value | DECIMAL_64.value),
     DATETIME_FAMILY         (DATE.value | TIMESTAMP.value),
     INTERVAL_FAMILY         (INTERVAL_YEAR_MONTH.value | INTERVAL_DAY_TIME.value),
     INT_INTERVAL_YEAR_MONTH     (INT_FAMILY.value | INTERVAL_YEAR_MONTH.value),
     INT_DATE_INTERVAL_YEAR_MONTH  (INT_FAMILY.value | DATE.value | INTERVAL_YEAR_MONTH.value),
     STRING_DATETIME_FAMILY  (STRING_FAMILY.value | DATETIME_FAMILY.value),
-    ALL_FAMILY              (0xFFFFFFL);
+    ALL_FAMILY              (0xFFFF);
 
-    private final long value;
+    private final int value;
 
-    ArgumentType(long val) {
+    ArgumentType(int val) {
       this.value = val;
     }
 
-    public long getValue() {
+    public int getValue() {
       return value;
     }
 
@@ -163,6 +161,34 @@ public class VectorExpressionDescriptor {
     public boolean isSameTypeOrFamily(ArgumentType other) {
       return ((value & other.value) != 0);
     }
+
+    public static String getVectorColumnSimpleName(ArgumentType argType) {
+      if (argType == INT_FAMILY ||
+          argType == DATE ||
+          argType == INTERVAL_YEAR_MONTH
+          ) {
+        return "Long";
+      } else if (argType == TIMESTAMP ||
+                 argType == INTERVAL_DAY_TIME) {
+        return "Timestamp";
+      } else if (argType == FLOAT_FAMILY) {
+        return "Double";
+      } else if (argType == DECIMAL) {
+        return "Decimal";
+      } else if (argType == STRING ||
+                 argType == CHAR ||
+                 argType == VARCHAR ||
+                 argType == BINARY) {
+        return "String";
+      } else {
+        return "None";
+      }
+    }
+
+    public static String getVectorColumnSimpleName(String hiveTypeName) {
+      ArgumentType argType = fromHiveTypeName(hiveTypeName);
+      return getVectorColumnSimpleName(argType);
+    }
   }
 
   public enum InputExpressionType {
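
The ArgumentType revert above narrows the constants from long back to int and re-merges FLOAT and DOUBLE into a single FLOAT_FAMILY bit. The scheme works because every leaf type owns one distinct bit, so a family is just the bitwise OR of its members and isSameTypeOrFamily reduces to a mask intersection. A self-contained sketch of the pattern (the enum and its NUMERIC_FAMILY constant are illustrative, not Hive's):

    // Illustrative bitmask-family enum mirroring ArgumentType's scheme.
    public enum TypeMask {
      NONE(0x000),
      INT_FAMILY(0x001),
      FLOAT_FAMILY(0x002),
      DECIMAL(0x004),
      // A family is the OR of its member bits.
      NUMERIC_FAMILY(0x001 | 0x002 | 0x004);

      private final int value;

      TypeMask(int value) { this.value = value; }

      // Same test as ArgumentType.isSameTypeOrFamily above: two masks
      // match when they share at least one bit.
      public boolean isSameTypeOrFamily(TypeMask other) {
        return (value & other.value) != 0;
      }
    }

For example, DECIMAL.isSameTypeOrFamily(NUMERIC_FAMILY) is true because the 0x004 bit is shared, while NONE matches nothing since its mask is zero.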

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
index 689d3c3..0e6f8c5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
@@ -1037,7 +1037,7 @@ public class VectorHashKeyWrapperBatch extends VectorColumnSetInfo {
           kw.getByteLength(columnTypeSpecificIndex));
       break;
     case DECIMAL:
-      ((DecimalColumnVector) colVector).set(batchIndex,
+      ((DecimalColumnVector) colVector).vector[batchIndex].set(
           kw.getDecimal(columnTypeSpecificIndex));
       break;
     case TIMESTAMP:

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 45ceffc..491a6b1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -133,7 +133,6 @@ public class VectorizationContext {
   }
 
   private HiveVectorAdaptorUsageMode hiveVectorAdaptorUsageMode;
-  private boolean testVectorAdaptorOverride;
 
   public enum HiveVectorIfStmtMode {
     ADAPTOR,
@@ -159,8 +158,6 @@ public class VectorizationContext {
 
   private void setHiveConfVars(HiveConf hiveConf) {
     hiveVectorAdaptorUsageMode = HiveVectorAdaptorUsageMode.getHiveConfValue(hiveConf);
-    testVectorAdaptorOverride =
-        HiveConf.getBoolVar(hiveConf, ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE);
     hiveVectorIfStmtMode = HiveVectorIfStmtMode.getHiveConfValue(hiveConf);
     this.reuseScratchColumns =
         HiveConf.getBoolVar(hiveConf, ConfVars.HIVE_VECTORIZATION_TESTING_REUSE_SCRATCH_COLUMNS);
@@ -174,11 +171,8 @@ public class VectorizationContext {
 
   private void copyHiveConfVars(VectorizationContext vContextEnvironment) {
     hiveVectorAdaptorUsageMode = vContextEnvironment.hiveVectorAdaptorUsageMode;
-    testVectorAdaptorOverride = vContextEnvironment.testVectorAdaptorOverride;
     hiveVectorIfStmtMode = vContextEnvironment.hiveVectorIfStmtMode;
     this.reuseScratchColumns = vContextEnvironment.reuseScratchColumns;
-    useCheckedVectorExpressions = vContextEnvironment.useCheckedVectorExpressions;
-    adaptorSuppressEvaluateExceptions = vContextEnvironment.adaptorSuppressEvaluateExceptions;
     this.ocm.setReuseColumns(reuseScratchColumns);
   }
 
@@ -807,12 +801,8 @@ public class VectorizationContext {
       // Note: this is a no-op for custom UDFs
       List<ExprNodeDesc> childExpressions = getChildExpressionsWithImplicitCast(expr.getGenericUDF(),
           exprDesc.getChildren(), exprDesc.getTypeInfo());
-
-      // Are we forcing the usage of VectorUDFAdaptor for test purposes?
-      if (!testVectorAdaptorOverride) {
-        ve = getGenericUdfVectorExpression(expr.getGenericUDF(),
-            childExpressions, mode, exprDesc.getTypeInfo());
-      }
+      ve = getGenericUdfVectorExpression(expr.getGenericUDF(),
+          childExpressions, mode, exprDesc.getTypeInfo());
       if (ve == null) {
         // Ok, no vectorized class available.  No problem -- try to use the VectorUDFAdaptor
         // when configured.
@@ -1114,7 +1104,7 @@ public class VectorizationContext {
     return HiveDecimalUtils.getPrecisionForType(typeInfo);
   }
 
-  public static GenericUDF getGenericUDFForCast(TypeInfo castType) throws HiveException {
+  private GenericUDF getGenericUDFForCast(TypeInfo castType) throws HiveException {
     UDF udfClass = null;
     GenericUDF genericUdf = null;
     switch (((PrimitiveTypeInfo) castType).getPrimitiveCategory()) {
@@ -1175,10 +1165,8 @@ public class VectorizationContext {
       if (udfClass == null) {
         throw new HiveException("Could not add implicit cast for type "+castType.getTypeName());
       }
-      GenericUDFBridge genericUDFBridge = new GenericUDFBridge();
-      genericUDFBridge.setUdfClassName(udfClass.getClass().getName());
-      genericUDFBridge.setUdfName(udfClass.getClass().getSimpleName());
-      genericUdf = genericUDFBridge;
+      genericUdf = new GenericUDFBridge();
+      ((GenericUDFBridge) genericUdf).setUdfClassName(udfClass.getClass().getName());
     }
     if (genericUdf instanceof SettableUDF) {
       ((SettableUDF) genericUdf).setTypeInfo(castType);
@@ -2748,9 +2736,7 @@ public class VectorizationContext {
     }
     if (isIntFamily(inputType)) {
       return createVectorExpression(CastLongToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
-    } else if (inputType.equals("float")) {
-      return createVectorExpression(CastFloatToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
-    } else if (inputType.equals("double")) {
+    } else if (isFloatFamily(inputType)) {
       return createVectorExpression(CastDoubleToDecimal.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
     } else if (decimalTypePattern.matcher(inputType).matches()) {
       if (child instanceof ExprNodeColumnDesc) {
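
Two behavioral changes sit in this file. First, getGenericUDFForCast loses its public static form and, when bridging a legacy UDF, the reverted code records only the UDF class name on GenericUDFBridge; HIVE-19498 additionally set a display name via setUdfName (the ExprNodeGenericFuncDesc hunk below removes the matching null-safe fallback). Second, CastFloatToDecimal is deleted, so float and double casts both go through CastDoubleToDecimal via isFloatFamily. A hedged sketch of the two bridge constructions, using only the setters visible in this diff:

    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;

    public class BridgeSetupSketch {
      // Reverted construction: only the class name is recorded, so
      // getUdfName() may be null when the plan is printed.
      static GenericUDFBridge reverted(Class<?> udfClass) {
        GenericUDFBridge bridge = new GenericUDFBridge();
        bridge.setUdfClassName(udfClass.getName());
        return bridge;
      }

      // HIVE-19498 construction: also records a human-readable name.
      static GenericUDFBridge patched(Class<?> udfClass) {
        GenericUDFBridge bridge = new GenericUDFBridge();
        bridge.setUdfClassName(udfClass.getName());
        bridge.setUdfName(udfClass.getSimpleName());
        return bridge;
      }
    }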

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index d51d44a..d92ec32 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -880,10 +880,6 @@ public class VectorizedBatchUtil {
     return newBatch;
   }
 
-  public static Writable getPrimitiveWritable(TypeInfo typeInfo) {
-    return getPrimitiveWritable(((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory());
-  }
-
   public static Writable getPrimitiveWritable(PrimitiveCategory primitiveCategory) {
     switch (primitiveCategory) {
     case VOID:

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToBoolean.java
deleted file mode 100644
index 117e814..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToBoolean.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-
-/*
- * Comment from BooleanWritable evaluate(DateWritable d)
- *     // date value to boolean doesn't make any sense.
- * So, we always set the output to NULL.
- */
-public class CastDateToBoolean extends NullVectorExpression {
-  private static final long serialVersionUID = 1L;
-
-  private final int colNum;
-
-  public CastDateToBoolean(int colNum, int outputColumnNum) {
-    super(outputColumnNum);
-    this.colNum = colNum;
-  }
-
-  public CastDateToBoolean() {
-    super();
-
-    // Dummy final assignments.
-    colNum = -1;
-  }
-
-  @Override
-  public String vectorExpressionParameters() {
-    return getColumnParamString(0, colNum);
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(1)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("date"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java
index bcf55cd..5e0d570 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java
@@ -57,7 +57,7 @@ public class CastDecimalToDecimal extends VectorExpression {
    */
   protected void convert(DecimalColumnVector outputColVector, DecimalColumnVector inputColVector, int i) {
     // The set routine enforces precision and scale.
-    outputColVector.set(i, inputColVector.vector[i]);
+    outputColVector.vector[i].set(inputColVector.vector[i]);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java
index b6a7a26..4619724 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 /**
@@ -42,21 +41,9 @@ public class CastDoubleToDecimal extends FuncDoubleToDecimal {
   protected void func(DecimalColumnVector outV, DoubleColumnVector inV, int i) {
     HiveDecimalWritable decWritable = outV.vector[i];
     decWritable.setFromDouble(inV.vector[i]);
-    if (!decWritable.mutateEnforcePrecisionScale(outV.precision, outV.scale)) {
+    if (!decWritable.isSet()) {
       outV.isNull[i] = true;
       outV.noNulls = false;
     }
   }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
-    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(1)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.FLOAT)
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN);
-    return b.build();
-  }
 }
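
Both versions of CastDoubleToDecimal.func rely on HiveDecimalWritable's "unset" state to signal an unrepresentable result. The difference is that HIVE-19498 also coerced the value into the output column's precision and scale via mutateEnforcePrecisionScale, turning overflow into NULL, while the reverted isSet() check catches only a failed conversion (e.g. NaN or infinity). A sketch of both idioms, using only the calls visible above:

    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class DoubleToDecimalSketch {
      // Reverted idiom: an unset writable means conversion failed.
      static void revertedFunc(DecimalColumnVector outV, DoubleColumnVector inV, int i) {
        HiveDecimalWritable decWritable = outV.vector[i];
        decWritable.setFromDouble(inV.vector[i]);
        if (!decWritable.isSet()) {
          outV.isNull[i] = true;
          outV.noNulls = false;
        }
      }

      // HIVE-19498 idiom: additionally nulls values that overflow the
      // output column's declared precision/scale.
      static void patchedFunc(DecimalColumnVector outV, DoubleColumnVector inV, int i) {
        HiveDecimalWritable decWritable = outV.vector[i];
        decWritable.setFromDouble(inV.vector[i]);
        if (!decWritable.mutateEnforcePrecisionScale(outV.precision, outV.scale)) {
          outV.isNull[i] = true;
          outV.noNulls = false;
        }
      }
    }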

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastFloatToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastFloatToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastFloatToDecimal.java
deleted file mode 100644
index 0462334..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastFloatToDecimal.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-
-/**
- * Cast input float to a decimal. Get target value scale from output column vector.
- */
-public class CastFloatToDecimal extends FuncDoubleToDecimal {
-
-  private static final long serialVersionUID = 1L;
-
-  public CastFloatToDecimal() {
-    super();
-  }
-
-  public CastFloatToDecimal(int inputColumn, int outputColumnNum) {
-    super(inputColumn, outputColumnNum);
-  }
-
-  @Override
-  protected void func(DecimalColumnVector outV, DoubleColumnVector inV, int i) {
-    HiveDecimalWritable decWritable = outV.vector[i];
-
-    // TEMPORARY: In order to avoid a new version of storage-api, do the conversion here...
-    byte[] floatBytes = Float.toString((float) inV.vector[i]).getBytes();
-    decWritable.setFromBytes(floatBytes, 0, floatBytes.length);
-    if (!decWritable.mutateEnforcePrecisionScale(outV.precision, outV.scale)) {
-      outV.isNull[i] = true;
-      outV.noNulls = false;
-    }
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder();
-    b.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(1)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.FLOAT)
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN);
-    return b.build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java
index fa88e3f..f8edbd9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java
@@ -41,6 +41,6 @@ public class CastLongToDecimal extends FuncLongToDecimal {
 
   @Override
   protected void func(DecimalColumnVector outV, LongColumnVector inV, int i) {
-    outV.set(i, HiveDecimal.create(inV.vector[i]));
+    outV.vector[i].set(HiveDecimal.create(inV.vector[i]));
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java
index 7dc322e..d8d7dae 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java
@@ -63,7 +63,7 @@ public class CastStringToDecimal extends VectorExpression {
        * making a new string.
        */
       s = new String(inputColVector.vector[i], inputColVector.start[i], inputColVector.length[i], "UTF-8");
-      outputColVector.set(i, HiveDecimal.create(s));
+      outputColVector.vector[i].set(HiveDecimal.create(s));
     } catch (Exception e) {
 
       // for any exception in conversion to decimal, produce NULL

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
index 3f5f25d..42e005e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
@@ -24,16 +24,12 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.MathExpr;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 
 public class CastTimestampToLong extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
   private int colNum;
 
-  private transient PrimitiveCategory integerPrimitiveCategory;
-
   public CastTimestampToLong(int colNum, int outputColumnNum) {
     super(outputColumnNum);
     this.colNum = colNum;
@@ -44,41 +40,6 @@ public class CastTimestampToLong extends VectorExpression {
   }
 
   @Override
-  public void transientInit() throws HiveException {
-    integerPrimitiveCategory = ((PrimitiveTypeInfo) outputTypeInfo).getPrimitiveCategory();
-  }
-
-  private void setIntegerFromTimestamp(TimestampColumnVector inputColVector,
-      LongColumnVector outputColVector, int batchIndex) {
-
-    final long longValue = inputColVector.getTimestampAsLong(batchIndex);
-
-    boolean isInRange;
-    switch (integerPrimitiveCategory) {
-    case BYTE:
-      isInRange = ((byte) longValue) == longValue;
-      break;
-    case SHORT:
-      isInRange = ((short) longValue) == longValue;
-      break;
-    case INT:
-      isInRange = ((int) longValue) == longValue;
-      break;
-    case LONG:
-      isInRange = true;
-      break;
-    default:
-      throw new RuntimeException("Unexpected integer primitive category " + integerPrimitiveCategory);
-    }
-    if (isInRange) {
-      outputColVector.vector[batchIndex] = longValue;
-    } else {
-      outputColVector.isNull[batchIndex] = true;
-      outputColVector.noNulls = false;
-    }
-  }
-
-  @Override
   public void evaluate(VectorizedRowBatch batch) throws HiveException {
 
     if (childExpressions != null) {
@@ -91,6 +52,7 @@ public class CastTimestampToLong extends VectorExpression {
     boolean[] inputIsNull = inputColVector.isNull;
     boolean[] outputIsNull = outputColVector.isNull;
     int n = batch.size;
+    long[] outputVector = outputColVector.vector;
 
     // return immediately if batch is empty
     if (n == 0) {
@@ -103,7 +65,7 @@ public class CastTimestampToLong extends VectorExpression {
     if (inputColVector.isRepeating) {
       if (inputColVector.noNulls || !inputIsNull[0]) {
         outputIsNull[0] = false;
-        setIntegerFromTimestamp(inputColVector, outputColVector, 0);
+        outputVector[0] = inputColVector.getTimestampAsLong(0);
       } else {
         outputIsNull[0] = true;
         outputColVector.noNulls = false;
@@ -122,12 +84,12 @@ public class CastTimestampToLong extends VectorExpression {
            final int i = sel[j];
            // Set isNull before call in case it changes it mind.
            outputIsNull[i] = false;
-           setIntegerFromTimestamp(inputColVector, outputColVector, i);
+           outputVector[i] =  inputColVector.getTimestampAsLong(i);
          }
         } else {
           for(int j = 0; j != n; j++) {
             final int i = sel[j];
-            setIntegerFromTimestamp(inputColVector, outputColVector, i);
+            outputVector[i] =  inputColVector.getTimestampAsLong(i);
           }
         }
       } else {
@@ -139,7 +101,7 @@ public class CastTimestampToLong extends VectorExpression {
           outputColVector.noNulls = true;
         }
         for(int i = 0; i != n; i++) {
-          setIntegerFromTimestamp(inputColVector, outputColVector, i);
+          outputVector[i] =  inputColVector.getTimestampAsLong(i);
         }
       }
     } else /* there are NULLs in the inputColVector */ {
@@ -152,20 +114,20 @@ public class CastTimestampToLong extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           if (!inputIsNull[i]) {
-            outputIsNull[i] = false;
-            setIntegerFromTimestamp(inputColVector, outputColVector, i);
+            inputIsNull[i] = false;
+            outputVector[i] =  inputColVector.getTimestampAsLong(i);
           } else {
-            outputIsNull[i] = true;
+            inputIsNull[i] = true;
             outputColVector.noNulls = false;
           }
         }
       } else {
         for(int i = 0; i != n; i++) {
           if (!inputIsNull[i]) {
-            outputIsNull[i] = false;
-            setIntegerFromTimestamp(inputColVector, outputColVector, i);
+            inputIsNull[i] = false;
+            outputVector[i] =  inputColVector.getTimestampAsLong(i);
           } else {
-            outputIsNull[i] = true;
+            inputIsNull[i] = true;
             outputColVector.noNulls = false;
           }
         }
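
The CastTimestampToLong hunk restores the standard vectorized evaluate() skeleton without the per-type range check: a fast path for a repeating input, then the (selectedInUse, noNulls) combinations. (Note the restored null-handling loops assign inputIsNull[i] where the deleted code assigned outputIsNull[i].) The skeleton in isolation, as a sketch over plain arrays rather than Hive's column vector classes:

    // Generic shape of a vectorized unary expression's evaluate(), as in
    // CastTimestampToLong above. Plain arrays stand in for Hive's column
    // vectors; convert() stands in for the actual cast.
    public class EvaluateSkeletonSketch {
      static void evaluate(boolean isRepeating, boolean noNulls, boolean selectedInUse,
          int[] sel, int n, long[] in, boolean[] inputIsNull,
          long[] out, boolean[] outputIsNull) {
        if (n == 0) {
          return; // empty batch: nothing to do
        }
        if (isRepeating) {
          // Whole batch shares element 0.
          if (noNulls || !inputIsNull[0]) {
            outputIsNull[0] = false;
            out[0] = convert(in[0]);
          } else {
            outputIsNull[0] = true;
          }
          return;
        }
        if (noNulls) {
          if (selectedInUse) {
            for (int j = 0; j != n; j++) {
              int i = sel[j];
              outputIsNull[i] = false;
              out[i] = convert(in[i]);
            }
          } else {
            for (int i = 0; i != n; i++) {
              out[i] = convert(in[i]);
            }
          }
        } else {
          // NULLs present: propagate the null mask, convert the rest.
          for (int j = 0; j != n; j++) {
            int i = selectedInUse ? sel[j] : j;
            if (!inputIsNull[i]) {
              outputIsNull[i] = false;
              out[i] = convert(in[i]);
            } else {
              outputIsNull[i] = true;
            }
          }
        }
      }

      private static long convert(long v) {
        return v; // stand-in for the timestamp-to-long cast
      }
    }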

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullVectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullVectorExpression.java
deleted file mode 100644
index b7bfe1e..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullVectorExpression.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-
-public class NullVectorExpression extends VectorExpression {
-  private static final long serialVersionUID = 1L;
-
-  public NullVectorExpression(int outputColumnNum) {
-    super(outputColumnNum);
-  }
-
-  public NullVectorExpression() {
-    super();
-  }
-
-
-  @Override
-  public String vectorExpressionParameters() {
-    return null;
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) throws HiveException {
-    ColumnVector colVector = batch.cols[outputColumnNum];
-    colVector.isNull[0] = true;
-    colVector.noNulls = false;
-    colVector.isRepeating = true;
-  }
-
-  @Override
-  public Descriptor getDescriptor() {
-    // Not applicable.
-    return null;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java
index 315b72b..95703b0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal.java
@@ -460,6 +460,6 @@ public class VectorUDAFSumDecimal extends VectorAggregateExpression {
     }
     outputColVector.isNull[batchIndex] = false;
 
-    outputColVector.set(batchIndex, myagg.sum);
+    outputColVector.vector[batchIndex].set(myagg.sum);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal64ToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal64ToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal64ToDecimal.java
index 117611e..d091f3f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal64ToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFSumDecimal64ToDecimal.java
@@ -516,6 +516,6 @@ public class VectorUDAFSumDecimal64ToDecimal extends VectorAggregateExpression {
     }
     outputColVector.isNull[batchIndex] = false;
 
-    outputColVector.set(batchIndex, myagg.regularDecimalSum);
+    outputColVector.vector[batchIndex].set(myagg.regularDecimalSum);
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFEvaluatorDecimalFirstValue.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFEvaluatorDecimalFirstValue.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFEvaluatorDecimalFirstValue.java
index dc037ae..ce118bc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFEvaluatorDecimalFirstValue.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFEvaluatorDecimalFirstValue.java
@@ -102,7 +102,7 @@ public class VectorPTFEvaluatorDecimalFirstValue extends VectorPTFEvaluatorBase
       outputColVector.isNull[0] = true;
     } else {
       outputColVector.isNull[0] = false;
-      outputColVector.set(0, firstValue);
+      outputColVector.vector[0].set(firstValue);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFGroupBatches.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFGroupBatches.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFGroupBatches.java
index a39da0d..573910e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFGroupBatches.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ptf/VectorPTFGroupBatches.java
@@ -206,7 +206,7 @@ public class VectorPTFGroupBatches {
           ((DoubleColumnVector) outputColVector).vector[0] = evaluator.getDoubleGroupResult();
           break;
         case DECIMAL:
-          ((DecimalColumnVector) outputColVector).set(0, evaluator.getDecimalGroupResult());
+          ((DecimalColumnVector) outputColVector).vector[0].set(evaluator.getDecimalGroupResult());
           break;
         default:
           throw new RuntimeException("Unexpected column vector type " + evaluator.getResultColumnVectorType());

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
index 002aef6..961eea2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
@@ -34,7 +34,6 @@ import com.google.common.collect.ImmutableSortedMultiset;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf.StrictChecks;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -145,9 +144,7 @@ public class ExprNodeGenericFuncDesc extends ExprNodeDesc implements
     if (genericUDF instanceof GenericUDFBridge) {
       GenericUDFBridge genericUDFBridge = (GenericUDFBridge) genericUDF;
       sb.append(" ==> ");
-      String udfName = genericUDFBridge.getUdfName();
-      Class<? extends UDF> udfClass = genericUDFBridge.getUdfClass();
-      sb.append(udfName != null ? udfName : (udfClass != null ? udfClass.getSimpleName() : "null"));
+      sb.append(genericUDFBridge.getUdfName());
       sb.append(" ");
     }
     sb.append("(");

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
index 3ac7a06..d7d8bcc 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToBoolean;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToBooleanViaDoubleToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToBooleanViaLongToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDateToBoolean;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDateToBooleanViaLongToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToBoolean;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -48,7 +48,7 @@ import org.apache.hadoop.io.Text;
  *
  */
 @VectorizedExpressions({CastLongToBooleanViaLongToLong.class,
-    CastDateToBoolean.class, CastTimestampToBoolean.class, CastStringToBoolean.class,
+    CastDateToBooleanViaLongToLong.class, CastTimestampToBoolean.class, CastStringToBoolean.class,
   CastDoubleToBooleanViaDoubleToLong.class, CastDecimalToBoolean.class, CastStringToLong.class})
 public class UDFToBoolean extends UDF {
   private final BooleanWritable booleanWritable = new BooleanWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
index 1128b32..8c6629e 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
@@ -187,12 +187,7 @@ public class UDFToByte extends UDF {
     if (i == null) {
       return null;
     } else {
-      final long longValue = i.getSeconds();
-      final byte byteValue = (byte) longValue;
-      if (byteValue != longValue) {
-        return null;
-      }
-      byteWritable.set(byteValue);
+      byteWritable.set((byte)i.getSeconds());
       return byteWritable;
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
index 748a688..9540449 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
@@ -197,12 +197,7 @@ public class UDFToInteger extends UDF {
     if (i == null) {
       return null;
     } else {
-      final long longValue = i.getSeconds();
-      final int intValue = (int) longValue;
-      if (intValue != longValue) {
-        return null;
-      }
-      intWritable.set(intValue);
+      intWritable.set((int) i.getSeconds());
       return intWritable;
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
index e003ff3..94bbe82 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
@@ -189,12 +189,7 @@ public class UDFToShort extends UDF {
     if (i == null) {
       return null;
     } else {
-      final long longValue = i.getSeconds();
-      final short shortValue = (short) longValue;
-      if (shortValue != longValue) {
-        return null;
-      }
-      shortWritable.set(shortValue);
+      shortWritable.set((short) i.getSeconds());
       return shortWritable;
     }
   }
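
UDFToByte, UDFToInteger, and UDFToShort all lose the same guard that CastTimestampToLong loses above: a round-trip check that returned NULL when the seconds value did not fit the narrower integer type, replaced by an unconditional narrowing cast. The check in isolation, as a sketch:

    // Round-trip range checks for narrowing a long, as removed by this
    // revert: a value fits iff the narrowing cast is lossless.
    public final class NarrowingCheck {
      public static boolean fitsInByte(long longValue) {
        return ((byte) longValue) == longValue;
      }

      public static boolean fitsInShort(long longValue) {
        return ((short) longValue) == longValue;
      }

      public static boolean fitsInInt(long longValue) {
        return ((int) longValue) == longValue;
      }
    }

For example, fitsInInt(4_000_000_000L) is false, so the pre-revert UDFToInteger returned NULL for such a timestamp instead of a wrapped negative value.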

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
index 7877532..fa5c775 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
@@ -222,23 +222,21 @@ public class VectorRandomRowSource {
       "map"
   };
 
-  private static String getRandomTypeName(Random random, SupportedTypes supportedTypes,
-      Set<String> allowedTypeNameSet) {
-
+  private String getRandomTypeName(SupportedTypes supportedTypes, Set<String> allowedTypeNameSet) {
     String typeName = null;
     do {
-      if (random.nextInt(10 ) != 0) {
-        typeName = possibleHivePrimitiveTypeNames[random.nextInt(possibleHivePrimitiveTypeNames.length)];
+      if (r.nextInt(10 ) != 0) {
+        typeName = possibleHivePrimitiveTypeNames[r.nextInt(possibleHivePrimitiveTypeNames.length)];
       } else {
         switch (supportedTypes) {
         case PRIMITIVES:
-          typeName = possibleHivePrimitiveTypeNames[random.nextInt(possibleHivePrimitiveTypeNames.length)];
+          typeName = possibleHivePrimitiveTypeNames[r.nextInt(possibleHivePrimitiveTypeNames.length)];
           break;
         case ALL_EXCEPT_MAP:
-          typeName = possibleHiveComplexTypeNames[random.nextInt(possibleHiveComplexTypeNames.length - 1)];
+          typeName = possibleHiveComplexTypeNames[r.nextInt(possibleHiveComplexTypeNames.length - 1)];
           break;
         case ALL:
-          typeName = possibleHiveComplexTypeNames[random.nextInt(possibleHiveComplexTypeNames.length)];
+          typeName = possibleHiveComplexTypeNames[r.nextInt(possibleHiveComplexTypeNames.length)];
           break;
         }
       }
@@ -246,22 +244,17 @@ public class VectorRandomRowSource {
     return typeName;
   }
 
-  public static String getDecoratedTypeName(Random random, String typeName) {
-    return getDecoratedTypeName(random, typeName, null, null, 0, 1);
-  }
-
-  private static String getDecoratedTypeName(Random random, String typeName,
-      SupportedTypes supportedTypes, Set<String> allowedTypeNameSet, int depth, int maxDepth) {
-
+  private String getDecoratedTypeName(String typeName, SupportedTypes supportedTypes,
+      Set<String> allowedTypeNameSet, int depth, int maxDepth) {
     depth++;
     if (depth < maxDepth) {
       supportedTypes = SupportedTypes.PRIMITIVES;
     }
     if (typeName.equals("char")) {
-      final int maxLength = 1 + random.nextInt(100);
+      final int maxLength = 1 + r.nextInt(100);
       typeName = String.format("char(%d)", maxLength);
     } else if (typeName.equals("varchar")) {
-      final int maxLength = 1 + random.nextInt(100);
+      final int maxLength = 1 + r.nextInt(100);
       typeName = String.format("varchar(%d)", maxLength);
     } else if (typeName.equals("decimal")) {
       typeName =
@@ -270,34 +263,26 @@ public class VectorRandomRowSource {
               HiveDecimal.SYSTEM_DEFAULT_PRECISION,
               HiveDecimal.SYSTEM_DEFAULT_SCALE);
     } else if (typeName.equals("array")) {
-      String elementTypeName = getRandomTypeName(random, supportedTypes, allowedTypeNameSet);
+      String elementTypeName = getRandomTypeName(supportedTypes, allowedTypeNameSet);
       elementTypeName =
-          getDecoratedTypeName(random, elementTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
+          getDecoratedTypeName(elementTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
       typeName = String.format("array<%s>", elementTypeName);
     } else if (typeName.equals("map")) {
-      String keyTypeName =
-          getRandomTypeName(
-              random, SupportedTypes.PRIMITIVES, allowedTypeNameSet);
+      String keyTypeName = getRandomTypeName(SupportedTypes.PRIMITIVES, allowedTypeNameSet);
       keyTypeName =
-          getDecoratedTypeName(
-              random, keyTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
-      String valueTypeName =
-          getRandomTypeName(
-              random, supportedTypes, allowedTypeNameSet);
+          getDecoratedTypeName(keyTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
+      String valueTypeName = getRandomTypeName(supportedTypes, allowedTypeNameSet);
       valueTypeName =
-          getDecoratedTypeName(
-              random, valueTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
+          getDecoratedTypeName(valueTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
       typeName = String.format("map<%s,%s>", keyTypeName, valueTypeName);
     } else if (typeName.equals("struct")) {
-      final int fieldCount = 1 + random.nextInt(10);
+      final int fieldCount = 1 + r.nextInt(10);
       final StringBuilder sb = new StringBuilder();
       for (int i = 0; i < fieldCount; i++) {
-        String fieldTypeName =
-            getRandomTypeName(
-                random, supportedTypes, allowedTypeNameSet);
+        String fieldTypeName = getRandomTypeName(supportedTypes, allowedTypeNameSet);
         fieldTypeName =
             getDecoratedTypeName(
-                random, fieldTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
+                fieldTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
         if (i > 0) {
           sb.append(",");
         }
@@ -309,15 +294,13 @@ public class VectorRandomRowSource {
       typeName = String.format("struct<%s>", sb.toString());
     } else if (typeName.equals("struct") ||
         typeName.equals("uniontype")) {
-      final int fieldCount = 1 + random.nextInt(10);
+      final int fieldCount = 1 + r.nextInt(10);
       final StringBuilder sb = new StringBuilder();
       for (int i = 0; i < fieldCount; i++) {
-        String fieldTypeName =
-            getRandomTypeName(
-                random, supportedTypes, allowedTypeNameSet);
+        String fieldTypeName = getRandomTypeName(supportedTypes, allowedTypeNameSet);
         fieldTypeName =
             getDecoratedTypeName(
-                random, fieldTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
+                fieldTypeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
         if (i > 0) {
           sb.append(",");
         }
@@ -328,11 +311,6 @@ public class VectorRandomRowSource {
     return typeName;
   }
 
-  private String getDecoratedTypeName(String typeName,
-      SupportedTypes supportedTypes, Set<String> allowedTypeNameSet, int depth, int maxDepth) {
-    return getDecoratedTypeName(r, typeName, supportedTypes, allowedTypeNameSet, depth, maxDepth);
-  }
-
   private ObjectInspector getObjectInspector(TypeInfo typeInfo) {
     return getObjectInspector(typeInfo, DataTypePhysicalVariation.NONE);
   }
@@ -476,7 +454,7 @@ public class VectorRandomRowSource {
         typeName = explicitTypeNameList.get(c);
         dataTypePhysicalVariation = explicitDataTypePhysicalVariationList.get(c);
       } else if (onlyOne || allowedTypeNameSet != null) {
-        typeName = getRandomTypeName(r, supportedTypes, allowedTypeNameSet);
+        typeName = getRandomTypeName(supportedTypes, allowedTypeNameSet);
       } else {
         int typeNum;
         if (allTypes) {

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCastStatement.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCastStatement.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCastStatement.java
deleted file mode 100644
index 0e300cf..0000000
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorCastStatement.java
+++ /dev/null
@@ -1,502 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Random;
-
-import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
-import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
-import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
-import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
-import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIf;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
-import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.io.LongWritable;
-
-import junit.framework.Assert;
-
-import org.junit.Ignore;
-import org.junit.Test;
-
-public class TestVectorCastStatement {
-
-  @Test
-  public void testBoolean() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "boolean");
-  }
-
-  @Test
-  public void testTinyInt() throws Exception {
-    Random random = new Random(5371);
-
-    doIfTests(random, "tinyint");
-  }
-
-  @Test
-  public void testSmallInt() throws Exception {
-    Random random = new Random(2772);
-
-    doIfTests(random, "smallint");
-  }
-
-  @Test
-  public void testInt() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "int");
-  }
-
-  @Test
-  public void testBigInt() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "bigint");
-  }
-
-  @Test
-  public void testString() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "string");
-  }
-
-  @Test
-  public void testTimestamp() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "timestamp");
-  }
-
-  @Test
-  public void testDate() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "date");
-  }
-
-  @Test
-  public void testFloat() throws Exception {
-    Random random = new Random(7322);
-
-    doIfTests(random, "float");
-  }
-
-  @Test
-  public void testDouble() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "double");
-  }
-
-  @Test
-  public void testChar() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "char(10)");
-  }
-
-  @Test
-  public void testVarchar() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "varchar(15)");
-  }
-
-  @Test
-  public void testBinary() throws Exception {
-    Random random = new Random(12882);
-
-    doIfTests(random, "binary");
-  }
-
-  @Test
-  public void testDecimal() throws Exception {
-    Random random = new Random(9300);
-
-    doIfTests(random, "decimal(38,18)");
-    doIfTests(random, "decimal(38,0)");
-    doIfTests(random, "decimal(20,8)");
-    doIfTests(random, "decimal(10,4)");
-  }
-
-  public enum CastStmtTestMode {
-    ROW_MODE,
-    ADAPTOR,
-    VECTOR_EXPRESSION;
-
-    static final int count = values().length;
-  }
-
-  private void doCastTests(Random random, String typeName)
-      throws Exception {
-    doCastTests(random, typeName, DataTypePhysicalVariation.NONE);
-  }
-
-  private void doCastTests(Random random, String typeName,
-      DataTypePhysicalVariation dataTypePhysicalVariation)
-          throws Exception {
-
-    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
-    PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
-
-    for (PrimitiveCategory targetPrimitiveCategory : PrimitiveCategory.values()) {
-
-      if (targetPrimitiveCategory == PrimitiveCategory.VOID ||
-          targetPrimitiveCategory == PrimitiveCategory.INTERVAL_YEAR_MONTH ||
-          targetPrimitiveCategory == PrimitiveCategory.INTERVAL_DAY_TIME ||
-          targetPrimitiveCategory == PrimitiveCategory.TIMESTAMPLOCALTZ ||
-          targetPrimitiveCategory == PrimitiveCategory.UNKNOWN) {
-        continue;
-      }
-
-      // BINARY to DECIMAL/TIMESTAMP casts go through GenericUDFDecimal/GenericUDFTimestamp; skip them here.
-      if (primitiveCategory == PrimitiveCategory.BINARY) {
-        if (targetPrimitiveCategory == PrimitiveCategory.DECIMAL ||
-            targetPrimitiveCategory == PrimitiveCategory.TIMESTAMP) {
-          continue;
-        }
-      }
-
-      // DATE to DECIMAL casts go through GenericUDFDecimal; skip them here.
-      if (primitiveCategory == PrimitiveCategory.DATE) {
-        if (targetPrimitiveCategory == PrimitiveCategory.DECIMAL) {
-          continue;
-        }
-      }
-
-      if (primitiveCategory == targetPrimitiveCategory) {
-        if (primitiveCategory != PrimitiveCategory.CHAR &&
-            primitiveCategory != PrimitiveCategory.VARCHAR &&
-            primitiveCategory != PrimitiveCategory.DECIMAL) {
-          continue;
-        }
-      }
-
-      doCastTestOneCast(random, typeName, dataTypePhysicalVariation, targetPrimitiveCategory);
-    }
-  }
-
-  private void doCastTestOneCast(Random random, String typeName,
-      DataTypePhysicalVariation dataTypePhysicalVariation,
-      PrimitiveCategory targetPrimitiveCategory)
-          throws Exception {
-
-    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
-
-    boolean isDecimal64 = (dataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64);
-    final int decimal64Scale =
-        (isDecimal64 ? ((DecimalTypeInfo) typeInfo).getScale() : 0);
-
-    List<String> explicitTypeNameList = new ArrayList<String>();
-    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
-    explicitTypeNameList.add(typeName);
-    explicitDataTypePhysicalVariationList.add(dataTypePhysicalVariation);
-
-    VectorRandomRowSource rowSource = new VectorRandomRowSource();
-
-    rowSource.initExplicitSchema(
-        random, explicitTypeNameList, /* maxComplexDepth */ 0, /* allowNull */ true,
-        explicitDataTypePhysicalVariationList);
-
-    List<String> columns = new ArrayList<String>();
-    columns.add("col0");
-    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(typeInfo, "col0", "table", false);
-
-    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
-    children.add(col1Expr);
-
-    //----------------------------------------------------------------------------------------------
-
-    String targetTypeName;
-    if (targetPrimitiveCategory == PrimitiveCategory.BYTE) {
-      targetTypeName = "tinyint";
-    } else if (targetPrimitiveCategory == PrimitiveCategory.SHORT) {
-      targetTypeName = "smallint";
-    } else if (targetPrimitiveCategory == PrimitiveCategory.LONG) {
-      targetTypeName = "bigint";
-    } else {
-      targetTypeName = targetPrimitiveCategory.name().toLowerCase();
-    }
-    targetTypeName = VectorRandomRowSource.getDecoratedTypeName(random, targetTypeName);
-    TypeInfo targetTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(targetTypeName);
-
-    //----------------------------------------------------------------------------------------------
-
-    String[] columnNames = columns.toArray(new String[0]);
-
-    Object[][] randomRows = rowSource.randomRows(100000);
-
-    VectorRandomBatchSource batchSource =
-        VectorRandomBatchSource.createInterestingBatches(
-            random,
-            rowSource,
-            randomRows,
-            null);
-
-    final int rowCount = randomRows.length;
-    Object[][] resultObjectsArray = new Object[CastStmtTestMode.count][];
-    for (int i = 0; i < CastStmtTestMode.count; i++) {
-
-      Object[] resultObjects = new Object[rowCount];
-      resultObjectsArray[i] = resultObjects;
-
-      CastStmtTestMode castStmtTestMode = CastStmtTestMode.values()[i];
-      switch (castStmtTestMode) {
-      case ROW_MODE:
-        if (!doRowCastTest(
-              typeInfo,
-              targetTypeInfo,
-              columns,
-              children,
-              randomRows,
-              rowSource.rowStructObjectInspector(),
-              resultObjects)) {
-          return;
-        }
-        break;
-      case ADAPTOR:
-      case VECTOR_EXPRESSION:
-        if (!doVectorCastTest(
-              typeInfo,
-              targetTypeInfo,
-              columns,
-              columnNames,
-              rowSource.typeInfos(),
-              rowSource.dataTypePhysicalVariations(),
-              children,
-              castStmtTestMode,
-              batchSource,
-              resultObjects)) {
-          return;
-        }
-        break;
-      default:
-        throw new RuntimeException("Unexpected IF statement test mode " + ifStmtTestMode);
-      }
-    }
-
-    for (int i = 0; i < rowCount; i++) {
-      // Row-mode is the expected value.
-      Object expectedResult = resultObjectsArray[0][i];
-
-      for (int v = 1; v < CastStmtTestMode.count; v++) {
-        Object vectorResult = resultObjectsArray[v][i];
-        if (expectedResult == null || vectorResult == null) {
-          if (expectedResult != null || vectorResult != null) {
-            Assert.fail(
-                "Row " + i +
-                " sourceTypeName " + typeName +
-                " targetTypeName " + targetTypeName +
-                " " + CastStmtTestMode.values()[v] +
-                " result is NULL " + (vectorResult == null ? "YES" : "NO") +
-                " does not match row-mode expected result is NULL " +
-                (expectedResult == null ? "YES" : "NO"));
-          }
-        } else {
-
-          if (isDecimal64 && expectedResult instanceof LongWritable) {
-
-            HiveDecimalWritable expectedHiveDecimalWritable = new HiveDecimalWritable(0);
-            expectedHiveDecimalWritable.deserialize64(
-                ((LongWritable) expectedResult).get(), decimal64Scale);
-            expectedResult = expectedHiveDecimalWritable;
-          }
-
-          if (!expectedResult.equals(vectorResult)) {
-            Assert.fail(
-                "Row " + i +
-                " sourceTypeName " + typeName +
-                " targetTypeName " + targetTypeName +
-                " " + CastStmtTestMode.values()[v] +
-                " result " + vectorResult.toString() +
-                " (" + vectorResult.getClass().getSimpleName() + ")" +
-                " does not match row-mode expected result " + expectedResult.toString() +
-                " (" + expectedResult.getClass().getSimpleName() + ")");
-          }
-        }
-      }
-    }
-  }
-
-  private boolean doRowCastTest(TypeInfo typeInfo, TypeInfo targetTypeInfo,
-      List<String> columns, List<ExprNodeDesc> children,
-      Object[][] randomRows, ObjectInspector rowInspector, Object[] resultObjects)
-          throws Exception {
-
-    GenericUDF udf;
-    try {
-      udf = VectorizationContext.getGenericUDFForCast(targetTypeInfo);
-    } catch (HiveException e) {
-      return false;
-    }
-
-    ExprNodeGenericFuncDesc exprDesc =
-        new ExprNodeGenericFuncDesc(targetTypeInfo, udf, children);
-
-    /*
-    System.out.println(
-        "*DEBUG* typeInfo " + typeInfo.toString() +
-        " targetTypeInfo " + targetTypeInfo +
-        " castStmtTestMode ROW_MODE" +
-        " exprDesc " + exprDesc.toString());
-    */
-
-    HiveConf hiveConf = new HiveConf();
-    ExprNodeEvaluator evaluator =
-        ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
-    try {
-      evaluator.initialize(rowInspector);
-    } catch (HiveException e) {
-      return false;
-    }
-
-    ObjectInspector objectInspector = TypeInfoUtils
-        .getStandardWritableObjectInspectorFromTypeInfo(targetTypeInfo);
-
-    final int rowCount = randomRows.length;
-    for (int i = 0; i < rowCount; i++) {
-      Object[] row = randomRows[i];
-      Object result = evaluator.evaluate(row);
-      Object copyResult =
-          ObjectInspectorUtils.copyToStandardObject(
-              result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
-      resultObjects[i] = copyResult;
-    }
-
-    return true;
-  }
-
-  private void extractResultObjects(VectorizedRowBatch batch, int rowIndex,
-      VectorExtractRow resultVectorExtractRow, Object[] scratchRow, Object[] resultObjects) {
-    // UNDONE: selectedInUse
-    for (int i = 0; i < batch.size; i++) {
-      resultVectorExtractRow.extractRow(batch, i, scratchRow);
-
-      // UNDONE: Need to copy the object.
-      resultObjects[rowIndex++] = scratchRow[0];
-    }
-  }
-
-  private boolean doVectorCastTest(TypeInfo typeInfo, TypeInfo targetTypeInfo,
-      List<String> columns, String[] columnNames,
-      TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
-      List<ExprNodeDesc> children,
-      CastStmtTestMode castStmtTestMode,
-      VectorRandomBatchSource batchSource,
-      Object[] resultObjects)
-          throws Exception {
-
-    GenericUDF udf;
-    try {
-      udf = VectorizationContext.getGenericUDFForCast(targetTypeInfo);
-    } catch (HiveException e) {
-      return false;
-    }
-
-    ExprNodeGenericFuncDesc exprDesc =
-        new ExprNodeGenericFuncDesc(targetTypeInfo, udf, children);
-
-    HiveConf hiveConf = new HiveConf();
-    if (castStmtTestMode == CastStmtTestMode.ADAPTOR) {
-      hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
-    }
-
-    VectorizationContext vectorizationContext =
-        new VectorizationContext(
-            "name",
-            columns,
-            Arrays.asList(typeInfos),
-            Arrays.asList(dataTypePhysicalVariations),
-            hiveConf);
-    VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
-    vectorExpression.transientInit();
-
-    /*
-    System.out.println(
-        "*DEBUG* typeInfo " + typeInfo.toString() +
-        " targetTypeInfo " + targetTypeInfo +
-        " castStmtTestMode " + castStmtTestMode +
-        " vectorExpression " + vectorExpression.toString());
-    */
-
-    VectorRandomRowSource rowSource = batchSource.getRowSource();
-    VectorizedRowBatchCtx batchContext =
-        new VectorizedRowBatchCtx(
-            columnNames,
-            rowSource.typeInfos(),
-            rowSource.dataTypePhysicalVariations(),
-            /* dataColumnNums */ null,
-            /* partitionColumnCount */ 0,
-            /* virtualColumnCount */ 0,
-            /* neededVirtualColumns */ null,
-            vectorizationContext.getScratchColumnTypeNames(),
-            vectorizationContext.getScratchDataTypePhysicalVariations());
-
-    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
-
-    VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
-
-    resultVectorExtractRow.init(
-        new TypeInfo[] { targetTypeInfo }, new int[] { vectorExpression.getOutputColumnNum() });
-    Object[] scratchRow = new Object[1];
-
-    batchSource.resetBatchIteration();
-    int rowIndex = 0;
-    while (true) {
-      if (!batchSource.fillNextBatch(batch)) {
-        break;
-      }
-      vectorExpression.evaluate(batch);
-      extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow, resultObjects);
-      rowIndex += batch.size;
-    }
-
-    return true;
-  }
-}
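
The deleted test above is a differential test: row-mode evaluation serves as the oracle, and the ADAPTOR and VECTOR_EXPRESSION modes must match it row by row, NULLs included. A condensed, hypothetical sketch of that comparison loop (the helper name and the sample values are illustrative, not Hive API):

    public class CastOracleSketch {
      static void compareModes(Object[] expected, Object[] vectorized, String mode) {
        for (int i = 0; i < expected.length; i++) {
          Object e = expected[i];
          Object v = vectorized[i];
          if (e == null || v == null) {
            if (e != v) {  // exactly one side is NULL
              throw new AssertionError("Row " + i + " " + mode + ": NULL mismatch");
            }
          } else if (!e.equals(v)) {
            throw new AssertionError("Row " + i + " " + mode + ": " + v + " != " + e);
          }
        }
      }

      public static void main(String[] args) {
        Object[] rowMode = { 77L, null, -4787L };  // sample values only
        Object[] vector  = { 77L, null, -4787L };
        compareModes(rowMode, vector, "VECTOR_EXPRESSION");
        System.out.println("all rows match");
      }
    }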

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
index 58ed151..8499da6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
@@ -33,7 +33,6 @@ import java.util.concurrent.TimeUnit;
 
 import junit.framework.Assert;
 
-import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -168,9 +167,6 @@ public class TestVectorTypeCasts {
     LongColumnVector resultV = (LongColumnVector) b.cols[1];
     b.cols[0].noNulls = true;
     VectorExpression expr = new CastTimestampToLong(0, 1);
-    expr.setOutputTypeInfo(TypeInfoFactory.longTypeInfo);
-    expr.setOutputDataTypePhysicalVariation(DataTypePhysicalVariation.NONE);
-    expr.transientInit();
     expr.evaluate(b);
     for (int i = 0; i < longValues.length; i++) {
       long actual = resultV.vector[i];
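
With the three setup calls removed, the restored test drives the cast with no ceremony: construct CastTimestampToLong(inputColumn, outputColumn) and call evaluate() directly. A minimal sketch of that usage, assuming a hand-built two-column batch (the batch construction and the sample timestamp are illustrative; the class names and the (0, 1) constructor come from the hunk above):

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;

    public class CastTimestampSketch {
      public static void main(String[] args) throws Exception {
        VectorizedRowBatch b = new VectorizedRowBatch(2);
        TimestampColumnVector inV = new TimestampColumnVector();
        LongColumnVector outV = new LongColumnVector();
        b.cols[0] = inV;
        b.cols[1] = outV;
        inV.set(0, Timestamp.valueOf("2018-05-24 15:48:04"));  // arbitrary value
        b.size = 1;

        CastTimestampToLong expr = new CastTimestampToLong(0, 1);
        expr.evaluate(b);  // post-revert: no setOutputTypeInfo()/transientInit() first
        System.out.println(outV.vector[0]);
      }
    }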

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out
index 6cd1e8d..902d137 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out
@@ -806,7 +806,7 @@ POSTHOOK: Input: default@decimal_vgby_small
 626923679	1024	9723.40270	-9778.95135	10541.05247	10.29399655273437500000000000000	5742.091453325365	5744.897264122335	1024	11646	-11712	12641	12.3447	6877.306686989158	6880.6672084147185
 6981	2	-515.62107	-515.62107	-1031.24214	-515.62107000000000000000000000000	0.0	0.0	3	6984454	-618	6983218	2327739.3333	3292794.518850853	4032833.1995089175
 762	1	1531.21941	1531.21941	1531.21941	1531.21941000000000000000000000000	0.0	NULL	2	6984454	1834	6986288	3493144.0000	3491310.0	4937457.95244881
-NULL	3072	9318.43514	-4298.15135	5018444.11392	NULL	5695.4830839098695	5696.410309489299	3072	11161	-5148	6010880	1956.6667	6821.647911041892	6822.758476439734
+NULL	3072	9318.43514	-4298.15135	5018444.11392	1633.60811000000000000000000000000	5695.4830839098695	5696.410309489299	3072	11161	-5148	6010880	1956.6667	6821.647911041892	6822.758476439734
 PREHOOK: query: SELECT SUM(HASH(*))
 FROM (SELECT cint,
     COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
@@ -825,4 +825,4 @@ FROM (SELECT cint,
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_vgby_small
 #### A masked pattern was here ####
-96673467876
+96966670826
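
A quick consistency check on the restored llap output, using only numbers that appear in the hunk above: the NULL-keyed group's restored average is exactly its SUM divided by its COUNT (5018444.11392 / 3072 = 1633.60811):

    import java.math.BigDecimal;

    public class AvgCheck {
      public static void main(String[] args) {
        BigDecimal sum = new BigDecimal("5018444.11392");
        BigDecimal count = new BigDecimal("3072");
        System.out.println(sum.divide(count));  // prints 1633.60811 (exact quotient)
      }
    }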

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/test/results/clientpositive/spark/timestamp_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/timestamp_1.q.out b/ql/src/test/results/clientpositive/spark/timestamp_1.q.out
index fab69ec..d3ca5cf 100644
--- a/ql/src/test/results/clientpositive/spark/timestamp_1.q.out
+++ b/ql/src/test/results/clientpositive/spark/timestamp_1.q.out
@@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -129,7 +129,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -138,7 +138,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -212,7 +212,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -221,7 +221,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -295,7 +295,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -304,7 +304,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -378,7 +378,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -387,7 +387,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -461,7 +461,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -470,7 +470,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
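
The restored non-NULL results in this and the following timestamp files are mutually consistent with Java's two's-complement narrowing of a single underlying long per table (the full long value is not shown in these hunks): the tinyint result is the low byte of the smallint result. A check using only values from these q.out hunks:

    public class NarrowCheck {
      public static void main(String[] args) {
        System.out.println((byte) (short) -4787);   // 77  (timestamp_1, timestamp_2)
        System.out.println((byte) (short) -31184);  // 48  (timestamp_3, further below)
      }
    }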

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/test/results/clientpositive/spark/timestamp_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/timestamp_2.q.out b/ql/src/test/results/clientpositive/spark/timestamp_2.q.out
index 9a05dfe..f9bfb09 100644
--- a/ql/src/test/results/clientpositive/spark/timestamp_2.q.out
+++ b/ql/src/test/results/clientpositive/spark/timestamp_2.q.out
@@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -129,7 +129,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -138,7 +138,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -212,7 +212,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -221,7 +221,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -295,7 +295,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -304,7 +304,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -378,7 +378,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -387,7 +387,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -461,7 +461,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+77
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -470,7 +470,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-NULL
+-4787
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2

http://git-wip-us.apache.org/repos/asf/hive/blob/fc040d52/ql/src/test/results/clientpositive/spark/timestamp_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/timestamp_3.q.out b/ql/src/test/results/clientpositive/spark/timestamp_3.q.out
index 6d59269..0664abf 100644
--- a/ql/src/test/results/clientpositive/spark/timestamp_3.q.out
+++ b/ql/src/test/results/clientpositive/spark/timestamp_3.q.out
@@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_3 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
-NULL
+48
 PREHOOK: query: select cast(t as smallint) from timestamp_3 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_3
@@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_3 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
-NULL
+-31184
 PREHOOK: query: select cast(t as int) from timestamp_3 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_3

