hive-commits mailing list archives

From mmccl...@apache.org
Subject [34/51] [partial] hive git commit: HIVE-17433: Vectorization: Support Decimal64 in Hive Query Engine (Matt McCline, reviewed by Teddy Choi)
Date Sun, 29 Oct 2017 20:40:13 GMT
http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
index 66ee06a..aacac33 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
@@ -30,7 +30,8 @@ public class AbstractOperatorDesc implements OperatorDesc {
 
   protected boolean vectorMode = false;
 
-  // Extra parameters only for vectorization.
+  // Reference to vectorization description needed for EXPLAIN VECTORIZATION, hash table loading,
+  // etc.
   protected VectorDesc vectorDesc;
 
   protected Statistics statistics;

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/AppMasterEventDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AppMasterEventDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AppMasterEventDesc.java
index 97fcd09..82d35fe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AppMasterEventDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AppMasterEventDesc.java
@@ -70,20 +70,22 @@ public class AppMasterEventDesc extends AbstractOperatorDesc {
     private final AppMasterEventDesc appMasterEventDesc;
     private final VectorAppMasterEventDesc vectorAppMasterEventDesc;
 
-    public AppMasterEventOperatorExplainVectorization(AppMasterEventDesc appMasterEventDesc, VectorDesc vectorDesc) {
+    public AppMasterEventOperatorExplainVectorization(AppMasterEventDesc appMasterEventDesc,
+        VectorAppMasterEventDesc vectorAppMasterEventDesc) {
       // Native vectorization supported.
-      super(vectorDesc, true);
+      super(vectorAppMasterEventDesc, true);
       this.appMasterEventDesc = appMasterEventDesc;
-      vectorAppMasterEventDesc = (VectorAppMasterEventDesc) vectorDesc;
+      this.vectorAppMasterEventDesc = vectorAppMasterEventDesc;
     }
   }
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "App Master Event Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public AppMasterEventOperatorExplainVectorization getAppMasterEventVectorization() {
-    if (vectorDesc == null) {
+    VectorAppMasterEventDesc vectorAppMasterEventDesc = (VectorAppMasterEventDesc) getVectorDesc();
+    if (vectorAppMasterEventDesc == null) {
       return null;
     }
-    return new AppMasterEventOperatorExplainVectorization(this, vectorDesc);
+    return new AppMasterEventOperatorExplainVectorization(this, vectorAppMasterEventDesc);
   }
 
   @Override

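This AppMasterEventDesc change shows the pattern the patch repeats across the operator descriptors below: instead of reading the raw protected vectorDesc field and casting inside each explain-vectorization constructor, the narrowing cast now happens once at a getVectorDesc() call site and the constructor takes the already-typed descriptor. A self-contained sketch of the idea, using stand-in classes rather than the committed code:

    // Toy model of the typed-descriptor refactor; FilterDesc/VectorFilterDesc
    // here are stand-ins, only the shape matches the patch.
    class VectorDesc { }

    class VectorFilterDesc extends VectorDesc {
      @Override
      public String toString() { return "VectorFilterDesc"; }
    }

    class AbstractOperatorDesc {
      protected VectorDesc vectorDesc;
      public void setVectorDesc(VectorDesc vectorDesc) { this.vectorDesc = vectorDesc; }
      public VectorDesc getVectorDesc() { return vectorDesc; }
    }

    class FilterDesc extends AbstractOperatorDesc {
      // The narrowing cast happens once here; callers get null when the
      // operator was not vectorized, and emit no vectorization block.
      public VectorFilterDesc getFilterVectorization() {
        return (VectorFilterDesc) getVectorDesc();
      }
    }

    public class TypedVectorDescSketch {
      public static void main(String[] args) {
        FilterDesc desc = new FilterDesc();
        System.out.println(desc.getFilterVectorization());  // null
        desc.setVectorDesc(new VectorFilterDesc());
        System.out.println(desc.getFilterVectorization());  // VectorFilterDesc
      }
    }
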
http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
index ce21add..9adb56e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
@@ -30,12 +30,15 @@ import java.util.Stack;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support;
 import org.apache.hadoop.hive.ql.parse.RuntimeValuesInfo;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.optimizer.physical.VectorizerReason;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.Explain.Vectorization;
@@ -83,6 +86,10 @@ public abstract class BaseWork extends AbstractOperatorDesc {
 
   protected boolean useVectorizedInputFileFormat;
 
+  protected Set<Support> inputFormatSupportSet;
+  protected Set<Support> supportSetInUse;
+  protected List<String> supportRemovedReasons;
+
   private VectorizerReason notVectorizedReason;
 
   private boolean groupByVectorOutput;
@@ -239,14 +246,6 @@ public abstract class BaseWork extends AbstractOperatorDesc {
     return notVectorizedReason;
   }
 
-  public void setGroupByVectorOutput(boolean groupByVectorOutput) {
-    this.groupByVectorOutput = groupByVectorOutput;
-  }
-
-  public boolean getGroupByVectorOutput() {
-    return groupByVectorOutput;
-  }
-
   public void setUsesVectorUDFAdaptor(boolean usesVectorUDFAdaptor) {
     this.usesVectorUDFAdaptor = usesVectorUDFAdaptor;
   }
@@ -271,6 +270,23 @@ public abstract class BaseWork extends AbstractOperatorDesc {
       this.baseWork = baseWork;
     }
 
+    public static List<String> getColumnAndTypes(
+        int[] projectionColumns,
+        String[] columnNames, TypeInfo[] typeInfos,
+        DataTypePhysicalVariation[] dataTypePhysicalVariations) {
+      final int size = columnNames.length;
+      List<String> result = new ArrayList<String>(size);
+      for (int i = 0; i < size; i++) {
+        String displayString = projectionColumns[i] + ":" + columnNames[i] + ":" + typeInfos[i];
+        if (dataTypePhysicalVariations != null &&
+            dataTypePhysicalVariations[i] != DataTypePhysicalVariation.NONE) {
+          displayString += "/" + dataTypePhysicalVariations[i].toString();
+        }
+        result.add(displayString);
+      }
+      return result;
+    }
+
     @Explain(vectorization = Vectorization.SUMMARY, displayName = "enabled", explainLevels = { Level.DEFAULT, Level.EXTENDED })
     public boolean enabled() {
       return baseWork.getVectorizationEnabled();
@@ -296,14 +312,6 @@ public abstract class BaseWork extends AbstractOperatorDesc {
       return notVectorizedReason.toString();
     }
 
-    @Explain(vectorization = Vectorization.SUMMARY, displayName = "groupByVectorOutput", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public Boolean groupByRowOutputCascade() {
-      if (!baseWork.getVectorMode()) {
-        return null;
-      }
-      return baseWork.getGroupByVectorOutput();
-    }
-
     @Explain(vectorization = Vectorization.SUMMARY, displayName = "allNative", explainLevels = { Level.DEFAULT, Level.EXTENDED })
     public Boolean nativeVectorized() {
       if (!baseWork.getVectorMode()) {
@@ -331,10 +339,18 @@ public abstract class BaseWork extends AbstractOperatorDesc {
       private List<String> getColumns(int startIndex, int count) {
         String[] rowColumnNames = vectorizedRowBatchCtx.getRowColumnNames();
         TypeInfo[] rowColumnTypeInfos = vectorizedRowBatchCtx.getRowColumnTypeInfos();
+        DataTypePhysicalVariation[] dataTypePhysicalVariations =
+            vectorizedRowBatchCtx.getRowdataTypePhysicalVariations();
+
         List<String> result = new ArrayList<String>(count);
         final int end = startIndex + count;
         for (int i = startIndex; i < end; i++) {
-          result.add(rowColumnNames[i] + ":" + rowColumnTypeInfos[i]);
+          String displayString = rowColumnNames[i] + ":" + rowColumnTypeInfos[i];
+          if (dataTypePhysicalVariations != null &&
+              dataTypePhysicalVariations[i] != DataTypePhysicalVariation.NONE) {
+            displayString += "/" + dataTypePhysicalVariations[i].toString();
+          }
+          result.add(displayString);
         }
         return result;
       }
@@ -369,8 +385,28 @@ public abstract class BaseWork extends AbstractOperatorDesc {
       }
 
       @Explain(vectorization = Vectorization.DETAIL, displayName = "scratchColumnTypeNames", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-      public List<String> getScratchColumnTypeNames() {
-        return Arrays.asList(vectorizedRowBatchCtx.getScratchColumnTypeNames());
+      public String getScratchColumnTypeNames() {
+        String[] scratchColumnTypeNames = vectorizedRowBatchCtx.getScratchColumnTypeNames();
+        DataTypePhysicalVariation[] scratchDataTypePhysicalVariations = vectorizedRowBatchCtx.getScratchDataTypePhysicalVariations();
+        final int size = scratchColumnTypeNames.length;
+        List<String> result = new ArrayList<String>(size);
+        for (int i = 0; i < size; i++) {
+          String displayString = scratchColumnTypeNames[i];
+          if (scratchDataTypePhysicalVariations != null && scratchDataTypePhysicalVariations[i] != DataTypePhysicalVariation.NONE) {
+            displayString += "/" + scratchDataTypePhysicalVariations[i].toString();
+          }
+          result.add(displayString);
+        }
+        return result.toString();
+      }
+
+      @Explain(vectorization = Vectorization.DETAIL, displayName = "neededVirtualColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+      public String getNeededVirtualColumns() {
+        VirtualColumn[] neededVirtualColumns = vectorizedRowBatchCtx.getNeededVirtualColumns();
+        if (neededVirtualColumns == null || neededVirtualColumns.length == 0) {
+          return null;
+        }
+        return Arrays.toString(neededVirtualColumns);
       }
 
     }

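The new getColumnAndTypes() helper is what the Decimal64-aware column listings in EXPLAIN VECTORIZATION are built from: each entry is "colNum:name:type", with a "/VARIATION" suffix when the column's physical representation differs from the default. A standalone re-creation with an invented two-column schema (the nested enum stands in for org.apache.hadoop.hive.common.type.DataTypePhysicalVariation, and String type names replace TypeInfo):

    import java.util.ArrayList;
    import java.util.List;

    public class ColumnAndTypesSketch {
      enum DataTypePhysicalVariation { NONE, DECIMAL_64 }

      // Same logic as BaseExplainVectorization.getColumnAndTypes() above.
      static List<String> getColumnAndTypes(int[] projectionColumns, String[] columnNames,
          String[] typeNames, DataTypePhysicalVariation[] variations) {
        final int size = columnNames.length;
        List<String> result = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
          String displayString = projectionColumns[i] + ":" + columnNames[i] + ":" + typeNames[i];
          if (variations != null && variations[i] != DataTypePhysicalVariation.NONE) {
            displayString += "/" + variations[i];  // e.g. a decimal held as a Decimal64 long
          }
          result.add(displayString);
        }
        return result;
      }

      public static void main(String[] args) {
        System.out.println(getColumnAndTypes(
            new int[] { 0, 1 },
            new String[] { "ss_quantity", "ss_sales_price" },
            new String[] { "int", "decimal(7,2)" },
            new DataTypePhysicalVariation[] {
                DataTypePhysicalVariation.NONE, DataTypePhysicalVariation.DECIMAL_64 }));
        // prints: [0:ss_quantity:int, 1:ss_sales_price:decimal(7,2)/DECIMAL_64]
      }
    }
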
http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
index 04a6421..36184f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
@@ -536,18 +536,19 @@ public class FileSinkDesc extends AbstractOperatorDesc {
 
   public class FileSinkOperatorExplainVectorization extends OperatorExplainVectorization {
 
-    public FileSinkOperatorExplainVectorization(VectorDesc vectorDesc) {
+    public FileSinkOperatorExplainVectorization(VectorFileSinkDesc vectorFileSinkDesc) {
       // Native vectorization not supported.
-      super(vectorDesc, false);
+      super(vectorFileSinkDesc, false);
     }
   }
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "File Sink Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public FileSinkOperatorExplainVectorization getFileSinkVectorization() {
-    if (vectorDesc == null) {
+    VectorFileSinkDesc vectorFileSinkDesc = (VectorFileSinkDesc) getVectorDesc();
+    if (vectorFileSinkDesc == null) {
       return null;
     }
-    return new FileSinkOperatorExplainVectorization(vectorDesc);
+    return new FileSinkOperatorExplainVectorization(vectorFileSinkDesc);
   }
 
   public void setInsertOverwrite(boolean isInsertOverwrite) {

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
index 4b69380..a9e77fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
@@ -202,11 +202,11 @@ public class FilterDesc extends AbstractOperatorDesc {
     private final FilterDesc filterDesc;
     private final VectorFilterDesc vectorFilterDesc;
 
-    public FilterOperatorExplainVectorization(FilterDesc filterDesc, VectorDesc vectorDesc) {
+    public FilterOperatorExplainVectorization(FilterDesc filterDesc, VectorFilterDesc vectorFilterDesc) {
       // Native vectorization supported.
-      super(vectorDesc, true);
+      super(vectorFilterDesc, true);
       this.filterDesc = filterDesc;
-      vectorFilterDesc = (VectorFilterDesc) vectorDesc;
+      this.vectorFilterDesc = vectorFilterDesc;
     }
 
     @Explain(vectorization = Vectorization.EXPRESSION, displayName = "predicateExpression", explainLevels = { Level.DEFAULT, Level.EXTENDED })
@@ -217,10 +217,11 @@ public class FilterDesc extends AbstractOperatorDesc {
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "Filter Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public FilterOperatorExplainVectorization getFilterVectorization() {
-    if (vectorDesc == null) {
+    VectorFilterDesc vectorFilterDesc = (VectorFilterDesc) getVectorDesc();
+    if (vectorFilterDesc == null) {
       return null;
     }
-    return new FilterOperatorExplainVectorization(this, vectorDesc);
+    return new FilterOperatorExplainVectorization(this, vectorFilterDesc);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
index a44b780..9d4ad22 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
@@ -24,10 +24,12 @@ import java.util.List;
 import java.util.Objects;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationDesc;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hive.common.util.AnnotationUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.optimizer.physical.Vectorizer;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.Explain.Vectorization;
@@ -79,7 +81,6 @@ public class GroupByDesc extends AbstractOperatorDesc {
   private boolean dontResetAggrsDistinct;
 
   public GroupByDesc() {
-    vectorDesc = new VectorGroupByDesc();
   }
 
   public GroupByDesc(
@@ -110,7 +111,6 @@ public class GroupByDesc extends AbstractOperatorDesc {
       final boolean groupingSetsPresent,
       final int groupingSetsPosition,
       final boolean isDistinct) {
-    vectorDesc = new VectorGroupByDesc();
     this.mode = mode;
     this.outputColumnNames = outputColumnNames;
     this.keys = keys;
@@ -327,11 +327,12 @@ public class GroupByDesc extends AbstractOperatorDesc {
     private final GroupByDesc groupByDesc;
     private final VectorGroupByDesc vectorGroupByDesc;
 
-    public GroupByOperatorExplainVectorization(GroupByDesc groupByDesc, VectorDesc vectorDesc) {
+    public GroupByOperatorExplainVectorization(GroupByDesc groupByDesc,
+        VectorGroupByDesc vectorGroupByDesc) {
       // Native vectorization not supported.
-      super(vectorDesc, false);
+      super(vectorGroupByDesc, false);
       this.groupByDesc = groupByDesc;
-      vectorGroupByDesc = (VectorGroupByDesc) vectorDesc;
+      this.vectorGroupByDesc = vectorGroupByDesc;
     }
 
     @Explain(vectorization = Vectorization.EXPRESSION, displayName = "keyExpressions", explainLevels = { Level.DEFAULT, Level.EXTENDED })
@@ -341,19 +342,14 @@ public class GroupByDesc extends AbstractOperatorDesc {
 
     @Explain(vectorization = Vectorization.EXPRESSION, displayName = "aggregators", explainLevels = { Level.DEFAULT, Level.EXTENDED })
     public List<String> getAggregators() {
-      VectorAggregateExpression[] vecAggregators = vectorGroupByDesc.getAggregators();
-      List<String> vecAggrList = new ArrayList<String>(vecAggregators.length);
-      for (VectorAggregateExpression vecAggr : vecAggregators) {
-        vecAggrList.add(vecAggr.toString());
+      VectorAggregationDesc[] vecAggrDescs = vectorGroupByDesc.getVecAggrDescs();
+      List<String> vecAggrList = new ArrayList<String>(vecAggrDescs.length);
+      for (VectorAggregationDesc vecAggrDesc : vecAggrDescs) {
+        vecAggrList.add(vecAggrDesc.toString());
       }
       return vecAggrList;
     }
 
-    @Explain(vectorization = Vectorization.OPERATOR, displayName = "vectorOutput", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public boolean getGroupByRowOutputCascade() {
-      return vectorGroupByDesc.isVectorOutput();
-    }
-
     @Explain(vectorization = Vectorization.OPERATOR, displayName = "vectorProcessingMode", explainLevels = { Level.DEFAULT, Level.EXTENDED })
     public String getProcessingMode() {
       return vectorGroupByDesc.getProcessingMode().name();
@@ -375,36 +371,25 @@ public class GroupByDesc extends AbstractOperatorDesc {
         return null;
       }
 
-      VectorAggregateExpression[] vecAggregators = vectorGroupByDesc.getAggregators();
-      for (VectorAggregateExpression vecAggr : vecAggregators) {
-        Category category = Vectorizer.aggregationOutputCategory(vecAggr);
-        if (category != ObjectInspector.Category.PRIMITIVE) {
-          results.add(
-              "Vector output of " + vecAggr.toString() + " output type " + category + " requires PRIMITIVE type IS false");
-        }
-      }
-      if (results.size() == 0) {
-        return null;
-      }
-
       results.add(
           getComplexTypeWithGroupByEnabledCondition(
               isVectorizationComplexTypesEnabled, isVectorizationGroupByComplexTypesEnabled));
       return results;
     }
 
-    @Explain(vectorization = Vectorization.EXPRESSION, displayName = "projectedOutputColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getProjectedOutputColumns() {
+    @Explain(vectorization = Vectorization.EXPRESSION, displayName = "projectedOutputColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getProjectedOutputColumnNums() {
       return Arrays.toString(vectorGroupByDesc.getProjectedOutputColumns());
     }
   }
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "Group By Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public GroupByOperatorExplainVectorization getGroupByVectorization() {
-    if (vectorDesc == null) {
+    VectorGroupByDesc vectorGroupByDesc = (VectorGroupByDesc) getVectorDesc();
+    if (vectorGroupByDesc == null) {
       return null;
     }
-    return new GroupByOperatorExplainVectorization(this, vectorDesc);
+    return new GroupByOperatorExplainVectorization(this, vectorGroupByDesc);
   }
 
   public static String getComplexTypeEnabledCondition(

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
index 952c586..7b8fc2d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
@@ -77,18 +77,19 @@ public class LimitDesc extends AbstractOperatorDesc {
 
   public class LimitOperatorExplainVectorization extends OperatorExplainVectorization {
 
-    public LimitOperatorExplainVectorization(LimitDesc limitDesc, VectorDesc vectorDesc) {
+    public LimitOperatorExplainVectorization(LimitDesc limitDesc, VectorLimitDesc vectorLimitDesc) {
       // Native vectorization supported.
-      super(vectorDesc, true);
+      super(vectorLimitDesc, true);
     }
   }
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "Limit Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public LimitOperatorExplainVectorization getLimitVectorization() {
-    if (vectorDesc == null) {
+    VectorLimitDesc vectorLimitDesc = (VectorLimitDesc) getVectorDesc();
+    if (vectorLimitDesc == null) {
       return null;
     }
-    return new LimitOperatorExplainVectorization(this, vectorDesc);
+    return new LimitOperatorExplainVectorization(this, vectorLimitDesc);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
index 1b5bd78..ef8dd05 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
@@ -83,15 +83,11 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
   private boolean isDynamicPartitionHashJoin = false;
 
   public MapJoinDesc() {
-    vectorDesc = null;
     bigTableBucketNumMapping = new LinkedHashMap<String, Integer>();
   }
 
   public MapJoinDesc(MapJoinDesc clone) {
     super(clone);
-    if (clone.vectorDesc != null) {
-      vectorDesc = (VectorDesc) clone.vectorDesc.clone();
-    }
     this.keys = clone.keys;
     this.keyTblDesc = clone.keyTblDesc;
     this.valueTblDescs = clone.valueTblDescs;
@@ -117,7 +113,6 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
     final Map<Byte, List<ExprNodeDesc>> filters, boolean noOuterJoin, String dumpFilePrefix,
     final MemoryMonitorInfo memoryMonitorInfo, final long inMemoryDataSize) {
     super(values, outputColumnNames, noOuterJoin, conds, filters, null, memoryMonitorInfo);
-    vectorDesc = null;
     this.keys = keys;
     this.keyTblDesc = keyTblDesc;
     this.valueTblDescs = valueTblDescs;
@@ -403,11 +398,12 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
 
     private VectorizationCondition[] nativeConditions;
 
-    public MapJoinOperatorExplainVectorization(MapJoinDesc mapJoinDesc, VectorDesc vectorDesc) {
+    public MapJoinOperatorExplainVectorization(MapJoinDesc mapJoinDesc,
+        VectorMapJoinDesc vectorMapJoinDesc) {
       // VectorMapJoinOperator is not native vectorized.
-      super(vectorDesc, ((VectorMapJoinDesc) vectorDesc).getHashTableImplementationType() != HashTableImplementationType.NONE);
+      super(vectorMapJoinDesc, vectorMapJoinDesc.getHashTableImplementationType() != HashTableImplementationType.NONE);
       this.mapJoinDesc = mapJoinDesc;
-      vectorMapJoinDesc = (VectorMapJoinDesc) vectorDesc;
+      this.vectorMapJoinDesc = vectorMapJoinDesc;
       vectorMapJoinInfo = vectorMapJoinDesc.getVectorMapJoinInfo();
     }
 
@@ -490,8 +486,8 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
       return vectorExpressionsToStringList(vectorMapJoinInfo.getBigTableKeyExpressions());
     }
 
-    @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableKeyColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getBigTableKeyColumns() {
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableKeyColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getBigTableKeyColumnNums() {
       if (!isNative) {
         return null;
       }
@@ -510,8 +506,8 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
       return vectorExpressionsToStringList(vectorMapJoinInfo.getBigTableValueExpressions());
     }
 
-    @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableValueColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getBigTableValueColumns() {
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableValueColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getBigTableValueColumnNums() {
       if (!isNative) {
         return null;
       }
@@ -530,8 +526,8 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
       return outputColumnsToStringList(vectorMapJoinInfo.getSmallTableMapping());
     }
 
-    @Explain(vectorization = Vectorization.DETAIL, displayName = "projectedOutputColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getProjectedOutputColumns() {
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "projectedOutputColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getProjectedOutputColumnNums() {
       if (!isNative) {
         return null;
       }
@@ -546,8 +542,8 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
       return columnMappingToStringList(vectorMapJoinInfo.getBigTableOuterKeyMapping());
     }
 
-    @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableRetainedColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getBigTableRetainedColumns() {
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableRetainedColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getBigTableRetainedColumnNums() {
       if (!isNative) {
         return null;
       }
@@ -562,10 +558,11 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "Map Join Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public MapJoinOperatorExplainVectorization getMapJoinVectorization() {
-    if (vectorDesc == null || this instanceof SMBJoinDesc) {
+    VectorMapJoinDesc vectorMapJoinDesc = (VectorMapJoinDesc) getVectorDesc();
+    if (vectorMapJoinDesc == null || this instanceof SMBJoinDesc) {
       return null;
     }
-    return new MapJoinOperatorExplainVectorization(this, vectorDesc);
+    return new MapJoinOperatorExplainVectorization(this, vectorMapJoinDesc);
   }
 
   public class SMBJoinOperatorExplainVectorization extends OperatorExplainVectorization {
@@ -573,21 +570,23 @@ public class MapJoinDesc extends JoinDesc implements Serializable {
     private final SMBJoinDesc smbJoinDesc;
     private final VectorSMBJoinDesc vectorSMBJoinDesc;
 
-    public SMBJoinOperatorExplainVectorization(SMBJoinDesc smbJoinDesc, VectorDesc vectorDesc) {
+    public SMBJoinOperatorExplainVectorization(SMBJoinDesc smbJoinDesc,
+        VectorSMBJoinDesc vectorSMBJoinDesc) {
       // Native vectorization NOT supported.
-      super(vectorDesc, false);
+      super(vectorSMBJoinDesc, false);
       this.smbJoinDesc = smbJoinDesc;
-      vectorSMBJoinDesc = (VectorSMBJoinDesc) vectorDesc;
+      this.vectorSMBJoinDesc = vectorSMBJoinDesc;
     }
   }
 
   // Handle dual nature.
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "SMB Map Join Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public SMBJoinOperatorExplainVectorization getSMBJoinVectorization() {
-    if (vectorDesc == null || !(this instanceof SMBJoinDesc)) {
+    VectorSMBJoinDesc vectorSMBJoinDesc = (VectorSMBJoinDesc) getVectorDesc();
+    if (vectorSMBJoinDesc == null || !(this instanceof SMBJoinDesc)) {
       return null;
     }
-    return new SMBJoinOperatorExplainVectorization((SMBJoinDesc) this, vectorDesc);
+    return new SMBJoinOperatorExplainVectorization((SMBJoinDesc) this, vectorSMBJoinDesc);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
index 0011d11..e466b32 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
@@ -751,6 +752,30 @@ public class MapWork extends BaseWork {
     return useVectorizedInputFileFormat;
   }
 
+  public void setInputFormatSupportSet(Set<Support> inputFormatSupportSet) {
+    this.inputFormatSupportSet = inputFormatSupportSet;
+  }
+
+  public Set<Support> getInputFormatSupportSet() {
+    return inputFormatSupportSet;
+  }
+
+  public void setSupportSetInUse(Set<Support> supportSetInUse) {
+    this.supportSetInUse = supportSetInUse;
+  }
+
+  public Set<Support> getSupportSetInUse() {
+    return supportSetInUse;
+  }
+
+  public void setSupportRemovedReasons(List<String> supportRemovedReasons) {
+    this.supportRemovedReasons = supportRemovedReasons;
+  }
+
+  public List<String> getSupportRemovedReasons() {
+    return supportRemovedReasons;
+  }
+
   public void setNotEnabledInputFileFormatReason(VectorizerReason notEnabledInputFileFormatReason) {
     this.notEnabledInputFileFormatReason = notEnabledInputFileFormatReason;
   }
@@ -797,6 +822,33 @@ public class MapWork extends BaseWork {
       return mapWork.getVectorizationInputFileFormatClassNameSet();
     }
 
+    @Explain(vectorization = Vectorization.SUMMARY, displayName = "inputFormatFeatureSupport", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getInputFormatSupport() {
+      Set<Support> inputFormatSupportSet = mapWork.getInputFormatSupportSet();
+      if (inputFormatSupportSet == null) {
+        return null;
+      }
+      return inputFormatSupportSet.toString();
+    }
+
+    @Explain(vectorization = Vectorization.SUMMARY, displayName = "featureSupportInUse", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getVectorizationSupportInUse() {
+      Set<Support> supportSet = mapWork.getSupportSetInUse();
+      if (supportSet == null) {
+        return null;
+      }
+      return supportSet.toString();
+    }
+
+    @Explain(vectorization = Vectorization.SUMMARY, displayName = "vectorizationSupportRemovedReasons", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getSupportRemovedReasons() {
+      List<String> supportRemovedReasons = mapWork.getSupportRemovedReasons();
+      if (supportRemovedReasons == null || supportRemovedReasons.isEmpty()) {
+        return null;
+      }
+      return supportRemovedReasons.toString();
+    }
+
     @Explain(vectorization = Vectorization.SUMMARY, displayName = "enabledConditionsMet", explainLevels = { Level.DEFAULT, Level.EXTENDED })
     public List<String> enabledConditionsMet() {
       return mapWork.getVectorizationEnabledConditionsMet();

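The three new MapWork fields feed the inputFormatFeatureSupport / featureSupportInUse / vectorizationSupportRemovedReasons lines of EXPLAIN VECTORIZATION SUMMARY. A hedged illustration of how they render (the nested enum stands in for VectorizedSupport.Support; DECIMAL_64 is the feature this patch introduces, and the empty reasons list is invented for the example):

    import java.util.ArrayList;
    import java.util.EnumSet;
    import java.util.List;
    import java.util.Set;

    public class FeatureSupportSketch {
      enum Support { DECIMAL_64 }  // stand-in for VectorizedSupport.Support

      public static void main(String[] args) {
        Set<Support> inputFormatSupportSet = EnumSet.of(Support.DECIMAL_64);
        Set<Support> supportSetInUse = EnumSet.of(Support.DECIMAL_64);
        List<String> supportRemovedReasons = new ArrayList<>();

        // The explain methods above return toString() of each container and
        // null when nothing was recorded, so empty fields are simply omitted.
        System.out.println("inputFormatFeatureSupport: " + inputFormatSupportSet);
        System.out.println("featureSupportInUse: " + supportSetInUse);
        if (!supportRemovedReasons.isEmpty()) {
          System.out.println("vectorizationSupportRemovedReasons: " + supportRemovedReasons);
        }
      }
    }
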
http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
index 29a41a2..dd241e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
@@ -138,11 +138,11 @@ public class PTFDesc extends AbstractOperatorDesc {
 
     private VectorizationCondition[] nativeConditions;
 
-    public PTFOperatorExplainVectorization(PTFDesc PTFDesc, VectorDesc vectorDesc) {
+    public PTFOperatorExplainVectorization(PTFDesc PTFDesc, VectorPTFDesc vectorPTFDesc) {
       // VectorPTFOperator is native vectorized.
-      super(vectorDesc, true);
+      super(vectorPTFDesc, true);
       this.PTFDesc = PTFDesc;
-      vectorPTFDesc = (VectorPTFDesc) vectorDesc;
+      this.vectorPTFDesc = vectorPTFDesc;
       vectorPTFInfo = vectorPTFDesc.getVectorPTFInfo();
     }
 
@@ -221,9 +221,10 @@ public class PTFDesc extends AbstractOperatorDesc {
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "PTF Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public PTFOperatorExplainVectorization getPTFVectorization() {
-    if (vectorDesc == null) {
+    VectorPTFDesc vectorPTFDesc = (VectorPTFDesc) getVectorDesc();
+    if (vectorPTFDesc == null) {
       return null;
     }
-    return new PTFOperatorExplainVectorization(this, vectorDesc);
+    return new PTFOperatorExplainVectorization(this, vectorPTFDesc);
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
index 8820833..24e107a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
@@ -150,7 +150,6 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
     this.distinctColumnIndices = distinctColumnIndices;
     this.setNumBuckets(-1);
     this.setBucketCols(null);
-    this.vectorDesc = null;
   }
 
   @Override
@@ -180,10 +179,6 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
     desc.reduceTraits = reduceTraits.clone();
     desc.setDeduplicated(isDeduplicated);
     desc.setHasOrderBy(hasOrderBy);
-    if (vectorDesc != null) {
-      throw new RuntimeException("Clone with vectorization desc not supported");
-    }
-    desc.vectorDesc = null;
     desc.outputName = outputName;
     return desc;
   }
@@ -504,15 +499,16 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
 
     private final ReduceSinkDesc reduceSinkDesc;
     private final VectorReduceSinkDesc vectorReduceSinkDesc;
-    private final VectorReduceSinkInfo vectorReduceSinkInfo; 
+    private final VectorReduceSinkInfo vectorReduceSinkInfo;
 
     private VectorizationCondition[] nativeConditions;
 
-    public ReduceSinkOperatorExplainVectorization(ReduceSinkDesc reduceSinkDesc, VectorDesc vectorDesc) {
+    public ReduceSinkOperatorExplainVectorization(ReduceSinkDesc reduceSinkDesc,
+        VectorReduceSinkDesc vectorReduceSinkDesc) {
       // VectorReduceSinkOperator is not native vectorized.
-      super(vectorDesc, ((VectorReduceSinkDesc) vectorDesc).reduceSinkKeyType()!= ReduceSinkKeyType.NONE);
+      super(vectorReduceSinkDesc, vectorReduceSinkDesc.reduceSinkKeyType() != ReduceSinkKeyType.NONE);
       this.reduceSinkDesc = reduceSinkDesc;
-      vectorReduceSinkDesc = (VectorReduceSinkDesc) vectorDesc;
+      this.vectorReduceSinkDesc = vectorReduceSinkDesc;
       vectorReduceSinkInfo = vectorReduceSinkDesc.getVectorReduceSinkInfo();
     }
 
@@ -532,8 +528,8 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
       return vectorExpressionsToStringList(vectorReduceSinkInfo.getReduceSinkValueExpressions());
     }
 
-    @Explain(vectorization = Vectorization.DETAIL, displayName = "keyColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getKeyColumns() {
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "keyColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getKeyColumnNums() {
       if (!isNative) {
         return null;
       }
@@ -545,8 +541,8 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
       return Arrays.toString(keyColumnMap);
     }
 
-    @Explain(vectorization = Vectorization.DETAIL, displayName = "valueColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getValueColumns() {
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "valueColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getValueColumnNums() {
       if (!isNative) {
         return null;
       }
@@ -558,8 +554,8 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
       return Arrays.toString(valueColumnMap);
     }
 
-    @Explain(vectorization = Vectorization.DETAIL, displayName = "bucketColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getBucketColumns() {
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "bucketColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getBucketColumnNums() {
       if (!isNative) {
         return null;
       }
@@ -571,8 +567,8 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
       return Arrays.toString(bucketColumnMap);
     }
 
-    @Explain(vectorization = Vectorization.DETAIL, displayName = "partitionColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getPartitionColumns() {
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "partitionColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getPartitionColumnNums() {
       if (!isNative) {
         return null;
       }
@@ -644,10 +640,11 @@ public class ReduceSinkDesc extends AbstractOperatorDesc {
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "Reduce Sink Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public ReduceSinkOperatorExplainVectorization getReduceSinkVectorization() {
-    if (vectorDesc == null) {
+    VectorReduceSinkDesc vectorReduceSinkDesc = (VectorReduceSinkDesc) getVectorDesc();
+    if (vectorReduceSinkDesc == null) {
       return null;
     }
-    return new ReduceSinkOperatorExplainVectorization(this, vectorDesc);
+    return new ReduceSinkOperatorExplainVectorization(this, vectorReduceSinkDesc);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
index fcfd911..106e487 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
@@ -146,11 +146,12 @@ public class SelectDesc extends AbstractOperatorDesc {
     private final SelectDesc selectDesc;
     private final VectorSelectDesc vectorSelectDesc;
 
-    public SelectOperatorExplainVectorization(SelectDesc selectDesc, VectorDesc vectorDesc) {
+    public SelectOperatorExplainVectorization(SelectDesc selectDesc,
+        VectorSelectDesc vectorSelectDesc) {
       // Native vectorization supported.
-      super(vectorDesc, true);
+      super(vectorSelectDesc, true);
       this.selectDesc = selectDesc;
-      vectorSelectDesc = (VectorSelectDesc) vectorDesc;
+      this.vectorSelectDesc = vectorSelectDesc;
     }
 
     @Explain(vectorization = Vectorization.OPERATOR, displayName = "selectExpressions", explainLevels = { Level.DEFAULT, Level.EXTENDED })
@@ -158,18 +159,19 @@ public class SelectDesc extends AbstractOperatorDesc {
       return vectorExpressionsToStringList(vectorSelectDesc.getSelectExpressions());
     }
 
-    @Explain(vectorization = Vectorization.EXPRESSION, displayName = "projectedOutputColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getProjectedOutputColumns() {
+    @Explain(vectorization = Vectorization.EXPRESSION, displayName = "projectedOutputColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getProjectedOutputColumnNums() {
       return Arrays.toString(vectorSelectDesc.getProjectedOutputColumns());
     }
   }
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "Select Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public SelectOperatorExplainVectorization getSelectVectorization() {
-    if (vectorDesc == null) {
+    VectorSelectDesc vectorSelectDesc = (VectorSelectDesc) getVectorDesc();
+    if (vectorSelectDesc == null) {
       return null;
     }
-    return new SelectOperatorExplainVectorization(this, vectorDesc);
+    return new SelectOperatorExplainVectorization(this, vectorSelectDesc);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/SparkHashTableSinkDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/SparkHashTableSinkDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/SparkHashTableSinkDesc.java
index 260bc07..d6061de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/SparkHashTableSinkDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/SparkHashTableSinkDesc.java
@@ -52,20 +52,22 @@ public class SparkHashTableSinkDesc extends HashTableSinkDesc {
     private final HashTableSinkDesc filterDesc;
     private final VectorSparkHashTableSinkDesc vectorHashTableSinkDesc;
 
-    public SparkHashTableSinkOperatorExplainVectorization(HashTableSinkDesc filterDesc, VectorDesc vectorDesc) {
+    public SparkHashTableSinkOperatorExplainVectorization(HashTableSinkDesc filterDesc,
+        VectorSparkHashTableSinkDesc vectorSparkHashTableSinkDesc) {
       // Native vectorization supported.
-      super(vectorDesc, true);
+      super(vectorSparkHashTableSinkDesc, true);
       this.filterDesc = filterDesc;
-      vectorHashTableSinkDesc = (VectorSparkHashTableSinkDesc) vectorDesc;
+      this.vectorHashTableSinkDesc = vectorSparkHashTableSinkDesc;
     }
   }
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "Spark Hash Table Sink Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public SparkHashTableSinkOperatorExplainVectorization getHashTableSinkVectorization() {
-    if (vectorDesc == null) {
+    VectorSparkHashTableSinkDesc vectorHashTableSinkDesc = (VectorSparkHashTableSinkDesc) getVectorDesc();
+    if (vectorHashTableSinkDesc == null) {
       return null;
     }
-    return new SparkHashTableSinkOperatorExplainVectorization(this, vectorDesc);
+    return new SparkHashTableSinkOperatorExplainVectorization(this, vectorHashTableSinkDesc);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
index ca20afb..75d0f43 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
@@ -26,13 +26,16 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.parse.TableSample;
+import org.apache.hadoop.hive.ql.plan.BaseWork.BaseExplainVectorization;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.plan.Explain.Vectorization;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 /**
  * Table Scan Descriptor Currently, data is only read from a base source as part
@@ -427,25 +430,45 @@ public class TableScanDesc extends AbstractOperatorDesc {
     private final TableScanDesc tableScanDesc;
     private final VectorTableScanDesc vectorTableScanDesc;
 
-    public TableScanOperatorExplainVectorization(TableScanDesc tableScanDesc, VectorDesc vectorDesc) {
+    public TableScanOperatorExplainVectorization(TableScanDesc tableScanDesc,
+        VectorTableScanDesc vectorTableScanDesc) {
       // Native vectorization supported.
-      super(vectorDesc, true);
+      super(vectorTableScanDesc, true);
       this.tableScanDesc = tableScanDesc;
-      vectorTableScanDesc = (VectorTableScanDesc) vectorDesc;
+      this.vectorTableScanDesc = vectorTableScanDesc;
     }
 
-    @Explain(vectorization = Vectorization.EXPRESSION, displayName = "projectedOutputColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
-    public String getProjectedOutputColumns() {
-      return Arrays.toString(vectorTableScanDesc.getProjectedOutputColumns());
+    @Explain(vectorization = Vectorization.DETAIL, displayName = "vectorizationSchemaColumns", explainLevels = { Level.DEFAULT, Level.EXTENDED })
+    public String getSchemaColumns() {
+      String[] projectedColumnNames = vectorTableScanDesc.getProjectedColumnNames();
+      TypeInfo[] projectedColumnTypeInfos = vectorTableScanDesc.getProjectedColumnTypeInfos();
+
+      // We currently include all data columns, partition columns, and any
+      // vectorization-available virtual columns in the VRB.
+      final int size = projectedColumnNames.length;
+      int[] projectionColumns = new int[size];
+      for (int i = 0; i < size; i++) {
+        projectionColumns[i] = i;
+      }
+
+      DataTypePhysicalVariation[] projectedColumnDataTypePhysicalVariations =
+          vectorTableScanDesc.getProjectedColumnDataTypePhysicalVariations();
+
+      return BaseExplainVectorization.getColumnAndTypes(
+          projectionColumns,
+          projectedColumnNames,
+          projectedColumnTypeInfos,
+          projectedColumnDataTypePhysicalVariations).toString();
     }
   }
 
   @Explain(vectorization = Vectorization.OPERATOR, displayName = "TableScan Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED })
   public TableScanOperatorExplainVectorization getTableScanVectorization() {
-    if (vectorDesc == null) {
+    VectorTableScanDesc vectorTableScanDesc = (VectorTableScanDesc) getVectorDesc();
+    if (vectorTableScanDesc == null) {
       return null;
     }
-    return new TableScanOperatorExplainVectorization(this, vectorDesc);
+    return new TableScanOperatorExplainVectorization(this, vectorTableScanDesc);
   }
 
   public void setVectorized(boolean vectorized) {

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByDesc.java
index 89d868d..039863b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorGroupByDesc.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationDesc;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 
@@ -60,17 +61,14 @@ public class VectorGroupByDesc extends AbstractVectorDesc  {
 
   private ProcessingMode processingMode;
 
-  private boolean isVectorOutput;
-
   private VectorExpression[] keyExpressions;
-  private VectorAggregateExpression[] aggregators;
+  private VectorAggregationDesc[] vecAggrDescs;
   private int[] projectedOutputColumns;
   private boolean isVectorizationComplexTypesEnabled;
   private boolean isVectorizationGroupByComplexTypesEnabled;
 
   public VectorGroupByDesc() {
     this.processingMode = ProcessingMode.NONE;
-    this.isVectorOutput = false;
   }
 
   public void setProcessingMode(ProcessingMode processingMode) {
@@ -80,14 +78,6 @@ public class VectorGroupByDesc extends AbstractVectorDesc  {
     return processingMode;
   }
 
-  public boolean isVectorOutput() {
-    return isVectorOutput;
-  }
-
-  public void setVectorOutput(boolean isVectorOutput) {
-    this.isVectorOutput = isVectorOutput;
-  }
-
   public void setKeyExpressions(VectorExpression[] keyExpressions) {
     this.keyExpressions = keyExpressions;
   }
@@ -96,12 +86,12 @@ public class VectorGroupByDesc extends AbstractVectorDesc  {
     return keyExpressions;
   }
 
-  public void setAggregators(VectorAggregateExpression[] aggregators) {
-    this.aggregators = aggregators;
+  public void setVecAggrDescs(VectorAggregationDesc[] vecAggrDescs) {
+    this.vecAggrDescs = vecAggrDescs;
   }
 
-  public VectorAggregateExpression[] getAggregators() {
-    return aggregators;
+  public VectorAggregationDesc[] getVecAggrDescs() {
+    return vecAggrDescs;
   }
 
   public void setProjectedOutputColumns(int[] projectedOutputColumns) {

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorTableScanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorTableScanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorTableScanDesc.java
index 84729a5..5d55d6e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorTableScanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/VectorTableScanDesc.java
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
+import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
 /**
  * VectorTableScanDesc.
  *
@@ -30,16 +33,45 @@ public class VectorTableScanDesc extends AbstractVectorDesc  {
 
   private static final long serialVersionUID = 1L;
 
-  private int[] projectedOutputColumns;
+  private int[] projectedColumns;
+  private String[] projectedColumnNames;
+  private TypeInfo[] projectedColumnTypeInfos;
+  private DataTypePhysicalVariation[] projectedColumnDataTypePhysicalVariation;
 
   public VectorTableScanDesc() {
   }
 
-  public void setProjectedOutputColumns(int[] projectedOutputColumns) {
-    this.projectedOutputColumns = projectedOutputColumns;
+  public void setProjectedColumns(int[] projectedColumns) {
+    this.projectedColumns = projectedColumns;
+  }
+
+  public int[] getProjectedColumns() {
+    return projectedColumns;
+  }
+
+  public void setProjectedColumnNames(String[] projectedColumnNames) {
+    this.projectedColumnNames = projectedColumnNames;
+  }
+
+  public String[] getProjectedColumnNames() {
+    return projectedColumnNames;
+  }
+
+  public void setProjectedColumnTypeInfos(TypeInfo[] projectedColumnTypeInfos) {
+    this.projectedColumnTypeInfos = projectedColumnTypeInfos;
+  }
+
+  public TypeInfo[] getProjectedColumnTypeInfos() {
+    return projectedColumnTypeInfos;
+  }
+
+  public void setProjectedColumnDataTypePhysicalVariations(
+      DataTypePhysicalVariation[] projectedColumnDataTypePhysicalVariation) {
+    this.projectedColumnDataTypePhysicalVariation =
+        projectedColumnDataTypePhysicalVariation;
   }
 
-  public int[] getProjectedOutputColumns() {
-    return projectedOutputColumns;
+  public DataTypePhysicalVariation[] getProjectedColumnDataTypePhysicalVariations() {
+    return projectedColumnDataTypePhysicalVariation;
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
index 2ea426c..7c4423d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
@@ -27,6 +27,8 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
@@ -117,6 +119,11 @@ public class GenericUDAFAverage extends AbstractGenericUDAFResolver {
     return eval;
   }
 
+  @VectorizedUDAFs({
+    VectorUDAFAvgLong.class, VectorUDAFAvgLongComplete.class,
+    VectorUDAFAvgDouble.class, VectorUDAFAvgDoubleComplete.class,
+    VectorUDAFAvgTimestamp.class, VectorUDAFAvgTimestampComplete.class,
+    VectorUDAFAvgPartial2.class, VectorUDAFAvgFinal.class})
   public static class GenericUDAFAverageEvaluatorDouble extends AbstractGenericUDAFAverageEvaluator<Double> {
 
     @Override
@@ -237,6 +244,10 @@ public class GenericUDAFAverage extends AbstractGenericUDAFResolver {
     }
   }
 
+  @VectorizedUDAFs({
+    VectorUDAFAvgDecimal.class, VectorUDAFAvgDecimalComplete.class,
+    VectorUDAFAvgDecimal64ToDecimal.class, VectorUDAFAvgDecimal64ToDecimalComplete.class,
+    VectorUDAFAvgDecimalPartial2.class, VectorUDAFAvgDecimalFinal.class})
   public static class GenericUDAFAverageEvaluatorDecimal extends AbstractGenericUDAFAverageEvaluator<HiveDecimal> {
 
     @Override

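The @VectorizedUDAFs annotations added here (and on the evaluators in the files below) let the Vectorizer discover each UDAF evaluator's candidate vectorized aggregation classes reflectively instead of through a hard-coded mapping. A self-contained sketch of that lookup, assuming the annotation carries a Class[] value with runtime retention (the nested types are stand-ins for the real ones in org.apache.hadoop.hive.ql.exec.vector):

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;

    public class VectorizedUDAFsSketch {
      @Retention(RetentionPolicy.RUNTIME)
      @interface VectorizedUDAFs {
        Class<?>[] value();
      }

      // Stand-ins for generated vectorized aggregation classes.
      static class VectorUDAFAvgLong { }
      static class VectorUDAFAvgDouble { }

      @VectorizedUDAFs({ VectorUDAFAvgLong.class, VectorUDAFAvgDouble.class })
      static class GenericUDAFAverageEvaluatorDouble { }

      public static void main(String[] args) {
        VectorizedUDAFs ann =
            GenericUDAFAverageEvaluatorDouble.class.getAnnotation(VectorizedUDAFs.class);
        for (Class<?> candidate : ann.value()) {
          System.out.println("candidate vectorized aggregation: " + candidate.getSimpleName());
        }
      }
    }
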
http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
index 3d85cc4..1df4e17 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
@@ -23,6 +23,9 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ColStatistics;
@@ -64,6 +67,9 @@ public class GenericUDAFBloomFilter implements GenericUDAFResolver2 {
   /**
    * GenericUDAFBloomFilterEvaluator - Evaluator class for BloomFilter
    */
+  @VectorizedUDAFs({
+    VectorUDAFBloomFilter.class,
+    VectorUDAFBloomFilterMerge.class})
   public static class GenericUDAFBloomFilterEvaluator extends GenericUDAFEvaluator {
     // Source operator to get the number of entries
     private SelectOperator sourceOperator;

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
index d1d0131..a4aff23 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
@@ -21,6 +21,8 @@ import java.util.HashSet;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
@@ -84,6 +86,10 @@ public class GenericUDAFCount implements GenericUDAFResolver2 {
    * GenericUDAFCountEvaluator.
    *
    */
+  @VectorizedUDAFs({
+    VectorUDAFCount.class,
+    VectorUDAFCountMerge.class,
+    VectorUDAFCountStar.class})
   public static class GenericUDAFCountEvaluator extends GenericUDAFEvaluator {
     private boolean isWindowing = false;
     private boolean countAllColumns = false;

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java
index 763bfd5..ace96b5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java
@@ -24,6 +24,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec;
@@ -60,6 +62,14 @@ public class GenericUDAFMax extends AbstractGenericUDAFResolver {
   }
 
   @UDFType(distinctLike=true)
+  @VectorizedUDAFs({
+    VectorUDAFMaxLong.class,
+    VectorUDAFMaxDouble.class,
+    VectorUDAFMaxDecimal.class,
+    VectorUDAFMaxDecimal64.class,
+    VectorUDAFMaxTimestamp.class,
+    VectorUDAFMaxIntervalDayTime.class,
+    VectorUDAFMaxString.class})
   public static class GenericUDAFMaxEvaluator extends GenericUDAFEvaluator {
 
     private transient ObjectInspector inputOI;

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java
index 132bad6..ddab54a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java
@@ -21,6 +21,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ptf.BoundaryDef;
@@ -58,6 +60,14 @@ public class GenericUDAFMin extends AbstractGenericUDAFResolver {
   }
 
   @UDFType(distinctLike=true)
+  @VectorizedUDAFs({
+    VectorUDAFMinLong.class,
+    VectorUDAFMinDouble.class,
+    VectorUDAFMinDecimal.class,
+    VectorUDAFMinDecimal64.class,
+    VectorUDAFMinTimestamp.class,
+    VectorUDAFMinIntervalDayTime.class,
+    VectorUDAFMinString.class})
   public static class GenericUDAFMinEvaluator extends GenericUDAFEvaluator {
 
     private transient ObjectInspector inputOI;

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
index 071884c..3e778c4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -74,9 +76,23 @@ public class GenericUDAFStd extends GenericUDAFVariance {
    * and overriding the terminate() method of the evaluator.
    *
    */
+  @VectorizedUDAFs({
+    VectorUDAFVarLong.class, VectorUDAFVarLongComplete.class,
+    VectorUDAFVarDouble.class, VectorUDAFVarDoubleComplete.class,
+    VectorUDAFVarDecimal.class, VectorUDAFVarDecimalComplete.class,
+    VectorUDAFVarTimestamp.class, VectorUDAFVarTimestampComplete.class,
+    VectorUDAFVarPartial2.class, VectorUDAFVarFinal.class})
   public static class GenericUDAFStdEvaluator extends
       GenericUDAFVarianceEvaluator {
 
+    /*
+     * Calculate the std result when count > 1.  Public so the vectorization code
+     * can reuse it.
+     */
+    public static double calculateStdResult(double variance, long count) {
+      return Math.sqrt(variance / count);
+    }
+
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
       StdAgg myagg = (StdAgg) agg;
@@ -85,7 +101,8 @@ public class GenericUDAFStd extends GenericUDAFVariance {
         return null;
       } else {
         if (myagg.count > 1) {
-          getResult().set(Math.sqrt(myagg.variance / (myagg.count)));
+          getResult().set(
+              calculateStdResult(myagg.variance, myagg.count));
         } else { // for one element the variance is always 0
           getResult().set(0);
         }

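The helper factored out above makes the final step explicit: the evaluator's running variance field accumulates the sum of squared deviations from the mean (often called M2), and the population standard deviation is sqrt(M2 / count). A standalone numeric check, with the method body copied from the diff; the sample variant added in GenericUDAFStdSample below divides by count - 1 instead:

  // Quick check of calculateStdResult.  For {1, 2, 3, 4}: mean = 2.5 and
  // M2 = 2.25 + 0.25 + 0.25 + 2.25 = 5.0.
  public class StdResultCheck {
    static double calculateStdResult(double variance, long count) {
      return Math.sqrt(variance / count);             // body from the diff above
    }

    public static void main(String[] args) {
      System.out.println(calculateStdResult(5.0, 4)); // ~1.1180 = sqrt(1.25)
      // The sample std dev of the same data would be sqrt(5.0 / 3) ~ 1.2910.
    }
  }
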
http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
index e032982..e18d224 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -74,9 +76,24 @@ public class GenericUDAFStdSample extends GenericUDAFVariance {
    * GenericUDAFVarianceEvaluator and overriding the terminate() method of the
    * evaluator.
    */
+  @VectorizedUDAFs({
+    VectorUDAFVarLong.class, VectorUDAFVarLongComplete.class,
+    VectorUDAFVarDouble.class, VectorUDAFVarDoubleComplete.class,
+    VectorUDAFVarDecimal.class, VectorUDAFVarDecimalComplete.class,
+    VectorUDAFVarTimestamp.class, VectorUDAFVarTimestampComplete.class,
+    VectorUDAFVarPartial2.class, VectorUDAFVarFinal.class})
   public static class GenericUDAFStdSampleEvaluator extends
       GenericUDAFVarianceEvaluator {
 
+
+    /*
+     * Calculate the std sample result when count > 1.  Public so the vectorization
+     * code can reuse it.
+     */
+    public static double calculateStdSampleResult(double variance, long count) {
+      return Math.sqrt(variance / (count - 1));
+    }
+
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
       StdAgg myagg = (StdAgg) agg;
@@ -84,7 +101,8 @@ public class GenericUDAFStdSample extends GenericUDAFVariance {
       if (myagg.count <= 1) { // SQL standard - return null for zero or one elements
         return null;
       } else {
-        getResult().set(Math.sqrt(myagg.variance / (myagg.count - 1)));
+        getResult().set(
+            calculateStdSampleResult(myagg.variance, myagg.count));
         return getResult();
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
index a041ffc..789f0fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
@@ -24,6 +24,9 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
@@ -203,6 +206,10 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
    * GenericUDAFSumHiveDecimal.
    *
    */
+  @VectorizedUDAFs({
+      VectorUDAFSumDecimal.class,
+      VectorUDAFSumDecimal64.class,
+      VectorUDAFSumDecimal64ToDecimal.class})
   public static class GenericUDAFSumHiveDecimal extends GenericUDAFSumEvaluator<HiveDecimalWritable> {
 
     @Override
@@ -297,6 +304,7 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
         if (isWindowingDistinct()) {
           throw new HiveException("Distinct windowing UDAF doesn't support merge and terminatePartial");
         } else {
+          // If partial is NULL, then there was an overflow and myagg.sum will be marked as not set.
           myagg.sum.mutateAdd(PrimitiveObjectInspectorUtils.getHiveDecimal(partial, inputOI));
         }
       }
@@ -368,6 +376,9 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
    * GenericUDAFSumDouble.
    *
    */
+  @VectorizedUDAFs({
+    VectorUDAFSumDouble.class,
+    VectorUDAFSumTimestamp.class})
   public static class GenericUDAFSumDouble extends GenericUDAFSumEvaluator<DoubleWritable> {
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
@@ -496,6 +507,8 @@ public class GenericUDAFSum extends AbstractGenericUDAFResolver {
    * GenericUDAFSumLong.
    *
    */
+  @VectorizedUDAFs({
+    VectorUDAFSumLong.class})
   public static class GenericUDAFSumLong extends GenericUDAFSumEvaluator<LongWritable> {
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {

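Among the sum aggregates, the class names outline the Decimal64 strategy: VectorUDAFSumDecimal64 keeps the accumulator in the compact scaled-long form, while VectorUDAFSumDecimal64ToDecimal starts from Decimal64 input but produces a full HiveDecimal result, since a sum can outgrow the 18-digit bound even when every input fits it. A purely conceptual sketch of that widening hand-off, using Math.addExact as a stand-in for the real precision checks (the actual generated classes are not shown here and certainly differ):

  import java.math.BigDecimal;

  // Conceptual only: accumulate unscaled longs, widen on overflow.  The real
  // code bounds by decimal precision, not just 64-bit range, and operates on
  // column vectors rather than single values.
  public class Decimal64SumSketch {
    private long sum = 0;
    private boolean widened = false;
    private BigDecimal wideSum = BigDecimal.ZERO;  // stand-in for HiveDecimalWritable

    public void add(long unscaled) {
      if (!widened) {
        try {
          sum = Math.addExact(sum, unscaled);
          return;
        } catch (ArithmeticException e) {
          widened = true;                          // switch to the wide path
          wideSum = BigDecimal.valueOf(sum);
        }
      }
      wideSum = wideSum.add(BigDecimal.valueOf(unscaled));
    }
  }
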
http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
index dcd90eb..bae633d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
@@ -18,13 +18,20 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStd.GenericUDAFStdEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStdSample.GenericUDAFStdSampleEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFVarianceSample.GenericUDAFVarianceSampleEvaluator;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -52,6 +59,98 @@ public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
 
   static final Logger LOG = LoggerFactory.getLogger(GenericUDAFVariance.class.getName());
 
+  public static enum VarianceKind {
+    NONE,
+    VARIANCE,
+    VARIANCE_SAMPLE,
+    STANDARD_DEVIATION,
+    STANDARD_DEVIATION_SAMPLE;
+
+    public static final Map<String,VarianceKind> nameMap = new HashMap<String,VarianceKind>();
+    static
+    {
+      nameMap.put("variance", VARIANCE);
+      nameMap.put("var_pop", VARIANCE);
+
+      nameMap.put("var_samp", VARIANCE_SAMPLE);
+
+      nameMap.put("std", STANDARD_DEVIATION);
+      nameMap.put("stddev", STANDARD_DEVIATION);
+      nameMap.put("stddev_pop", STANDARD_DEVIATION);
+
+      nameMap.put("stddev_samp", STANDARD_DEVIATION_SAMPLE);
+    }
+  };
+
+  public static boolean isVarianceFamilyName(String name) {
+    return (VarianceKind.nameMap.get(name) != null);
+  }
+
+  public static boolean isVarianceNull(long count, VarianceKind varianceKind) {
+    switch (varianceKind) {
+    case VARIANCE:
+    case STANDARD_DEVIATION:
+      return (count == 0);
+    case VARIANCE_SAMPLE:
+    case STANDARD_DEVIATION_SAMPLE:
+      return (count <= 1);
+    default:
+      throw new RuntimeException("Unexpected variance kind " + varianceKind);
+    }
+  }
+
+  /*
+   * Use when calculating the intermediate variance and count > 1.
+   *
+   * NOTE: count has already been incremented and sum already includes value.
+   */
+  public static double calculateIntermediate(
+      long count, double sum, double value, double variance) {
+    double t = count * value - sum;
+    variance += (t * t) / ((double) count * (count - 1));
+    return variance;
+  }
+
+  /*
+   * Use when merging variances, with partialCount > 0 and mergeCount > 0.
+   *
+   * NOTE: mergeCount and mergeSum do not yet include partialCount and partialSum.
+   */
+  public static double calculateMerge(
+      long partialCount, long mergeCount, double partialSum, double mergeSum,
+      double partialVariance, double mergeVariance) {
+
+    final double doublePartialCount = (double) partialCount;
+    final double doubleMergeCount = (double) mergeCount;
+
+    double t = (doublePartialCount / doubleMergeCount) * mergeSum - partialSum;
+    mergeVariance +=
+        partialVariance + ((doubleMergeCount / doublePartialCount) /
+            (doubleMergeCount + doublePartialCount)) * t * t;
+    return mergeVariance;
+  }
+
+  /*
+   * Calculate the variance family (VARIANCE, VARIANCE_SAMPLE, STANDARD_DEVIATION, or
+   * STANDARD_DEVIATION_SAMPLE) result when count > 1.  Public so the vectorization
+   * code can reuse it.
+   */
+  public static double calculateVarianceFamilyResult(double variance, long count,
+      VarianceKind varianceKind) {
+    switch (varianceKind) {
+    case VARIANCE:
+      return GenericUDAFVarianceEvaluator.calculateVarianceResult(variance, count);
+    case VARIANCE_SAMPLE:
+      return GenericUDAFVarianceSampleEvaluator.calculateVarianceSampleResult(variance, count);
+    case STANDARD_DEVIATION:
+      return GenericUDAFStdEvaluator.calculateStdResult(variance, count);
+    case STANDARD_DEVIATION_SAMPLE:
+      return GenericUDAFStdSampleEvaluator.calculateStdSampleResult(variance, count);
+    default:
+      throw new RuntimeException("Unexpected variance kind " + varianceKind);
+    }
+  }
+
   @Override
   public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
     if (parameters.length != 1) {
@@ -103,6 +202,12 @@ public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
    * Numer. Math, 58 (1991) pp. 583--590
    *
    */
+  @VectorizedUDAFs({
+    VectorUDAFVarLong.class, VectorUDAFVarLongComplete.class,
+    VectorUDAFVarDouble.class, VectorUDAFVarDoubleComplete.class,
+    VectorUDAFVarDecimal.class, VectorUDAFVarDecimalComplete.class,
+    VectorUDAFVarTimestamp.class, VectorUDAFVarTimestampComplete.class,
+    VectorUDAFVarPartial2.class, VectorUDAFVarFinal.class})
   public static class GenericUDAFVarianceEvaluator extends GenericUDAFEvaluator {
 
     // For PARTIAL1 and COMPLETE
@@ -210,8 +315,8 @@ public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
           myagg.count++;
           myagg.sum += v;
           if(myagg.count > 1) {
-            double t = myagg.count*v - myagg.sum;
-            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+            myagg.variance = calculateIntermediate(
+                myagg.count, myagg.sum, v, myagg.variance);
           }
         } catch (NumberFormatException e) {
           if (!warned) {
@@ -251,6 +356,7 @@ public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
           myagg.variance = sumFieldOI.get(partialVariance);
           myagg.count = countFieldOI.get(partialCount);
           myagg.sum = sumFieldOI.get(partialSum);
+          return;
         }
 
         if (m != 0 && n != 0) {
@@ -259,14 +365,25 @@ public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
           double a = myagg.sum;
           double b = sumFieldOI.get(partialSum);
 
+          myagg.variance =
+              calculateMerge(
+                  /* partialCount */ m, /* mergeCount */ n,
+                  /* partialSum */ b, /* mergeSum */ a,
+                  sumFieldOI.get(partialVariance), myagg.variance);
+
           myagg.count += m;
           myagg.sum += b;
-          double t = (m/(double)n)*a - b;
-          myagg.variance += sumFieldOI.get(partialVariance) + ((n/(double)m)/((double)n+m)) * t * t;
         }
       }
     }
 
+    /*
+     * Calculate the variance result when count > 1.  Public so the vectorization code can reuse it.
+     */
+    public static double calculateVarianceResult(double variance, long count) {
+      return variance / count;
+    }
+
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
       StdAgg myagg = (StdAgg) agg;
@@ -275,7 +392,8 @@ public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
         return null;
       } else {
         if (myagg.count > 1) {
-          getResult().set(myagg.variance / (myagg.count));
+          getResult().set(
+              calculateVarianceResult(myagg.variance, myagg.count));
         } else { // for one element the variance is always 0
           getResult().set(0);
         }

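calculateIntermediate and calculateMerge are the two halves of the single-pass algorithm the class comment cites (Chan, Golub, LeVeque; Numer. Math. 58, 1991): the first updates M2 as each value streams in, the second combines two partial (count, sum, M2) triples. A self-contained check that a split-and-merge run reproduces the single-stream M2; the two helper bodies are copied from the diff, while the driver around them is illustrative only:

  // Bodies of the two helpers match the diff; the driver is illustrative.
  public class VarianceMergeDemo {

    static double calculateIntermediate(long count, double sum, double value, double variance) {
      double t = count * value - sum;
      variance += (t * t) / ((double) count * (count - 1));
      return variance;
    }

    static double calculateMerge(long partialCount, long mergeCount, double partialSum,
        double mergeSum, double partialVariance, double mergeVariance) {
      final double doublePartialCount = (double) partialCount;
      final double doubleMergeCount = (double) mergeCount;
      double t = (doublePartialCount / doubleMergeCount) * mergeSum - partialSum;
      mergeVariance += partialVariance + ((doubleMergeCount / doublePartialCount) /
          (doubleMergeCount + doublePartialCount)) * t * t;
      return mergeVariance;
    }

    // Streams values[from, to) into a {count, sum, m2} triple.
    static double[] stream(double[] values, int from, int to) {
      long count = 0; double sum = 0, m2 = 0;
      for (int i = from; i < to; i++) {
        count++; sum += values[i];
        if (count > 1) m2 = calculateIntermediate(count, sum, values[i], m2);
      }
      return new double[] {count, sum, m2};
    }

    public static void main(String[] args) {
      double[] data = {1, 2, 3, 4, 5, 6};

      double[] whole = stream(data, 0, 6);           // one pass over everything
      double[] left = stream(data, 0, 3);            // two partials...
      double[] right = stream(data, 3, 6);
      double merged = calculateMerge((long) left[0], (long) right[0],
          left[1], right[1], left[2], right[2]);     // ...then merged

      System.out.println(whole[2]);                  // 17.5 (M2 of 1..6)
      System.out.println(merged);                    // 17.5, identical up to rounding
      System.out.println(whole[2] / whole[0]);       // var_pop = 17.5 / 6 ~ 2.9167
    }
  }
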
http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java
index 8815086..6ef6300 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -74,9 +76,23 @@ public class GenericUDAFVarianceSample extends GenericUDAFVariance {
    * Compute the sample variance by extending GenericUDAFVarianceEvaluator and
    * overriding the terminate() method of the evaluator.
    */
+  @VectorizedUDAFs({
+    VectorUDAFVarLong.class, VectorUDAFVarLongComplete.class,
+    VectorUDAFVarDouble.class, VectorUDAFVarDoubleComplete.class,
+    VectorUDAFVarDecimal.class, VectorUDAFVarDecimalComplete.class,
+    VectorUDAFVarTimestamp.class, VectorUDAFVarTimestampComplete.class,
+    VectorUDAFVarPartial2.class, VectorUDAFVarFinal.class})
   public static class GenericUDAFVarianceSampleEvaluator extends
       GenericUDAFVarianceEvaluator {
 
+    /*
+     * Calculate the variance sample result when count > 1.  Public so the vectorization
+     * code can reuse it.
+     */
+    public static double calculateVarianceSampleResult(double variance, long count) {
+      return variance / (count - 1);
+    }
+
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
       StdAgg myagg = (StdAgg) agg;
@@ -84,7 +100,8 @@ public class GenericUDAFVarianceSample extends GenericUDAFVariance {
       if (myagg.count <= 1) {
         return null;
       } else {
-        getResult().set(myagg.variance / (myagg.count - 1));
+        getResult().set(
+            calculateVarianceSampleResult(myagg.variance, myagg.count));
         return getResult();
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
index b393843..4567446 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressionsSupportDecimal64;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColEqualLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColEqualLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarEqualLongColumn;
@@ -58,6 +59,9 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
   FilterDecimalColEqualDecimalColumn.class, FilterDecimalColEqualDecimalScalar.class,
   FilterDecimalScalarEqualDecimalColumn.class,
 
+  FilterDecimal64ColEqualDecimal64Column.class, FilterDecimal64ColEqualDecimal64Scalar.class,
+  FilterDecimal64ScalarEqualDecimal64Column.class,
+
   TimestampColEqualTimestampColumn.class,
   TimestampColEqualTimestampScalar.class, TimestampScalarEqualTimestampColumn.class,
   TimestampColEqualLongColumn.class,
@@ -90,6 +94,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
   DateColEqualDateScalar.class,FilterDateColEqualDateScalar.class,
   DateScalarEqualDateColumn.class,FilterDateScalarEqualDateColumn.class,
   })
+@VectorizedExpressionsSupportDecimal64()
 @NDV(maxNdv = 2)
 public class GenericUDFOPEqual extends GenericUDFBaseCompare {
   public GenericUDFOPEqual(){

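The new FilterDecimal64* classes registered above exist because decimals of precision 18 or less can be carried as unscaled longs in a LongColumnVector, so once both operands share a scale, decimal equality reduces to plain long equality. A conceptual sketch of the column-vs-scalar case follows; it is not the generated Hive class, which additionally handles null vectors, isRepeating, and batch reuse:

  // Conceptual core of a Decimal64 column-vs-scalar equality filter: with
  // both operands at the same scale, decimal equality is long equality.
  public final class Decimal64EqualScalarSketch {

    // Compacts the selected row indices to those whose unscaled value
    // equals the scalar, returning the new selected count.
    static int filter(long[] decimal64Column, int size, int[] selected, long scalar) {
      int newSize = 0;
      for (int i = 0; i < size; i++) {
        int row = selected[i];
        if (decimal64Column[row] == scalar) {
          selected[newSize++] = row;
        }
      }
      return newSize;
    }

    public static void main(String[] args) {
      // decimal(10,2) values 1.00, 2.50, 1.00 stored as unscaled longs.
      long[] col = {100, 250, 100};
      int[] sel = {0, 1, 2};
      int n = filter(col, 3, sel, 100);  // scalar 1.00 at scale 2
      System.out.println(n);             // 2 rows match
    }
  }
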
http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
index 50c9d09..783471d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressionsSupportDecimal64;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColGreaterEqualLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColGreaterEqualLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarGreaterEqualLongColumn;
@@ -59,6 +60,9 @@ import org.apache.hadoop.io.Text;
   FilterDecimalColGreaterEqualDecimalColumn.class, FilterDecimalColGreaterEqualDecimalScalar.class,
   FilterDecimalScalarGreaterEqualDecimalColumn.class,
 
+  FilterDecimal64ColGreaterEqualDecimal64Column.class, FilterDecimal64ColGreaterEqualDecimal64Scalar.class,
+  FilterDecimal64ScalarGreaterEqualDecimal64Column.class,
+
   TimestampColGreaterEqualTimestampColumn.class,
   TimestampColGreaterEqualTimestampScalar.class, TimestampScalarGreaterEqualTimestampColumn.class,
   TimestampColGreaterEqualLongColumn.class,
@@ -91,6 +95,7 @@ import org.apache.hadoop.io.Text;
   DateColGreaterEqualDateScalar.class,FilterDateColGreaterEqualDateScalar.class,
   DateScalarGreaterEqualDateColumn.class,FilterDateScalarGreaterEqualDateColumn.class,
   })
+@VectorizedExpressionsSupportDecimal64()
 @NDV(maxNdv = 2)
 public class GenericUDFOPEqualOrGreaterThan extends GenericUDFBaseCompare {
   public GenericUDFOPEqualOrGreaterThan(){

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
index c28d797..1d9de0e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressionsSupportDecimal64;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColLessEqualLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColLessEqualLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarLessEqualLongColumn;
@@ -55,9 +56,13 @@ import org.apache.hadoop.io.Text;
   FilterDoubleColLessEqualLongScalar.class, FilterDoubleColLessEqualDoubleScalar.class,
   FilterLongScalarLessEqualLongColumn.class, FilterLongScalarLessEqualDoubleColumn.class,
   FilterDoubleScalarLessEqualLongColumn.class, FilterDoubleScalarLessEqualDoubleColumn.class,
+
   FilterDecimalColLessEqualDecimalColumn.class, FilterDecimalColLessEqualDecimalScalar.class,
   FilterDecimalScalarLessEqualDecimalColumn.class,
 
+  FilterDecimal64ColLessEqualDecimal64Column.class, FilterDecimal64ColLessEqualDecimal64Scalar.class,
+  FilterDecimal64ScalarLessEqualDecimal64Column.class,
+
   TimestampColLessEqualTimestampColumn.class,
   TimestampColLessEqualTimestampScalar.class, TimestampScalarLessEqualTimestampColumn.class,
   TimestampColLessEqualLongColumn.class,
@@ -90,6 +95,7 @@ import org.apache.hadoop.io.Text;
   DateColLessEqualDateScalar.class,FilterDateColLessEqualDateScalar.class,
   DateScalarLessEqualDateColumn.class,FilterDateScalarLessEqualDateColumn.class,
   })
+@VectorizedExpressionsSupportDecimal64()
 @NDV(maxNdv = 2)
 public class GenericUDFOPEqualOrLessThan extends GenericUDFBaseCompare {
   public GenericUDFOPEqualOrLessThan(){

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
index 72fe43d..1db94f0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressionsSupportDecimal64;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColGreaterLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColGreaterLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarGreaterLongColumn;
@@ -55,9 +56,13 @@ import org.apache.hadoop.io.Text;
   FilterDoubleColGreaterLongScalar.class, FilterDoubleColGreaterDoubleScalar.class,
   FilterLongScalarGreaterLongColumn.class, FilterLongScalarGreaterDoubleColumn.class,
   FilterDoubleScalarGreaterLongColumn.class, FilterDoubleScalarGreaterDoubleColumn.class,
+
   FilterDecimalColGreaterDecimalColumn.class, FilterDecimalColGreaterDecimalScalar.class,
   FilterDecimalScalarGreaterDecimalColumn.class,
 
+  FilterDecimal64ColGreaterDecimal64Column.class, FilterDecimal64ColGreaterDecimal64Scalar.class,
+  FilterDecimal64ScalarGreaterDecimal64Column.class,
+
   TimestampColGreaterTimestampColumn.class,
   TimestampColGreaterTimestampScalar.class, TimestampScalarGreaterTimestampColumn.class,
   TimestampColGreaterLongColumn.class,
@@ -90,6 +95,7 @@ import org.apache.hadoop.io.Text;
   DateColGreaterDateScalar.class,FilterDateColGreaterDateScalar.class,
   DateScalarGreaterDateColumn.class,FilterDateScalarGreaterDateColumn.class,
   })
+@VectorizedExpressionsSupportDecimal64()
 @NDV(maxNdv = 2)
 public class GenericUDFOPGreaterThan extends GenericUDFBaseCompare {
   public GenericUDFOPGreaterThan(){

http://git-wip-us.apache.org/repos/asf/hive/blob/e63ebccc/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
index 114d190..8a9c2d2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressionsSupportDecimal64;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColLessLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColLessLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarLessLongColumn;
@@ -58,6 +59,9 @@ import org.apache.hadoop.io.Text;
     FilterDecimalColLessDecimalColumn.class, FilterDecimalColLessDecimalScalar.class,
     FilterDecimalScalarLessDecimalColumn.class,
 
+    FilterDecimal64ColLessDecimal64Column.class, FilterDecimal64ColLessDecimal64Scalar.class,
+    FilterDecimal64ScalarLessDecimal64Column.class,
+
     TimestampColLessTimestampColumn.class,
     TimestampColLessTimestampScalar.class, TimestampScalarLessTimestampColumn.class,
     TimestampColLessLongColumn.class,
@@ -90,6 +94,7 @@ import org.apache.hadoop.io.Text;
     DateColLessDateScalar.class,FilterDateColLessDateScalar.class,
     DateScalarLessDateColumn.class,FilterDateScalarLessDateColumn.class,
     })
+@VectorizedExpressionsSupportDecimal64()
 @NDV(maxNdv = 2)
 public class GenericUDFOPLessThan extends GenericUDFBaseCompare {
   public GenericUDFOPLessThan(){

