From mmccl...@apache.org
Subject [32/34] hive git commit: HIVE-16589: Vectorization: Support Complex Types and GroupBy modes PARTIAL2, FINAL, and COMPLETE for AVG, VARIANCE (Matt McCline, reviewed by Jason Dere)
Date Thu, 22 Jun 2017 23:41:08 GMT
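
For context on the template changes below: each vectorized UDAF class is now
generated once per GenericUDAFEvaluator mode, selected by #IF/#ENDIF markers
in the templates. Roughly, assuming the standard Hive mode semantics:

    PARTIAL1: raw rows        -> partial struct {count, sum, input}
    PARTIAL2: partial structs -> merged partial struct
    FINAL:    partial structs -> final average (sum / count)
    COMPLETE: raw rows        -> final average (sum / count)

The "input" field is unused by AVG itself; it is carried so the vectorized
partial struct stays field-compatible with the row-mode GenericUDAFAverage
evaluator.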
http://git-wip-us.apache.org/repos/asf/hive/blob/92fbe256/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index ac62dcc..0aadee3 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -623,6 +623,7 @@ minillaplocal.query.files=acid_globallimit.q,\
   union_remove_26.q,\
   union_top_level.q,\
   vector_auto_smb_mapjoin_14.q,\
+  vector_complex_all.q,\
   vector_decimal_2.q,\
   vector_decimal_udf.q,\
   vector_groupby_cube1.q,\
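
(minillaplocal.query.files is the list of .q files the MiniLlap local CLI
driver runs in itests; registering vector_complex_all.q here is what routes
the new complex-type vectorization test through that driver.)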

http://git-wip-us.apache.org/repos/asf/hive/blob/92fbe256/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
index 46cbb5b..a463373 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -38,6 +39,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
+import com.google.common.base.Preconditions;
+
 /**
  * Generated from template VectorUDAFAvg.txt.
  */
@@ -46,7 +49,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 public class <ClassName> extends VectorAggregateExpression {
 
     private static final long serialVersionUID = 1L;
-    
+
     /** class for storing the current aggregate value. */
     static class Aggregation implements AggregationBuffer {
 
@@ -59,10 +62,10 @@ public class <ClassName> extends VectorAggregateExpression {
       * Value is explicitly (re)initialized in reset()
       */
       transient private boolean isNull = true;
-      
-      public void sumValue(<ValueType> value) {
+
+      public void avgValue(<ValueType> value) {
         if (isNull) {
-          sum = value; 
+          sum = value;
           count = 1;
           isNull = false;
         } else {
@@ -75,7 +78,7 @@ public class <ClassName> extends VectorAggregateExpression {
       public int getVariableSize() {
         throw new UnsupportedOperationException();
       }
-      
+
       @Override
       public void reset () {
         isNull = true;
@@ -83,44 +86,65 @@ public class <ClassName> extends VectorAggregateExpression {
         count = 0L;
       }
     }
-    
-    private VectorExpression inputExpression;
-
-    @Override
-    public VectorExpression inputExpression() {
-      return inputExpression;
-    }
 
+#IF PARTIAL1
     transient private Object[] partialResult;
     transient private LongWritable resultCount;
     transient private DoubleWritable resultSum;
+    transient private <CamelCaseValueType>Writable resultInput;
     transient private StructObjectInspector soi;
-        
-    public <ClassName>(VectorExpression inputExpression) {
-      this();
-      this.inputExpression = inputExpression;
+#ENDIF PARTIAL1
+#IF COMPLETE
+    transient private DoubleWritable fullResult;
+    transient private ObjectInspector oi;
+#ENDIF COMPLETE
+
+    public <ClassName>(VectorExpression inputExpression, GenericUDAFEvaluator.Mode mode) {
+      super(inputExpression, mode);
+#IF PARTIAL1
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.PARTIAL1);
+#ENDIF PARTIAL1
+#IF COMPLETE
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.COMPLETE);
+#ENDIF COMPLETE
     }
 
-    public <ClassName>() {
-      super();
-      partialResult = new Object[2];
+    private void init() {
+#IF PARTIAL1
+      partialResult = new Object[3];
       resultCount = new LongWritable();
       resultSum = new DoubleWritable();
+      resultInput = new <CamelCaseValueType>Writable();
       partialResult[0] = resultCount;
       partialResult[1] = resultSum;
+      partialResult[2] = resultInput;
       initPartialResultInspector();
+#ENDIF PARTIAL1
+#IF COMPLETE
+      fullResult = new DoubleWritable();
+      initFullResultInspector();
+#ENDIF COMPLETE
     }
 
+#IF PARTIAL1
     private void initPartialResultInspector() {
         List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
         foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
         foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writable<CamelCaseValueType>ObjectInspector);
         List<String> fname = new ArrayList<String>();
         fname.add("count");
         fname.add("sum");
+        fname.add("input");
         soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
     }
-    
+#ENDIF PARTIAL1
+#IF COMPLETE
+    private void initFullResultInspector() {
+      oi = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+    }
+#ENDIF COMPLETE
+
     private Aggregation getCurrentAggregationBuffer(
         VectorAggregationBufferRow[] aggregationBufferSets,
         int bufferIndex,
@@ -129,21 +153,21 @@ public class <ClassName> extends VectorAggregateExpression {
       Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(bufferIndex);
       return myagg;
     }
-    
+
     @Override
     public void aggregateInputSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
-      int bufferIndex, 
+      int bufferIndex,
       VectorizedRowBatch batch) throws HiveException {
-      
+
       int batchSize = batch.size;
-      
+
       if (batchSize == 0) {
         return;
       }
-      
+
       inputExpression.evaluate(batch);
-      
+
        <InputColumnVectorType> inputVector = ( <InputColumnVectorType>)batch.
         cols[this.inputExpression.getOutputColumn()];
       <ValueType>[] vector = inputVector.vector;
@@ -197,12 +221,12 @@ public class <ClassName> extends VectorAggregateExpression {
 
       for (int i=0; i < batchSize; ++i) {
         Aggregation myagg = getCurrentAggregationBuffer(
-          aggregationBufferSets, 
+          aggregationBufferSets,
           bufferIndex,
           i);
-        myagg.sumValue(value);
+        myagg.avgValue(value);
       }
-    } 
+    }
 
     private void iterateNoNullsSelectionWithAggregationSelection(
       VectorAggregationBufferRow[] aggregationBufferSets,
@@ -210,13 +234,13 @@ public class <ClassName> extends VectorAggregateExpression {
       <ValueType>[] values,
       int[] selection,
       int batchSize) {
-      
+
       for (int i=0; i < batchSize; ++i) {
         Aggregation myagg = getCurrentAggregationBuffer(
-          aggregationBufferSets, 
+          aggregationBufferSets,
           bufferIndex,
           i);
-        myagg.sumValue(values[selection[i]]);
+        myagg.avgValue(values[selection[i]]);
       }
     }
 
@@ -227,10 +251,10 @@ public class <ClassName> extends VectorAggregateExpression {
       int batchSize) {
       for (int i=0; i < batchSize; ++i) {
         Aggregation myagg = getCurrentAggregationBuffer(
-          aggregationBufferSets, 
+          aggregationBufferSets,
           bufferIndex,
           i);
-        myagg.sumValue(values[i]);
+        myagg.avgValue(values[i]);
       }
     }
 
@@ -245,15 +269,15 @@ public class <ClassName> extends VectorAggregateExpression {
       if (isNull[0]) {
         return;
       }
-      
+
       for (int i=0; i < batchSize; ++i) {
         Aggregation myagg = getCurrentAggregationBuffer(
           aggregationBufferSets,
           bufferIndex,
           i);
-        myagg.sumValue(value);
+        myagg.avgValue(value);
       }
-      
+
     }
 
     private void iterateHasNullsRepeatingWithAggregationSelection(
@@ -272,7 +296,7 @@ public class <ClassName> extends VectorAggregateExpression {
           aggregationBufferSets,
           bufferIndex,
           i);
-        myagg.sumValue(value);
+        myagg.avgValue(value);
       }
     }
 
@@ -288,10 +312,10 @@ public class <ClassName> extends VectorAggregateExpression {
         int i = selection[j];
         if (!isNull[i]) {
           Aggregation myagg = getCurrentAggregationBuffer(
-            aggregationBufferSets, 
+            aggregationBufferSets,
             bufferIndex,
             j);
-          myagg.sumValue(values[i]);
+          myagg.avgValue(values[i]);
         }
       }
    }
@@ -306,68 +330,64 @@ public class <ClassName> extends VectorAggregateExpression {
       for (int i=0; i < batchSize; ++i) {
         if (!isNull[i]) {
           Aggregation myagg = getCurrentAggregationBuffer(
-            aggregationBufferSets, 
+            aggregationBufferSets,
             bufferIndex,
             i);
-          myagg.sumValue(values[i]);
+          myagg.avgValue(values[i]);
         }
       }
    }
 
-    
     @Override
-    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch) 
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
         throws HiveException {
-        
-        inputExpression.evaluate(batch);
-        
-        <InputColumnVectorType> inputVector = 
-            (<InputColumnVectorType>)batch.cols[this.inputExpression.getOutputColumn()];
-        
-        int batchSize = batch.size;
-        
-        if (batchSize == 0) {
-          return;
-        }
-        
-        Aggregation myagg = (Aggregation)agg;
-  
-        <ValueType>[] vector = inputVector.vector;
-        
-        if (inputVector.isRepeating) {
-          if (inputVector.noNulls) {
-            if (myagg.isNull) {
-              myagg.isNull = false;
-              myagg.sum = 0;
-              myagg.count = 0;
-            }
-            myagg.sum += vector[0]*batchSize;
-            myagg.count += batchSize;
+
+      inputExpression.evaluate(batch);
+
+      <InputColumnVectorType> inputVector =
+          (<InputColumnVectorType>)batch.cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation)agg;
+
+      <ValueType>[] vector = inputVector.vector;
+
+      if (inputVector.isRepeating) {
+        if (inputVector.noNulls) {
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.sum = 0;
+            myagg.count = 0;
           }
-          return;
-        }
-        
-        if (!batch.selectedInUse && inputVector.noNulls) {
-          iterateNoSelectionNoNulls(myagg, vector, batchSize);
-        }
-        else if (!batch.selectedInUse) {
-          iterateNoSelectionHasNulls(myagg, vector, batchSize, inputVector.isNull);
-        }
-        else if (inputVector.noNulls){
-          iterateSelectionNoNulls(myagg, vector, batchSize, batch.selected);
-        }
-        else {
-          iterateSelectionHasNulls(myagg, vector, batchSize, inputVector.isNull, batch.selected);
+          myagg.sum += vector[0]*batchSize;
+          myagg.count += batchSize;
         }
+        return;
+      }
+
+      if (!batch.selectedInUse && inputVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, vector, batchSize);
+      } else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, vector, batchSize, inputVector.isNull);
+      } else if (inputVector.noNulls){
+        iterateSelectionNoNulls(myagg, vector, batchSize, batch.selected);
+      } else {
+        iterateSelectionHasNulls(myagg, vector, batchSize, inputVector.isNull, batch.selected);
+      }
     }
-  
+
     private void iterateSelectionHasNulls(
-        Aggregation myagg, 
-        <ValueType>[] vector, 
+        Aggregation myagg,
+        <ValueType>[] vector,
         int batchSize,
-        boolean[] isNull, 
+        boolean[] isNull,
         int[] selected) {
-      
+
       for (int j=0; j< batchSize; ++j) {
         int i = selected[j];
         if (!isNull[i]) {
@@ -384,17 +404,17 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     private void iterateSelectionNoNulls(
-        Aggregation myagg, 
-        <ValueType>[] vector, 
-        int batchSize, 
+        Aggregation myagg,
+        <ValueType>[] vector,
+        int batchSize,
         int[] selected) {
-      
+
       if (myagg.isNull) {
         myagg.isNull = false;
         myagg.sum = 0;
         myagg.count = 0;
       }
-      
+
       for (int i=0; i< batchSize; ++i) {
         <ValueType> value = vector[selected[i]];
         myagg.sum += value;
@@ -403,15 +423,15 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     private void iterateNoSelectionHasNulls(
-        Aggregation myagg, 
-        <ValueType>[] vector, 
+        Aggregation myagg,
+        <ValueType>[] vector,
         int batchSize,
         boolean[] isNull) {
-      
+
       for(int i=0;i<batchSize;++i) {
         if (!isNull[i]) {
           <ValueType> value = vector[i];
-          if (myagg.isNull) { 
+          if (myagg.isNull) {
             myagg.isNull = false;
             myagg.sum = 0;
             myagg.count = 0;
@@ -423,15 +443,15 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     private void iterateNoSelectionNoNulls(
-        Aggregation myagg, 
-        <ValueType>[] vector, 
+        Aggregation myagg,
+        <ValueType>[] vector,
         int batchSize) {
       if (myagg.isNull) {
         myagg.isNull = false;
         myagg.sum = 0;
         myagg.count = 0;
       }
-      
+
       for (int i=0;i<batchSize;++i) {
         <ValueType> value = vector[i];
         myagg.sum += value;
@@ -456,19 +476,29 @@ public class <ClassName> extends VectorAggregateExpression {
       Aggregation myagg = (Aggregation) agg;
       if (myagg.isNull) {
         return null;
-      }
-      else {
-        assert(0 < myagg.count);
+      } else {
+        Preconditions.checkState(myagg.count > 0);
+#IF PARTIAL1
         resultCount.set (myagg.count);
         resultSum.set (myagg.sum);
         return partialResult;
+#ENDIF PARTIAL1
+#IF COMPLETE
+        fullResult.set (myagg.sum / myagg.count);
+        return fullResult;
+#ENDIF COMPLETE
       }
     }
-    
+
   @Override
-    public ObjectInspector getOutputObjectInspector() {
+  public ObjectInspector getOutputObjectInspector() {
+#IF PARTIAL1
     return soi;
-  }     
+#ENDIF PARTIAL1
+#IF COMPLETE
+    return oi;
+#ENDIF COMPLETE
+  }
 
   @Override
   public long getAggregationBufferFixedSize() {
@@ -481,15 +511,6 @@ public class <ClassName> extends VectorAggregateExpression {
 
   @Override
   public void init(AggregationDesc desc) throws HiveException {
-    // No-op
+    init();
   }
-  
-  public VectorExpression getInputExpression() {
-    return inputExpression;
-  }
-
-  public void setInputExpression(VectorExpression inputExpression) {
-    this.inputExpression = inputExpression;
-  }
-}
-
+}
\ No newline at end of file
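
How the two instantiations of this template divide the work, as a worked
example with illustrative numbers (not from the patch):

  // One mapper runs a PARTIAL1 instantiation over {2.0, 4.0, 9.0}:
  //   emits partial A = {count=3, sum=15.0, input=<carried, unused>}
  // Another mapper covers {5.0}:
  //   emits partial B = {count=1, sum=5.0, ...}
  // The reducer's merge side combines A and B:
  //   count = 3 + 1 = 4, sum = 15.0 + 5.0 = 20.0  ->  avg = 20.0 / 4 = 5.0
  // A COMPLETE instantiation sees raw rows and returns sum/count directly,
  // with no intermediate struct.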

http://git-wip-us.apache.org/repos/asf/hive/blob/92fbe256/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimal.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimal.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimal.txt
new file mode 100644
index 0000000..fa7b7c7
--- /dev/null
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimal.txt
@@ -0,0 +1,566 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage.GenericUDAFAverageEvaluatorDecimal;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Generated from template VectorUDAFAvgDecimal.txt.
+ */
+@Description(name = "avg",
+    value = "_FUNC_(AVG) - Returns the average value of expr (vectorized, type: decimal)")
+public class <ClassName> extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /** class for storing the current aggregate value. */
+    static class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private final HiveDecimalWritable sum = new HiveDecimalWritable();
+      transient private long count;
+      transient private boolean isNull;
+
+      public void avgValue(HiveDecimalWritable writable) {
+        if (isNull) {
+          // Make a copy since we intend to mutate sum.
+          sum.set(writable);
+          count = 1;
+          isNull = false;
+        } else {
+          // Note that if sum is out of range, mutateAdd will ignore the call.
+          // At the end, sum.isSet() can be checked for null.
+          sum.mutateAdd(writable);
+          count++;
+        }
+      }
+
+      public void avgValueNoNullCheck(HiveDecimalWritable writable) {
+        sum.mutateAdd(writable);
+        count++;
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset() {
+        isNull = true;
+        sum.setFromLong(0L);
+        count = 0;
+      }
+    }
+
+#IF PARTIAL1
+    transient private Object[] partialResult;
+    transient private LongWritable resultCount;
+    transient private HiveDecimalWritable resultSum;
+    transient private HiveDecimalWritable resultInput;
+    transient private StructObjectInspector soi;
+#ENDIF PARTIAL1
+#IF COMPLETE
+    transient private HiveDecimalWritable tempDecWritable;
+    transient private HiveDecimalWritable fullResult;
+    transient private ObjectInspector oi;
+#ENDIF COMPLETE
+
+    /**
+     * The scale of the SUM in the partial output
+     */
+    private int sumScale;
+
+    /**
+     * The precision of the SUM in the partial output
+     */
+    private int sumPrecision;
+
+    /**
+     * the scale of the input expression
+     */
+    private int inputScale;
+
+    /**
+     * the precision of the input expression
+     */
+    private int inputPrecision;
+
+    public <ClassName>(VectorExpression inputExpression,
+        GenericUDAFEvaluator.Mode mode) {
+      super(inputExpression, mode);
+#IF PARTIAL1
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.PARTIAL1);
+#ENDIF PARTIAL1
+#IF COMPLETE
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.COMPLETE);
+#ENDIF COMPLETE
+    }
+
+    private void init() {
+#IF PARTIAL1
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new HiveDecimalWritable();
+      resultInput = new HiveDecimalWritable(0L);
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultInput;
+#ENDIF PARTIAL1
+#IF COMPLETE
+      tempDecWritable = new HiveDecimalWritable();
+      fullResult = new HiveDecimalWritable();
+#ENDIF COMPLETE
+    }
+
+#IF PARTIAL1
+    private void initPartialResultInspector() {
+#ENDIF PARTIAL1
+#IF COMPLETE
+    private void initFullResultInspector() {
+#ENDIF COMPLETE
+      // the output type of the vectorized partial aggregate must match the
+      // expected type for the row-mode aggregation
+      // For decimal, the type is "same number of integer digits and 4 more decimal digits"
+
+      DecimalTypeInfo decTypeInfo =
+          GenericUDAFAverageEvaluatorDecimal.deriveResultDecimalTypeInfo(
+              inputPrecision, inputScale, mode);
+      this.sumScale = decTypeInfo.scale();
+      this.sumPrecision = decTypeInfo.precision();
+
+#IF PARTIAL1
+      List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+      foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo));
+      foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo));
+      List<String> fname = new ArrayList<String>();
+      fname.add("count");
+      fname.add("sum");
+      fname.add("input");
+      soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+#ENDIF PARTIAL1
+#IF COMPLETE
+      oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo);
+#ENDIF COMPLETE
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int bufferIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(bufferIndex);
+      return myagg;
+    }
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      inputExpression.evaluate(batch);
+
+       DecimalColumnVector inputVector = (DecimalColumnVector) batch.
+        cols[this.inputExpression.getOutputColumn()];
+      HiveDecimalWritable[] vector = inputVector.vector;
+
+      if (inputVector.noNulls) {
+        if (inputVector.isRepeating) {
+          iterateNoNullsRepeatingWithAggregationSelection(
+            aggregationBufferSets, bufferIndex,
+            vector[0], batchSize);
+        } else {
+          if (batch.selectedInUse) {
+            iterateNoNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              vector, batch.selected, batchSize);
+          } else {
+            iterateNoNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              vector, batchSize);
+          }
+        }
+      } else {
+        if (inputVector.isRepeating) {
+          if (batch.selectedInUse) {
+            iterateHasNullsRepeatingSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              vector[0], batchSize, batch.selected, inputVector.isNull);
+          } else {
+            iterateHasNullsRepeatingWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              vector[0], batchSize, inputVector.isNull);
+          }
+        } else {
+          if (batch.selectedInUse) {
+            iterateHasNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              vector, batchSize, batch.selected, inputVector.isNull);
+          } else {
+            iterateHasNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              vector, batchSize, inputVector.isNull);
+          }
+        }
+      }
+    }
+
+    private void iterateNoNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      HiveDecimalWritable value,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.avgValue(value);
+      }
+    }
+
+    private void iterateNoNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      HiveDecimalWritable[] values,
+      int[] selection,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.avgValue(values[selection[i]]);
+      }
+    }
+
+    private void iterateNoNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      HiveDecimalWritable[] values,
+      int batchSize) {
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.avgValue(values[i]);
+      }
+    }
+
+    private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      HiveDecimalWritable value,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      if (isNull[0]) {
+        return;
+      }
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.avgValue(value);
+      }
+
+    }
+
+    private void iterateHasNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      HiveDecimalWritable value,
+      int batchSize,
+      boolean[] isNull) {
+
+      if (isNull[0]) {
+        return;
+      }
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.avgValue(value);
+      }
+    }
+
+    private void iterateHasNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      HiveDecimalWritable[] values,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int j=0; j < batchSize; ++j) {
+        int i = selection[j];
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            j);
+          myagg.avgValue(values[i]);
+        }
+      }
+   }
+
+    private void iterateHasNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      HiveDecimalWritable[] values,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            i);
+          myagg.avgValue(values[i]);
+        }
+      }
+   }
+
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+        throws HiveException {
+
+        inputExpression.evaluate(batch);
+
+        DecimalColumnVector inputVector =
+            (DecimalColumnVector)batch.cols[this.inputExpression.getOutputColumn()];
+
+        int batchSize = batch.size;
+
+        if (batchSize == 0) {
+          return;
+        }
+
+        Aggregation myagg = (Aggregation)agg;
+
+        HiveDecimalWritable[] vector = inputVector.vector;
+
+        if (inputVector.isRepeating) {
+          if (inputVector.noNulls) {
+            if (myagg.isNull) {
+              myagg.isNull = false;
+              myagg.sum.setFromLong(0L);
+              myagg.count = 0;
+            }
+            HiveDecimal value = vector[0].getHiveDecimal();
+            HiveDecimal multiple = value.multiply(HiveDecimal.create(batchSize));
+            myagg.sum.mutateAdd(multiple);
+            myagg.count += batchSize;
+          }
+          return;
+        }
+
+        if (!batch.selectedInUse && inputVector.noNulls) {
+          iterateNoSelectionNoNulls(myagg, vector, batchSize);
+        }
+        else if (!batch.selectedInUse) {
+          iterateNoSelectionHasNulls(myagg, vector, batchSize, inputVector.isNull);
+        }
+        else if (inputVector.noNulls){
+          iterateSelectionNoNulls(myagg, vector, batchSize, batch.selected);
+        }
+        else {
+          iterateSelectionHasNulls(myagg, vector, batchSize, inputVector.isNull, batch.selected);
+        }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        HiveDecimalWritable[] vector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          myagg.avgValue(vector[i]);
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        HiveDecimalWritable[] vector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.sum.setFromLong(0L);
+        myagg.count = 0;
+      }
+
+      for (int i=0; i< batchSize; ++i) {
+        myagg.avgValueNoNullCheck(vector[selected[i]]);
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        HiveDecimalWritable[] vector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          myagg.avgValue(vector[i]);
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        HiveDecimalWritable[] vector,
+        int batchSize) {
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.sum.setFromLong(0L);
+        myagg.count = 0;
+      }
+
+      for (int i=0;i<batchSize;++i) {
+        myagg.avgValueNoNullCheck(vector[i]);
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      // !isSet checks for overflow.
+      if (myagg.isNull || !myagg.sum.isSet()) {
+        return null;
+      }
+      else {
+        Preconditions.checkState(myagg.count > 0);
+#IF PARTIAL1
+        resultCount.set (myagg.count);
+        resultSum.set(myagg.sum);
+        return partialResult;
+#ENDIF PARTIAL1
+#IF COMPLETE
+        tempDecWritable.setFromLong (myagg.count);
+        fullResult.set(myagg.sum);
+        fullResult.mutateDivide(tempDecWritable);
+        fullResult.mutateEnforcePrecisionScale(sumPrecision, sumScale);
+        return fullResult;
+#ENDIF COMPLETE
+      }
+    }
+
+  @Override
+    public ObjectInspector getOutputObjectInspector() {
+#IF PARTIAL1
+    return soi;
+#ENDIF PARTIAL1
+#IF COMPLETE
+    return oi;
+#ENDIF COMPLETE
+  }
+
+  @Override
+  public long getAggregationBufferFixedSize() {
+    JavaDataModel model = JavaDataModel.get();
+    return JavaDataModel.alignUp(
+      model.object() +
+      model.primitive2() * 2,
+      model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    init();
+
+    ExprNodeDesc inputExpr = desc.getParameters().get(0);
+    DecimalTypeInfo tiInput = (DecimalTypeInfo) inputExpr.getTypeInfo();
+    this.inputScale = tiInput.scale();
+    this.inputPrecision = tiInput.precision();
+
+#IF PARTIAL1
+    initPartialResultInspector();
+#ENDIF PARTIAL1
+#IF COMPLETE
+    initFullResultInspector();
+#ENDIF COMPLETE
+  }
+}
+
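
On the decimal result type: per the comment in the template, the partial SUM
keeps the input's integer digits and gains four fractional digits (the exact
rule lives in GenericUDAFAverageEvaluatorDecimal.deriveResultDecimalTypeInfo).
An illustrative derivation, assuming Hive's 38-digit precision cap:

  // input  decimal(10,2): 8 integer digits, 2 fractional digits
  // result decimal(14,6): same 8 integer digits, 2 + 4 fractional digits
  // Overflow: HiveDecimalWritable.mutateAdd() ignores an out-of-range add,
  // so evaluateOutput() tests !sum.isSet() and returns SQL NULL instead of
  // a wrong average.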

http://git-wip-us.apache.org/repos/asf/hive/blob/92fbe256/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimalMerge.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimalMerge.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimalMerge.txt
new file mode 100644
index 0000000..071efc9
--- /dev/null
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgDecimalMerge.txt
@@ -0,0 +1,597 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage.GenericUDAFAverageEvaluatorDecimal;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Generated from template VectorUDAFAvgDecimalMerge.txt.
+ */
+@Description(name = "avg",
+    value = "_FUNC_(AVG) - Returns the average value of expr (vectorized, type: decimal)")
+public class <ClassName> extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /** class for storing the current aggregate value. */
+    static class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private final HiveDecimalWritable mergeSum = new HiveDecimalWritable();
+      transient private long mergeCount;
+      transient private boolean isNull;
+
+      public void merge(long count, HiveDecimalWritable sum) {
+        if (isNull) {
+          // Make a copy since we intend to mutate sum.
+          mergeCount = count;
+          mergeSum.set(sum);
+          isNull = false;
+        } else {
+          // Note that if sum is out of range, mutateAdd will ignore the call.
+          // At the end, sum.isSet() can be checked for null.
+          mergeCount += count;
+          mergeSum.mutateAdd(sum);
+        }
+      }
+
+      public void mergeNoNullCheck(long count, HiveDecimalWritable sum) {
+        mergeCount += count;
+        mergeSum.mutateAdd(sum);
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset() {
+        isNull = true;
+        mergeCount = 0;
+        mergeSum.setFromLong(0L);
+      }
+    }
+
+#IF PARTIAL2
+    transient private Object[] partialResult;
+    transient private LongWritable resultCount;
+    transient private HiveDecimalWritable resultSum;
+    transient private HiveDecimalWritable resultInput;
+    transient private StructObjectInspector soi;
+#ENDIF PARTIAL2
+#IF FINAL
+    transient private HiveDecimalWritable tempDecWritable;
+    transient private HiveDecimalWritable fullResult;
+    transient private ObjectInspector oi;
+#ENDIF FINAL
+
+    private transient int countOffset;
+    private transient int sumOffset;
+    private transient int inputOffset;
+
+    /**
+     * The scale of the SUM in the partial output
+     */
+    private int sumScale;
+
+    /**
+     * The precision of the SUM in the partial output
+     */
+    private int sumPrecision;
+
+    /**
+     * the scale of the input expression
+     */
+    private int inputScale;
+
+    /**
+     * the precision of the input expression
+     */
+    private int inputPrecision;
+
+    public <ClassName>(VectorExpression inputExpression,
+        GenericUDAFEvaluator.Mode mode) {
+      super(inputExpression, mode);
+#IF PARTIAL2
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.PARTIAL2);
+#ENDIF PARTIAL2
+#IF FINAL
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.FINAL);
+#ENDIF FINAL
+    }
+
+    private void init() {
+#IF PARTIAL2
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new HiveDecimalWritable();
+      resultInput = new HiveDecimalWritable(0L);
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultInput;
+#ENDIF PARTIAL2
+#IF FINAL
+      tempDecWritable = new HiveDecimalWritable();
+      fullResult = new HiveDecimalWritable();
+#ENDIF FINAL
+    }
+
+#IF PARTIAL2
+    private void initPartialResultInspector() {
+#ENDIF PARTIAL2
+#IF FINAL
+    private void initFullResultInspector() {
+#ENDIF FINAL
+
+      // the output type of the vectorized partial aggregate must match the
+      // expected type for the row-mode aggregation
+      // For decimal, the type is "same number of integer digits and 4 more decimal digits"
+
+      DecimalTypeInfo decTypeInfo =
+          GenericUDAFAverageEvaluatorDecimal.deriveResultDecimalTypeInfo(
+               inputPrecision, inputScale, mode);
+      this.sumScale = decTypeInfo.scale();
+      this.sumPrecision = decTypeInfo.precision();
+
+#IF PARTIAL2
+      List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+      foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo));
+      foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo));
+      List<String> fname = new ArrayList<String>();
+      fname.add("count");
+      fname.add("sum");
+      fname.add("input");
+      soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+#ENDIF PARTIAL2
+#IF FINAL
+      oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo);
+#ENDIF FINAL
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int bufferIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(bufferIndex);
+      return myagg;
+    }
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      inputExpression.evaluate(batch);
+
+      StructColumnVector inputStructColVector =
+          (StructColumnVector) batch.cols[this.inputExpression.getOutputColumn()];
+      ColumnVector[] fields = inputStructColVector.fields;
+
+      long[] countVector = ((LongColumnVector) fields[countOffset]).vector;
+      HiveDecimalWritable[] sumVector = ((DecimalColumnVector) fields[sumOffset]).vector;
+
+      if (inputStructColVector.noNulls) {
+        if (inputStructColVector.isRepeating) {
+          iterateNoNullsRepeatingWithAggregationSelection(
+            aggregationBufferSets, bufferIndex,
+            countVector[0], sumVector[0], batchSize);
+        } else {
+          if (batch.selectedInUse) {
+            iterateNoNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector, sumVector, batch.selected, batchSize);
+          } else {
+            iterateNoNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector, sumVector, batchSize);
+          }
+        }
+      } else {
+        if (inputStructColVector.isRepeating) {
+          if (batch.selectedInUse) {
+            iterateHasNullsRepeatingSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector[0], sumVector[0], batchSize, batch.selected, inputStructColVector.isNull);
+          } else {
+            iterateHasNullsRepeatingWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector[0], sumVector[0], batchSize, inputStructColVector.isNull);
+          }
+        } else {
+          if (batch.selectedInUse) {
+            iterateHasNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector, sumVector, batchSize, batch.selected, inputStructColVector.isNull);
+          } else {
+            iterateHasNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector, sumVector, batchSize, inputStructColVector.isNull);
+          }
+        }
+      }
+    }
+
+    private void iterateNoNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long count,
+      HiveDecimalWritable sum,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.merge(count, sum);
+      }
+    }
+
+    private void iterateNoNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long[] countVector,
+      HiveDecimalWritable[] sumVector,
+      int[] selection,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        final int batchIndex = selection[i];
+        myagg.merge(countVector[batchIndex], sumVector[batchIndex]);
+      }
+    }
+
+    private void iterateNoNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long[] countVector,
+      HiveDecimalWritable[] sumVector,
+      int batchSize) {
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.merge(countVector[i], sumVector[i]);
+      }
+    }
+
+    private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long count,
+      HiveDecimalWritable sum,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      if (isNull[0]) {
+        return;
+      }
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.merge(count, sum);
+      }
+
+    }
+
+    private void iterateHasNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long count,
+      HiveDecimalWritable sum,
+      int batchSize,
+      boolean[] isNull) {
+
+      if (isNull[0]) {
+        return;
+      }
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.merge(count, sum);
+      }
+    }
+
+    private void iterateHasNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long[] countVector,
+      HiveDecimalWritable[] sumVector,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int i = 0; i < batchSize; i++) {
+        final int batchIndex = selection[i];
+        if (!isNull[batchIndex]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            i);
+          myagg.merge(countVector[batchIndex], sumVector[batchIndex]);
+        }
+      }
+   }
+
+    private void iterateHasNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long[] countVector,
+      HiveDecimalWritable[] sumVector,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            i);
+          myagg.merge(countVector[i], sumVector[i]);
+        }
+      }
+   }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+        throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      StructColumnVector inputStructColVector =
+          (StructColumnVector) batch.cols[this.inputExpression.getOutputColumn()];
+      ColumnVector[] fields = inputStructColVector.fields;
+
+      long[] countVector = ((LongColumnVector) fields[countOffset]).vector;
+      HiveDecimalWritable[] sumVector = ((DecimalColumnVector) fields[sumOffset]).vector;
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation)agg;
+
+      if (inputStructColVector.isRepeating) {
+        if (inputStructColVector.noNulls) {
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.mergeSum.setFromLong(0L);
+            myagg.mergeCount = 0;
+          }
+          myagg.mergeCount += countVector[0] * batchSize;
+          HiveDecimal sum = sumVector[0].getHiveDecimal();
+          HiveDecimal multiple = sum.multiply(HiveDecimal.create(batchSize));
+          myagg.mergeSum.mutateAdd(multiple);
+        }
+        return;
+      }
+
+      if (!batch.selectedInUse && inputStructColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, countVector, sumVector, batchSize);
+      } else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, countVector, sumVector, batchSize, inputStructColVector.isNull);
+      } else if (inputStructColVector.noNulls){
+        iterateSelectionNoNulls(myagg, countVector, sumVector, batchSize, batch.selected);
+      } else {
+        iterateSelectionHasNulls(myagg, countVector, sumVector, batchSize, inputStructColVector.isNull, batch.selected);
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        long[] countVector,
+        HiveDecimalWritable[] sumVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int i = 0; i < batchSize; i++) {
+        final int batchIndex = selected[i];
+        if (!isNull[batchIndex]) {
+          myagg.merge(countVector[batchIndex], sumVector[batchIndex]);
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        long[] countVector,
+        HiveDecimalWritable[] sumVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.mergeSum.setFromLong(0L);
+        myagg.mergeCount = 0;
+      }
+
+      for (int i = 0; i< batchSize; i++) {
+        final int batchIndex = selected[i];
+        myagg.mergeNoNullCheck(countVector[batchIndex], sumVector[batchIndex]);
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        long[] countVector,
+        HiveDecimalWritable[] sumVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i = 0; i < batchSize; i++) {
+        if (!isNull[i]) {
+          myagg.merge(countVector[i], sumVector[i]);
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        long[] countVector,
+        HiveDecimalWritable[] sumVector,
+        int batchSize) {
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.mergeSum.setFromLong(0L);
+        myagg.mergeCount = 0;
+      }
+
+      for (int i = 0; i < batchSize; i++) {
+        myagg.mergeNoNullCheck(countVector[i], sumVector[i]);
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      // !isSet checks for overflow.
+      if (myagg.isNull || !myagg.mergeSum.isSet()) {
+        return null;
+      }
+      else {
+        Preconditions.checkState(myagg.mergeCount > 0);
+#IF PARTIAL2
+        resultCount.set (myagg.mergeCount);
+        resultSum.set(myagg.mergeSum);
+        return partialResult;
+#ENDIF PARTIAL2
+#IF FINAL
+        tempDecWritable.setFromLong (myagg.mergeCount);
+        fullResult.set(myagg.mergeSum);
+        fullResult.mutateDivide(tempDecWritable);
+        fullResult.mutateEnforcePrecisionScale(sumPrecision, sumScale);
+        return fullResult;
+#ENDIF FINAL
+      }
+    }
+
+  @Override
+    public ObjectInspector getOutputObjectInspector() {
+#IF PARTIAL2
+    return soi;
+#ENDIF PARTIAL2
+#IF FINAL
+    return oi;
+#ENDIF FINAL
+  }
+
+  @Override
+  public long getAggregationBufferFixedSize() {
+    JavaDataModel model = JavaDataModel.get();
+    return JavaDataModel.alignUp(
+      model.object() +
+      model.primitive2() * 2,
+      model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    init();
+
+    ExprNodeDesc inputExpr = desc.getParameters().get(0);
+
+     StructTypeInfo partialStructTypeInfo = (StructTypeInfo) inputExpr.getTypeInfo();
+
+    ArrayList<String> fieldNames =  partialStructTypeInfo.getAllStructFieldNames();
+    countOffset = fieldNames.indexOf("count");
+    sumOffset = fieldNames.indexOf("sum");
+    inputOffset = fieldNames.indexOf("input");
+
+    DecimalTypeInfo tiInput = (DecimalTypeInfo) partialStructTypeInfo.getAllStructFieldTypeInfos().get(sumOffset);
+    this.inputScale = tiInput.scale();
+    this.inputPrecision = tiInput.precision();
+
+#IF PARTIAL2
+    initPartialResultInspector();
+#ENDIF PARTIAL2
+#IF FINAL
+    initFullResultInspector();
+#ENDIF FINAL
+  }
+}
+
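
The merge-side templates read their input from a struct column rather than a
scalar column. A sketch of the layout this template assumes (field names come
from the "count"/"sum"/"input" list above; nothing here is new API):

  // batch.cols[k] is a StructColumnVector whose children are:
  //   fields[countOffset] : LongColumnVector    -- partial counts
  //   fields[sumOffset]   : DecimalColumnVector -- partial sums
  //   fields[inputOffset] : carried for row-mode compatibility, unused here
  // Offsets are resolved by name in init(), so the generated class tolerates
  // reordering of the partial struct's fields.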

http://git-wip-us.apache.org/repos/asf/hive/blob/92fbe256/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgMerge.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgMerge.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgMerge.txt
new file mode 100644
index 0000000..996d0dc
--- /dev/null
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgMerge.txt
@@ -0,0 +1,547 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.StructColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Generated from template VectorUDAFAvgMerge.txt.
+ */
+@Description(name = "avg",
+    value = "_FUNC_(expr) - Returns the average value of expr (vectorized, type: <ValueType>)")
+public class <ClassName> extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /** class for storing the current aggregate value. */
+    static class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private long mergeCount;
+      transient private double mergeSum;
+
+      /**
+      * Value is explicitly (re)initialized in reset()
+      */
+      transient private boolean isNull = true;
+
+      public void merge(long count, double sum) {
+        if (isNull) {
+          mergeCount = count;
+          mergeSum = sum;
+          isNull = false;
+        } else {
+          mergeCount += count;
+          mergeSum += sum;
+        }
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        mergeCount = 0L;
+        mergeSum = 0;
+      }
+    }
+
+#IF PARTIAL2
+    transient private Object[] partialResult;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private DoubleWritable resultInput;
+    transient private StructObjectInspector soi;
+#ENDIF PARTIAL2
+#IF FINAL
+    transient private DoubleWritable fullResult;
+
+    transient private ObjectInspector oi;
+#ENDIF FINAL
+
+    private transient int countOffset;
+    private transient int sumOffset;
+    private transient int inputOffset;
+
+    public <ClassName>(VectorExpression inputExpression, GenericUDAFEvaluator.Mode mode) {
+      super(inputExpression, mode);
+#IF PARTIAL2
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.PARTIAL2);
+#ENDIF PARTIAL2
+#IF FINAL
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.FINAL);
+#ENDIF FINAL
+    }
+
+    private void init() {
+#IF PARTIAL2
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultInput;  // stays null: the "input" field is not populated when merging AVG partials
+      initPartialResultInspector();
+#ENDIF PARTIAL2
+#IF FINAL
+      fullResult = new DoubleWritable();
+      initFullResultInspector();
+#ENDIF FINAL
+    }
+
+#IF PARTIAL2
+    private void initPartialResultInspector() {
+        List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+        foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        List<String> fname = new ArrayList<String>();
+        fname.add("count");
+        fname.add("sum");
+        fname.add("input");
+        soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+#ENDIF PARTIAL2
+#IF FINAL
+    private void initFullResultInspector() {
+      oi = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+    }
+#ENDIF FINAL
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int bufferIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(bufferIndex);
+      return myagg;
+    }
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      inputExpression.evaluate(batch);
+
+      StructColumnVector inputStructColVector =
+          (StructColumnVector) batch.cols[this.inputExpression.getOutputColumn()];
+      ColumnVector[] fields = inputStructColVector.fields;
+
+      long[] countVector = ((LongColumnVector) fields[countOffset]).vector;
+      double[] sumVector = ((DoubleColumnVector) fields[sumOffset]).vector;
+
+      if (inputStructColVector.noNulls) {
+        if (inputStructColVector.isRepeating) {
+          iterateNoNullsRepeatingWithAggregationSelection(
+            aggregationBufferSets, bufferIndex,
+            countVector[0], sumVector[0], batchSize);
+        } else {
+          if (batch.selectedInUse) {
+            iterateNoNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector, sumVector, batch.selected, batchSize);
+          } else {
+            iterateNoNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector, sumVector, batchSize);
+          }
+        }
+      } else {
+        if (inputStructColVector.isRepeating) {
+          if (batch.selectedInUse) {
+            iterateHasNullsRepeatingSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector[0], sumVector[0], batchSize, batch.selected, inputStructColVector.isNull);
+          } else {
+            iterateHasNullsRepeatingWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector[0], sumVector[0], batchSize, inputStructColVector.isNull);
+          }
+        } else {
+          if (batch.selectedInUse) {
+            iterateHasNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector, sumVector, batchSize, batch.selected, inputStructColVector.isNull);
+          } else {
+            iterateHasNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              countVector, sumVector, batchSize, inputStructColVector.isNull);
+          }
+        }
+      }
+    }
+
+    private void iterateNoNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long count,
+      double sum,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.merge(count, sum);
+      }
+    }
+
+    private void iterateNoNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long[] countVector,
+      double[] sumVector,
+      int[] selection,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        final int batchIndex = selection[i];
+        myagg.merge(countVector[batchIndex], sumVector[batchIndex]);
+      }
+    }
+
+    private void iterateNoNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long[] countVector,
+      double[] sumVector,
+      int batchSize) {
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.merge(countVector[i], sumVector[i]);
+      }
+    }
+
+    private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long count,
+      double sum,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      if (isNull[0]) {
+        return;
+      }
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.merge(count, sum);
+      }
+
+    }
+
+    private void iterateHasNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long count,
+      double sum,
+      int batchSize,
+      boolean[] isNull) {
+
+      if (isNull[0]) {
+        return;
+      }
+
+      for (int i = 0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.merge(count, sum);
+      }
+    }
+
+    private void iterateHasNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long[] countVector,
+      double[] sumVector,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int i = 0; i < batchSize; i++) {
+        final int batchIndex = selection[i];
+        if (!isNull[batchIndex]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            i);
+          myagg.merge(countVector[batchIndex], sumVector[batchIndex]);
+        }
+      }
+    }
+
+    private void iterateHasNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      long[] countVector,
+      double[] sumVector,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            i);
+          myagg.merge(countVector[i], sumVector[i]);
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+        throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      StructColumnVector inputStructColVector =
+          (StructColumnVector) batch.cols[this.inputExpression.getOutputColumn()];
+      ColumnVector[] fields = inputStructColVector.fields;
+
+      long[] countVector = ((LongColumnVector) fields[countOffset]).vector;
+      double[] sumVector = ((DoubleColumnVector) fields[sumOffset]).vector;
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation) agg;
+
+      if (inputStructColVector.isRepeating) {
+        if (inputStructColVector.noNulls) {
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.mergeCount = 0;
+            myagg.mergeSum = 0;
+          }
+          myagg.mergeCount += countVector[0] * batchSize;
+          myagg.mergeSum += sumVector[0] * batchSize;
+        }
+        return;
+      }
+
+      if (!batch.selectedInUse && inputStructColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, countVector, sumVector, batchSize);
+      } else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, countVector, sumVector, batchSize, inputStructColVector.isNull);
+      } else if (inputStructColVector.noNulls) {
+        iterateSelectionNoNulls(myagg, countVector, sumVector, batchSize, batch.selected);
+      } else {
+        iterateSelectionHasNulls(myagg, countVector, sumVector, batchSize, inputStructColVector.isNull, batch.selected);
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        long[] countVector,
+        double[] sumVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int i=0; i < batchSize; i++) {
+        int batchIndex = selected[i];
+        if (!isNull[batchIndex]) {
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.mergeCount = 0;
+            myagg.mergeSum = 0;
+          }
+          myagg.mergeCount += countVector[batchIndex];
+          myagg.mergeSum += sumVector[batchIndex];
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        long[] countVector,
+        double[] sumVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.mergeCount = 0;
+        myagg.mergeSum = 0;
+      }
+
+      for (int i = 0; i < batchSize; ++i) {
+        final int batchIndex = selected[i];
+        myagg.mergeCount += countVector[batchIndex];
+        myagg.mergeSum += sumVector[batchIndex];
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        long[] countVector,
+        double[] sumVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for (int i = 0; i < batchSize; i++) {
+        if (!isNull[i]) {
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.mergeCount = 0;
+            myagg.mergeSum = 0;
+          }
+          myagg.mergeCount += countVector[i];
+          myagg.mergeSum += sumVector[i];
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        long[] countVector,
+        double[] sumVector,
+        int batchSize) {
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.mergeCount = 0;
+        myagg.mergeSum = 0;
+      }
+
+      for (int i = 0; i < batchSize; ++i) {
+        myagg.mergeCount += countVector[i];
+        myagg.mergeSum += sumVector[i];
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      } else {
+        Preconditions.checkState(myagg.mergeCount > 0);
+#IF PARTIAL2
+        resultCount.set(myagg.mergeCount);
+        resultSum.set(myagg.mergeSum);
+        return partialResult;
+#ENDIF PARTIAL2
+#IF FINAL
+        fullResult.set(myagg.mergeSum / myagg.mergeCount);
+        return fullResult;
+#ENDIF FINAL
+      }
+    }
+
+  @Override
+  public ObjectInspector getOutputObjectInspector() {
+#IF PARTIAL2
+    return soi;
+#ENDIF PARTIAL2
+#IF FINAL
+    return oi;
+#ENDIF FINAL
+  }
+
+  @Override
+  public long getAggregationBufferFixedSize() {
+    JavaDataModel model = JavaDataModel.get();
+    return JavaDataModel.alignUp(
+      model.object() +
+      model.primitive2() * 2,
+      model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    init();
+
+    ExprNodeDesc inputExpr = desc.getParameters().get(0);
+    StructTypeInfo partialStructTypeInfo = (StructTypeInfo) inputExpr.getTypeInfo();
+
+    ArrayList<String> fieldNames = partialStructTypeInfo.getAllStructFieldNames();
+    countOffset = fieldNames.indexOf("count");
+    sumOffset = fieldNames.indexOf("sum");
+    inputOffset = fieldNames.indexOf("input");
+  }
+}
\ No newline at end of file
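
Stripped of vectorization and null handling, the merge logic above reduces to a
(count, sum) pair: PARTIAL2 re-emits the combined pair for a later merge stage,
while FINAL divides sum by count. A minimal sketch of that arithmetic (the names
below are illustrative, not Hive's):

// Minimal model of the AVG merge buffer: combine partials, then finish.
final class AvgPartialSketch {
  long count;   // rows folded into this partial so far
  double sum;   // running sum of those rows

  // PARTIAL2-style step: fold another partial into this one.
  void merge(long otherCount, double otherSum) {
    count += otherCount;
    sum += otherSum;
  }

  // FINAL-style step: produce the average; empty buffers are expected to be
  // caught earlier, which is what the checkState on mergeCount guards above.
  double finish() {
    if (count == 0) {
      throw new IllegalStateException("no rows merged");
    }
    return sum / count;
  }
}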

http://git-wip-us.apache.org/repos/asf/hive/blob/92fbe256/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
new file mode 100644
index 0000000..b816a35
--- /dev/null
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvgTimestamp.txt
@@ -0,0 +1,517 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.ql.util.TimestampUtils;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Generated from template VectorUDAFAvgTimestamp.txt.
+ */
+@Description(name = "avg",
+    value = "_FUNC_(expr) - Returns the average value of expr (vectorized, type: timestamp)")
+public class <ClassName> extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /** class for storing the current aggregate value. */
+    static class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private double sum;
+      transient private long count;
+
+      /**
+      * Value is explicitly (re)initialized in reset()
+      */
+      transient private boolean isNull = true;
+
+      public void sumValue(double value) {
+        if (isNull) {
+          sum = value;
+          count = 1;
+          isNull = false;
+        } else {
+          sum += value;
+          count++;
+        }
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset() {
+        isNull = true;
+        sum = 0;
+        count = 0L;
+      }
+    }
+
+#IF PARTIAL1
+    transient private Object[] partialResult;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private TimestampWritable resultInput;
+    transient private StructObjectInspector soi;
+#ENDIF PARTIAL1
+#IF COMPLETE
+    transient private DoubleWritable fullResult;
+    transient private ObjectInspector oi;
+#ENDIF COMPLETE
+
+    public <ClassName>(VectorExpression inputExpression,
+        GenericUDAFEvaluator.Mode mode) {
+      super(inputExpression, mode);
+#IF PARTIAL1
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.PARTIAL1);
+#ENDIF PARTIAL1
+#IF COMPLETE
+      Preconditions.checkState(this.mode == GenericUDAFEvaluator.Mode.COMPLETE);
+#ENDIF COMPLETE
+    }
+
+    private void init() {
+#IF PARTIAL1
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      resultInput = new TimestampWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultInput;
+      initPartialResultInspector();
+#ENDIF PARTIAL1
+#IF COMPLETE
+      fullResult = new DoubleWritable();
+      initFullResultInspector();
+#ENDIF COMPLETE
+    }
+
+#IF PARTIAL1
+    private void initPartialResultInspector() {
+        List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+        foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+        foi.add(PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
+        List<String> fname = new ArrayList<String>();
+        fname.add("count");
+        fname.add("sum");
+        fname.add("input");
+        soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+#ENDIF PARTIAL1
+#IF COMPLETE
+    private void initFullResultInspector() {
+        oi = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+    }
+#ENDIF COMPLETE
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int bufferIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(bufferIndex);
+      return myagg;
+    }
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector =
+          (TimestampColumnVector) batch.cols[this.inputExpression.getOutputColumn()];
+
+      if (inputColVector.noNulls) {
+        if (inputColVector.isRepeating) {
+          iterateNoNullsRepeatingWithAggregationSelection(
+            aggregationBufferSets, bufferIndex,
+            inputColVector.getDouble(0),
+            batchSize);
+        } else {
+          if (batch.selectedInUse) {
+            iterateNoNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector, batch.selected, batchSize);
+          } else {
+            iterateNoNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector, batchSize);
+          }
+        }
+      } else {
+        if (inputColVector.isRepeating) {
+          if (batch.selectedInUse) {
+            iterateHasNullsRepeatingSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector.getDouble(0), batchSize, batch.selected, inputColVector.isNull);
+          } else {
+            iterateHasNullsRepeatingWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector.getDouble(0), batchSize, inputColVector.isNull);
+          }
+        } else {
+          if (batch.selectedInUse) {
+            iterateHasNullsSelectionWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector, batchSize, batch.selected, inputColVector.isNull);
+          } else {
+            iterateHasNullsWithAggregationSelection(
+              aggregationBufferSets, bufferIndex,
+              inputColVector, batchSize, inputColVector.isNull);
+          }
+        }
+      }
+    }
+
+    private void iterateNoNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      double value,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.sumValue(value);
+      }
+    }
+
+    private void iterateNoNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      TimestampColumnVector inputColVector,
+      int[] selection,
+      int batchSize) {
+
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.sumValue(
+            inputColVector.getDouble(selection[i]));
+      }
+    }
+
+    private void iterateNoNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize) {
+      for (int i=0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.sumValue(inputColVector.getDouble(i));
+      }
+    }
+
+    private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      double value,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      if (isNull[0]) {
+        return;
+      }
+
+      for (int i = 0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.sumValue(value);
+      }
+
+    }
+
+    private void iterateHasNullsRepeatingWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      double value,
+      int batchSize,
+      boolean[] isNull) {
+
+      if (isNull[0]) {
+        return;
+      }
+
+      for (int i = 0; i < batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          bufferIndex,
+          i);
+        myagg.sumValue(value);
+      }
+    }
+
+    private void iterateHasNullsSelectionWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize,
+      int[] selection,
+      boolean[] isNull) {
+
+      for (int j=0; j < batchSize; ++j) {
+        int i = selection[j];
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            j);
+          myagg.sumValue(inputColVector.getDouble(i));
+        }
+      }
+    }
+
+    private void iterateHasNullsWithAggregationSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int bufferIndex,
+      TimestampColumnVector inputColVector,
+      int batchSize,
+      boolean[] isNull) {
+
+      for (int i=0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            bufferIndex,
+            i);
+          myagg.sumValue(inputColVector.getDouble(i));
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+        throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector =
+          (TimestampColumnVector) batch.cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation) agg;
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls) {
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.sum = 0;
+            myagg.count = 0;
+          }
+          myagg.sum += inputColVector.getDouble(0) * batchSize;
+          myagg.count += batchSize;
+        }
+        return;
+      }
+
+      if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+      } else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
+      } else if (inputColVector.noNulls) {
+        iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+      } else {
+        iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j = 0; j < batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.sum = 0;
+            myagg.count = 0;
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.sum = 0;
+        myagg.count = 0;
+      }
+
+      for (int i = 0; i < batchSize; ++i) {
+        double value = inputColVector.getDouble(selected[i]);
+        myagg.sum += value;
+        myagg.count += 1;
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for (int i = 0; i < batchSize; ++i) {
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.isNull = false;
+            myagg.sum = 0;
+            myagg.count = 0;
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+      if (myagg.isNull) {
+        myagg.isNull = false;
+        myagg.sum = 0;
+        myagg.count = 0;
+      }
+
+      for (int i = 0; i < batchSize; ++i) {
+        double value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      } else {
+        Preconditions.checkState(myagg.count > 0);
+#IF PARTIAL1
+        resultCount.set(myagg.count);
+        resultSum.set(myagg.sum);
+        return partialResult;
+#ENDIF PARTIAL1
+#IF COMPLETE
+        fullResult.set(myagg.sum / myagg.count);
+        return fullResult;
+#ENDIF COMPLETE
+      }
+    }
+
+  @Override
+  public ObjectInspector getOutputObjectInspector() {
+#IF PARTIAL1
+    return soi;
+#ENDIF PARTIAL1
+#IF COMPLETE
+    return oi;
+#ENDIF COMPLETE
+  }
+
+  @Override
+  public long getAggregationBufferFixedSize() {
+    JavaDataModel model = JavaDataModel.get();
+    return JavaDataModel.alignUp(
+      model.object() +
+      model.primitive2() * 2,
+      model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    init();
+  }
+}
+
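
The timestamp template averages by first mapping each timestamp to a double via
TimestampColumnVector.getDouble(i) and then reusing the ordinary (sum, count)
arithmetic. Assuming that encoding is seconds with a fractional-nanosecond part
(the convention the surrounding code appears to rely on), the conversion amounts
to the sketch below; toDoubleSeconds and averageSeconds are hypothetical helpers,
not Hive APIs.

import java.sql.Timestamp;

// Hypothetical illustration of averaging timestamps as double seconds.
final class TimestampAvgSketch {
  static double toDoubleSeconds(Timestamp ts) {
    // getTime() is epoch milliseconds; recover whole seconds with a floor
    // division (correct for pre-epoch values), then add the nano fraction.
    long wholeSeconds = Math.floorDiv(ts.getTime(), 1000L);
    return wholeSeconds + ts.getNanos() / 1_000_000_000.0;
  }

  static double averageSeconds(Timestamp[] values) {
    double sum = 0;
    for (Timestamp ts : values) {
      sum += toDoubleSeconds(ts);
    }
    return sum / values.length; // callers must guard against empty input
  }
}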

