hive-commits mailing list archives

From: xu...@apache.org
Subject: svn commit: r1669775 [19/35] - in /hive/branches/spark: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/...
Date: Sat, 28 Mar 2015 14:03:49 GMT
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java Sat Mar 28 14:03:43 2015
@@ -28,10 +28,11 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -124,7 +125,7 @@ public class GenericUDAFHistogramNumeric
     private transient PrimitiveObjectInspector nbinsOI;
 
     // For PARTIAL2 and FINAL: ObjectInspectors for partial aggregations (list of doubles)
-    private transient StandardListObjectInspector loi;
+    private transient ListObjectInspector loi;
 
 
     @Override
@@ -137,7 +138,7 @@ public class GenericUDAFHistogramNumeric
         inputOI = (PrimitiveObjectInspector) parameters[0];
         nbinsOI = (PrimitiveObjectInspector) parameters[1];
       } else {
-        loi = (StandardListObjectInspector) parameters[0];
+        loi = (ListObjectInspector) parameters[0];
       }
 
       // init output object inspectors
@@ -197,8 +198,10 @@ public class GenericUDAFHistogramNumeric
         return;
       }
       List<DoubleWritable> partialHistogram = (List<DoubleWritable>) loi.getList(partial);
+      DoubleObjectInspector doi = (DoubleObjectInspector)loi.getListElementObjectInspector();
+      
       StdAgg myagg = (StdAgg) agg;
-      myagg.histogram.merge(partialHistogram);
+      myagg.histogram.merge(partialHistogram, doi);
     }
 
     @Override

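The hunks above relax the partial-aggregation inspector from the concrete StandardListObjectInspector to the ListObjectInspector interface, and read list elements through a DoubleObjectInspector instead of assuming the partial arrives as a list of DoubleWritable objects. That lets merge() accept partials in any list representation (standard, lazy, lazy-binary). A minimal sketch of the resulting access pattern, using only the inspector calls visible in the hunks (the helper class and method names are illustrative):

    import java.util.List;

    import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;

    final class PartialDoublesReader {
      // Decodes a serialized list of doubles regardless of the concrete
      // list/element representation chosen by the intermediate serde.
      static double[] read(Object partial, ListObjectInspector loi) {
        DoubleObjectInspector doi =
            (DoubleObjectInspector) loi.getListElementObjectInspector();
        List<?> values = loi.getList(partial);
        double[] out = new double[values.size()];
        for (int i = 0; i < out.length; i++) {
          out[i] = doi.get(values.get(i));
        }
        return out;
      }
    }
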
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMkCollectionEvaluator.java Sat Mar 28 14:03:43 2015
@@ -25,6 +25,7 @@ import java.util.LinkedHashSet;
 import java.util.List;
 
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -44,7 +45,7 @@ public class GenericUDAFMkCollectionEval
   // of objs)
   private transient StandardListObjectInspector loi;
 
-  private transient StandardListObjectInspector internalMergeOI;
+  private transient ListObjectInspector internalMergeOI;
 
   private BufferType bufferType;
 
@@ -68,14 +69,14 @@ public class GenericUDAFMkCollectionEval
           .getStandardListObjectInspector((PrimitiveObjectInspector) ObjectInspectorUtils
               .getStandardObjectInspector(inputOI));
     } else {
-      if (!(parameters[0] instanceof StandardListObjectInspector)) {
+      if (!(parameters[0] instanceof ListObjectInspector)) {
         //no map aggregation.
         inputOI = (PrimitiveObjectInspector)  ObjectInspectorUtils
         .getStandardObjectInspector(parameters[0]);
         return (StandardListObjectInspector) ObjectInspectorFactory
             .getStandardListObjectInspector(inputOI);
       } else {
-        internalMergeOI = (StandardListObjectInspector) parameters[0];
+        internalMergeOI = (ListObjectInspector) parameters[0];
         inputOI = (PrimitiveObjectInspector) internalMergeOI.getListElementObjectInspector();
         loi = (StandardListObjectInspector) ObjectInspectorUtils.getStandardObjectInspector(internalMergeOI);
         return loi;

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java Sat Mar 28 14:03:43 2015
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 
@@ -300,20 +301,21 @@ public class GenericUDAFPercentileApprox
         return;
       }
       PercentileAggBuf myagg = (PercentileAggBuf) agg;
-      List<DoubleWritable> partialHistogram = (List<DoubleWritable>) loi.getList(partial);
+      List partialHistogram = (List) loi.getList(partial);
+      DoubleObjectInspector doi = (DoubleObjectInspector)loi.getListElementObjectInspector();
 
       // remove requested quantiles from the head of the list
-      int nquantiles = (int) partialHistogram.get(0).get();
+      int nquantiles = (int) doi.get(partialHistogram.get(0));
       if(nquantiles > 0) {
         myagg.quantiles = new double[nquantiles];
         for(int i = 1; i <= nquantiles; i++) {
-          myagg.quantiles[i-1] = partialHistogram.get(i).get();
+          myagg.quantiles[i-1] = doi.get(partialHistogram.get(i));
         }
         partialHistogram.subList(0, nquantiles+1).clear();
       }
 
       // merge histograms
-      myagg.histogram.merge(partialHistogram);
+      myagg.histogram.merge(partialHistogram, doi);
     }
 
     @Override

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java Sat Mar 28 14:03:43 2015
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.exec.De
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -160,7 +161,7 @@ public class GenericUDAFnGrams implement
    */
   public static class GenericUDAFnGramEvaluator extends GenericUDAFEvaluator {
     // For PARTIAL1 and COMPLETE: ObjectInspectors for original data
-    private transient StandardListObjectInspector outerInputOI;
+    private transient ListObjectInspector outerInputOI;
     private transient StandardListObjectInspector innerInputOI;
     private transient PrimitiveObjectInspector inputOI;
     private transient PrimitiveObjectInspector nOI;
@@ -168,7 +169,7 @@ public class GenericUDAFnGrams implement
     private transient PrimitiveObjectInspector pOI;
 
     // For PARTIAL2 and FINAL: ObjectInspectors for partial aggregations
-    private transient StandardListObjectInspector loi;
+    private transient ListObjectInspector loi;
 
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
@@ -176,7 +177,7 @@ public class GenericUDAFnGrams implement
 
       // Init input object inspectors
       if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
-        outerInputOI = (StandardListObjectInspector) parameters[0];
+        outerInputOI = (ListObjectInspector) parameters[0];
         if(outerInputOI.getListElementObjectInspector().getCategory() ==
             ObjectInspector.Category.LIST) {
           // We're dealing with input that is an array of arrays of strings
@@ -196,7 +197,7 @@ public class GenericUDAFnGrams implement
         }
       } else {
           // Init the list object inspector for handling partial aggregations
-          loi = (StandardListObjectInspector) parameters[0];
+          loi = (ListObjectInspector) parameters[0];
       }
 
       // Init output object inspectors.
@@ -229,7 +230,7 @@ public class GenericUDAFnGrams implement
         return;
       }
       NGramAggBuf myagg = (NGramAggBuf) agg;
-      List<Text> partialNGrams = (List<Text>) loi.getList(partial);
+      List partialNGrams = (List) loi.getList(partial);
       int n = Integer.parseInt(partialNGrams.get(partialNGrams.size()-1).toString());
 
       // A value of 0 for n indicates that the mapper processed data that does not meet

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java Sat Mar 28 14:03:43 2015
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -363,6 +364,17 @@ public abstract class GenericUDF impleme
     inputTypes[i] = inputType;
   }
 
+  protected void obtainDoubleConverter(ObjectInspector[] arguments, int i,
+      PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
+    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
+    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
+    Converter converter = ObjectInspectorConverters.getConverter(
+        (PrimitiveObjectInspector) arguments[i],
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+    converters[i] = converter;
+    inputTypes[i] = inputType;
+  }
+
   protected void obtainDateConverter(ObjectInspector[] arguments, int i,
       PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
     PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
@@ -440,6 +452,17 @@ public abstract class GenericUDF impleme
     return v;
   }
 
+  protected Double getDoubleValue(DeferredObject[] arguments, int i, Converter[] converters)
+      throws HiveException {
+    Object obj;
+    if ((obj = arguments[i].get()) == null) {
+      return null;
+    }
+    Object writableValue = converters[i].convert(obj);
+    double v = ((DoubleWritable) writableValue).get();
+    return v;
+  }
+
   protected Date getDateValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes,
       Converter[] converters) throws HiveException {
     Object obj;

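The two additions fill out GenericUDF's existing obtain*Converter / get*Value helper family for double arguments: obtainDoubleConverter records the argument's primitive category and caches a converter to writableDoubleObjectInspector, and getDoubleValue runs the cached converter and unwraps the DoubleWritable. A hypothetical subclass showing the intended call pattern (the class name and evaluate() logic are illustrative, not part of this commit):

    import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
    import org.apache.hadoop.hive.serde2.io.DoubleWritable;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class GenericUDFHalve extends GenericUDF {
      private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
      private transient Converter[] converters = new Converter[1];

      @Override
      public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        // One-argument UDF; argument-count/type validation elided for brevity.
        obtainDoubleConverter(arguments, 0, inputTypes, converters);
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
      }

      @Override
      public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Double v = getDoubleValue(arguments, 0, converters);
        return v == null ? null : new DoubleWritable(v / 2.0);
      }

      @Override
      public String getDisplayString(String[] children) {
        return "halve(" + children[0] + ")";
      }
    }
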
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseArithmetic.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseArithmetic.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseArithmetic.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseArithmetic.java Sat Mar 28 14:03:43 2015
@@ -58,12 +58,15 @@ public abstract class GenericUDFBaseArit
     // Determine if we are dealing with a numeric or date arithmetic operation
     boolean isDateTimeOp = false;
     for (int idx = 0; idx < 2; ++idx) {
-      if (arguments[idx].getCategory() == Category.PRIMITIVE) {
-        if (PrimitiveGrouping.DATE_GROUP == PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
-            ((PrimitiveObjectInspector) arguments[idx]).getPrimitiveCategory())) {
+      switch (((PrimitiveObjectInspector) arguments[idx]).getPrimitiveCategory()) {
+        case DATE:
+        case TIMESTAMP:
+        case INTERVAL_YEAR_MONTH:
+        case INTERVAL_DAY_TIME:
           isDateTimeOp = true;
           break;
-        }
+        default:
+          break;
       }
     }
 

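The grouping-based test is replaced with an explicit switch so the two new interval categories also select the date-time arithmetic path (the break now terminates the switch rather than the loop, which leaves the outcome unchanged). The equivalent predicate form, as a hypothetical helper:

    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

    // True when an operand's primitive category should route the expression
    // to the date-time/interval UDF rather than the numeric one.
    static boolean isDateTimeCategory(PrimitiveCategory c) {
      switch (c) {
        case DATE:
        case TIMESTAMP:
        case INTERVAL_YEAR_MONTH:
        case INTERVAL_DAY_TIME:
          return true;
        default:
          return false;
      }
    }
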
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java Sat Mar 28 14:03:43 2015
@@ -24,6 +24,8 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -53,6 +55,10 @@ public abstract class GenericUDFBaseUnar
   protected FloatWritable floatWritable = new FloatWritable();
   protected DoubleWritable doubleWritable = new DoubleWritable();
   protected HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
+  protected HiveIntervalYearMonthWritable intervalYearMonthWritable =
+      new HiveIntervalYearMonthWritable();
+  protected HiveIntervalDayTimeWritable intervalDayTimeWritable =
+      new HiveIntervalDayTimeWritable();
 
   public GenericUDFBaseUnary() {
     opName = getClass().getSimpleName();
@@ -74,11 +80,13 @@ public abstract class GenericUDFBaseUnar
     }
 
     inputOI = (PrimitiveObjectInspector) arguments[0];
-    if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())) {
+    if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())
+        && (inputOI.getTypeInfo() != TypeInfoFactory.intervalDayTimeTypeInfo)
+        && (inputOI.getTypeInfo() != TypeInfoFactory.intervalYearMonthTypeInfo)) {
       throw new UDFArgumentTypeException(0, "The "
           + GenericUDFUtils.getOrdinal(1)
-          + " argument of " + opName + "  is expected to a "
-          + "numeric type, but "
+          + " argument of " + opName + "  is expected to be a "
+          + "numeric or interval type, but "
           + inputOI.getTypeName() + " is found");
     }
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java Sat Mar 28 14:03:43 2015
@@ -101,7 +101,7 @@ public class GenericUDFCase extends Gene
       PrimitiveObjectInspector caseOI = (PrimitiveObjectInspector) caseOIResolver.get();
       if (PrimitiveObjectInspectorUtils.comparePrimitiveObjects(
             caseOIResolver.convertIfNecessary(exprValue, argumentOIs[0]), caseOI,
-            caseOIResolver.convertIfNecessary(caseKey, argumentOIs[i]), caseOI)) {
+            caseOIResolver.convertIfNecessary(caseKey, argumentOIs[i], false), caseOI)) {
         Object caseValue = arguments[i + 1].get();
         return returnOIResolver.convertIfNecessary(caseValue, argumentOIs[i + 1]);
       }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIn.java Sat Mar 28 14:03:43 2015
@@ -181,7 +181,7 @@ public class GenericUDFIn extends Generi
             conversionHelper.convertIfNecessary(
                 arguments[0].get(), argumentOIs[0]), compareOI,
             conversionHelper.convertIfNecessary(
-                arguments[i].get(), argumentOIs[i]), compareOI) == 0) {
+                arguments[i].get(), argumentOIs[i], false), compareOI) == 0) {
           bw.set(true);
           return bw;
         }

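Both the CASE and IN hunks pass an explicit reuse=false to convertIfNecessary, the three-argument overload added to GenericUDFUtils later in this commit. The motivation: a cached Converter typically writes into a single reusable output object, so converting the comparison key through the same converter could silently overwrite the previously converted left-hand value. A sketch of the hazard, assuming both operands resolve to the same ObjectInspector (variable names are illustrative):

    // `resolver` caches one Converter per input OI, and converters commonly
    // reuse one output writable across calls.
    Object left  = resolver.convertIfNecessary(exprValue, exprOI);  // -> shared writable W
    Object right = resolver.convertIfNecessary(caseKey, exprOI);    // overwrites W in place
    // `left` and `right` may now be the same object holding the key's value,
    // making the comparison trivially true. Requesting a fresh, uncached
    // converter for the second conversion avoids that:
    Object safe = resolver.convertIfNecessary(caseKey, exprOI, false);
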
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNamedStruct.java Sat Mar 28 14:03:43 2015
@@ -19,16 +19,15 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
 @Description(name = "named_struct",
@@ -51,12 +50,12 @@ public class GenericUDFNamedStruct exten
     ArrayList<String> fname = new ArrayList<String>(numFields / 2);
     ArrayList<ObjectInspector> retOIs = new ArrayList<ObjectInspector>(numFields / 2);
     for (int f = 0; f < numFields; f+=2) {
-      if (!(arguments[f] instanceof WritableConstantStringObjectInspector)) {
+      if (!(arguments[f] instanceof ConstantObjectInspector)) {
         throw new UDFArgumentTypeException(f, "Even arguments" +
             " to NAMED_STRUCT must be a constant STRING." + arguments[f].toString());
       }
-      WritableConstantStringObjectInspector constantOI =
-        (WritableConstantStringObjectInspector)arguments[f];
+      ConstantObjectInspector constantOI =
+        (ConstantObjectInspector)arguments[f];
       fname.add(constantOI.getWritableConstantValue().toString());
       retOIs.add(arguments[f + 1]);
     }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java Sat Mar 28 14:03:43 2015
@@ -55,7 +55,13 @@ import org.apache.hadoop.hive.serde2.obj
   TimestampColEqualLongScalar.class, LongScalarEqualTimestampColumn.class,
   FilterTimestampColEqualLongScalar.class, FilterLongScalarEqualTimestampColumn.class,
   TimestampColEqualDoubleScalar.class, DoubleScalarEqualTimestampColumn.class,
-  FilterTimestampColEqualDoubleScalar.class, FilterDoubleScalarEqualTimestampColumn.class
+  FilterTimestampColEqualDoubleScalar.class, FilterDoubleScalarEqualTimestampColumn.class,
+  IntervalYearMonthScalarEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarEqualIntervalYearMonthColumn.class,
+  IntervalYearMonthColEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColEqualIntervalYearMonthScalar.class,
+  IntervalDayTimeScalarEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarEqualIntervalDayTimeColumn.class,
+  IntervalDayTimeColEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColEqualIntervalDayTimeScalar.class,
+  DateColEqualDateScalar.class,FilterDateColEqualDateScalar.class,
+  DateScalarEqualDateColumn.class,FilterDateScalarEqualDateColumn.class,
   })
 public class GenericUDFOPEqual extends GenericUDFBaseCompare {
   public GenericUDFOPEqual(){

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java Sat Mar 28 14:03:43 2015
@@ -56,7 +56,13 @@ import org.apache.hadoop.io.Text;
   TimestampColGreaterEqualLongScalar.class, LongScalarGreaterEqualTimestampColumn.class,
   FilterTimestampColGreaterEqualLongScalar.class, FilterLongScalarGreaterEqualTimestampColumn.class,
   TimestampColGreaterEqualDoubleScalar.class, DoubleScalarGreaterEqualTimestampColumn.class,
-  FilterTimestampColGreaterEqualDoubleScalar.class, FilterDoubleScalarGreaterEqualTimestampColumn.class
+  FilterTimestampColGreaterEqualDoubleScalar.class, FilterDoubleScalarGreaterEqualTimestampColumn.class,
+  IntervalYearMonthScalarGreaterEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarGreaterEqualIntervalYearMonthColumn.class,
+  IntervalYearMonthColGreaterEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColGreaterEqualIntervalYearMonthScalar.class,
+  IntervalDayTimeScalarGreaterEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarGreaterEqualIntervalDayTimeColumn.class,
+  IntervalDayTimeColGreaterEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColGreaterEqualIntervalDayTimeScalar.class,
+  DateColGreaterEqualDateScalar.class,FilterDateColGreaterEqualDateScalar.class,
+  DateScalarGreaterEqualDateColumn.class,FilterDateScalarGreaterEqualDateColumn.class,
   })
 public class GenericUDFOPEqualOrGreaterThan extends GenericUDFBaseCompare {
   public GenericUDFOPEqualOrGreaterThan(){

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java Sat Mar 28 14:03:43 2015
@@ -56,7 +56,13 @@ import org.apache.hadoop.io.Text;
   TimestampColLessEqualLongScalar.class, LongScalarLessEqualTimestampColumn.class,
   FilterTimestampColLessEqualLongScalar.class, FilterLongScalarLessEqualTimestampColumn.class,
   TimestampColLessEqualDoubleScalar.class, DoubleScalarLessEqualTimestampColumn.class,
-  FilterTimestampColLessEqualDoubleScalar.class, FilterDoubleScalarLessEqualTimestampColumn.class
+  FilterTimestampColLessEqualDoubleScalar.class, FilterDoubleScalarLessEqualTimestampColumn.class,
+  IntervalYearMonthScalarLessEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarLessEqualIntervalYearMonthColumn.class,
+  IntervalYearMonthColLessEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColLessEqualIntervalYearMonthScalar.class,
+  IntervalDayTimeScalarLessEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarLessEqualIntervalDayTimeColumn.class,
+  IntervalDayTimeColLessEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColLessEqualIntervalDayTimeScalar.class,
+  DateColLessEqualDateScalar.class,FilterDateColLessEqualDateScalar.class,
+  DateScalarLessEqualDateColumn.class,FilterDateScalarLessEqualDateColumn.class,
   })
 public class GenericUDFOPEqualOrLessThan extends GenericUDFBaseCompare {
   public GenericUDFOPEqualOrLessThan(){

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java Sat Mar 28 14:03:43 2015
@@ -56,7 +56,13 @@ import org.apache.hadoop.io.Text;
   TimestampColGreaterLongScalar.class, LongScalarGreaterTimestampColumn.class,
   FilterTimestampColGreaterLongScalar.class, FilterLongScalarGreaterTimestampColumn.class,
   TimestampColGreaterDoubleScalar.class, DoubleScalarGreaterTimestampColumn.class,
-  FilterTimestampColGreaterDoubleScalar.class, FilterDoubleScalarGreaterTimestampColumn.class
+  FilterTimestampColGreaterDoubleScalar.class, FilterDoubleScalarGreaterTimestampColumn.class,
+  IntervalYearMonthScalarGreaterIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarGreaterIntervalYearMonthColumn.class,
+  IntervalYearMonthColGreaterIntervalYearMonthScalar.class, FilterIntervalYearMonthColGreaterIntervalYearMonthScalar.class,
+  IntervalDayTimeScalarGreaterIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarGreaterIntervalDayTimeColumn.class,
+  IntervalDayTimeColGreaterIntervalDayTimeScalar.class, FilterIntervalDayTimeColGreaterIntervalDayTimeScalar.class,
+  DateColGreaterDateScalar.class,FilterDateColGreaterDateScalar.class,
+  DateScalarGreaterDateColumn.class,FilterDateScalarGreaterDateColumn.class,
   })
 public class GenericUDFOPGreaterThan extends GenericUDFBaseCompare {
   public GenericUDFOPGreaterThan(){

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java Sat Mar 28 14:03:43 2015
@@ -56,7 +56,13 @@ import org.apache.hadoop.io.Text;
     TimestampColLessLongScalar.class, LongScalarLessTimestampColumn.class,
     FilterTimestampColLessLongScalar.class, FilterLongScalarLessTimestampColumn.class,
     TimestampColLessDoubleScalar.class, DoubleScalarLessTimestampColumn.class,
-    FilterTimestampColLessDoubleScalar.class, FilterDoubleScalarLessTimestampColumn.class
+    FilterTimestampColLessDoubleScalar.class, FilterDoubleScalarLessTimestampColumn.class,
+    IntervalYearMonthScalarLessIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarLessIntervalYearMonthColumn.class,
+    IntervalYearMonthColLessIntervalYearMonthScalar.class, FilterIntervalYearMonthColLessIntervalYearMonthScalar.class,
+    IntervalDayTimeScalarLessIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarLessIntervalDayTimeColumn.class,
+    IntervalDayTimeColLessIntervalDayTimeScalar.class, FilterIntervalDayTimeColLessIntervalDayTimeScalar.class,
+    DateColLessDateScalar.class,FilterDateColLessDateScalar.class,
+    DateScalarLessDateColumn.class,FilterDateScalarLessDateColumn.class,
     })
 public class GenericUDFOPLessThan extends GenericUDFBaseCompare {
   public GenericUDFOPLessThan(){

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java Sat Mar 28 14:03:43 2015
@@ -31,7 +31,38 @@ import org.apache.hadoop.hive.ql.exec.ve
   LongScalarSubtractLongColumn.class, LongScalarSubtractDoubleColumn.class,
   DoubleScalarSubtractLongColumn.class, DoubleScalarSubtractDoubleColumn.class,
   DecimalColSubtractDecimalColumn.class, DecimalColSubtractDecimalScalar.class,
-  DecimalScalarSubtractDecimalColumn.class})
+  DecimalScalarSubtractDecimalColumn.class,
+  IntervalYearMonthColSubtractIntervalYearMonthColumn.class,
+  IntervalYearMonthColSubtractIntervalYearMonthScalar.class,
+  IntervalYearMonthScalarSubtractIntervalYearMonthColumn.class,
+  IntervalDayTimeColSubtractIntervalDayTimeColumn.class,
+  IntervalDayTimeColSubtractIntervalDayTimeScalar.class,
+  IntervalDayTimeScalarSubtractIntervalDayTimeColumn.class,
+  TimestampColSubtractIntervalDayTimeColumn.class,
+  TimestampColSubtractIntervalDayTimeScalar.class,
+  TimestampScalarSubtractIntervalDayTimeColumn.class,
+  TimestampColSubtractTimestampColumn.class,
+  TimestampColSubtractTimestampScalar.class,
+  TimestampScalarSubtractTimestampColumn.class,
+  DateColSubtractDateColumn.class,
+  DateColSubtractDateScalar.class,
+  DateScalarSubtractDateColumn.class,
+  DateColSubtractTimestampColumn.class,
+  DateColSubtractTimestampScalar.class,
+  DateScalarSubtractTimestampColumn.class,
+  TimestampColSubtractDateColumn.class,
+  TimestampColSubtractDateScalar.class,
+  TimestampScalarSubtractDateColumn.class,
+  DateColSubtractIntervalDayTimeColumn.class,
+  DateColSubtractIntervalDayTimeScalar.class,
+  DateScalarSubtractIntervalDayTimeColumn.class,
+  DateColSubtractIntervalYearMonthColumn.class,
+  DateScalarSubtractIntervalYearMonthColumn.class,
+  DateColSubtractIntervalYearMonthScalar.class,
+  TimestampColSubtractIntervalYearMonthColumn.class,
+  TimestampScalarSubtractIntervalYearMonthColumn.class,
+  TimestampColSubtractIntervalYearMonthScalar.class,
+})
 public class GenericUDFOPMinus extends GenericUDFBaseArithmetic {
 
   public GenericUDFOPMinus() {
@@ -46,7 +77,6 @@ public class GenericUDFOPMinus extends G
 
   @Override
   protected GenericUDF instantiateDTIUDF() {
-    // TODO: implement date-time/interval version of UDF
-    return new GenericUDFOPNumericMinus();
+    return new GenericUDFOPDTIMinus();
   }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java Sat Mar 28 14:03:43 2015
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus;
@@ -28,6 +30,8 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -81,6 +85,16 @@ public class GenericUDFOPNegative extend
       HiveDecimal dec = ((HiveDecimalWritable)input).getHiveDecimal();
       decimalWritable.set(dec.negate());
       return decimalWritable;
+    case INTERVAL_YEAR_MONTH:
+      HiveIntervalYearMonth intervalYearMonth =
+          ((HiveIntervalYearMonthWritable) input).getHiveIntervalYearMonth();
+      this.intervalYearMonthWritable.set(intervalYearMonth.negate());
+      return this.intervalYearMonthWritable;
+    case INTERVAL_DAY_TIME:
+      HiveIntervalDayTime intervalDayTime =
+          ((HiveIntervalDayTimeWritable) input).getHiveIntervalDayTime();
+      this.intervalDayTimeWritable.set(intervalDayTime.negate());
+      return intervalDayTimeWritable;
     default:
       // Should never happen.
       throw new RuntimeException("Unexpected type in evaluating " + opName + ": " +

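Unary minus now also covers the two interval types, delegating to the types' own negate() methods and writing the result into the reusable interval writables added to GenericUDFBaseUnary earlier in this commit. A minimal sketch of the value-level semantics, assuming the (years, months) constructor from Hive's interval type API:

    import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

    // negate() flips the interval's sign as a whole; both fields together
    // represent one signed quantity (here 18 months -> minus 18 months).
    HiveIntervalYearMonth eighteenMonths = new HiveIntervalYearMonth(1, 6);
    HiveIntervalYearMonth negated = eighteenMonths.negate();
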
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java Sat Mar 28 14:03:43 2015
@@ -55,7 +55,13 @@ import org.apache.hadoop.hive.serde2.obj
   TimestampColNotEqualLongScalar.class, LongScalarNotEqualTimestampColumn.class,
   FilterTimestampColNotEqualLongScalar.class, FilterLongScalarNotEqualTimestampColumn.class,
   TimestampColNotEqualDoubleScalar.class, DoubleScalarNotEqualTimestampColumn.class,
-  FilterTimestampColNotEqualDoubleScalar.class, FilterDoubleScalarNotEqualTimestampColumn.class
+  FilterTimestampColNotEqualDoubleScalar.class, FilterDoubleScalarNotEqualTimestampColumn.class,
+  IntervalYearMonthScalarNotEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarNotEqualIntervalYearMonthColumn.class,
+  IntervalYearMonthColNotEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColNotEqualIntervalYearMonthScalar.class,
+  IntervalDayTimeScalarNotEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarNotEqualIntervalDayTimeColumn.class,
+  IntervalDayTimeColNotEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColNotEqualIntervalDayTimeScalar.class,
+  DateColNotEqualDateScalar.class,FilterDateColNotEqualDateScalar.class,
+  DateScalarNotEqualDateColumn.class,FilterDateScalarNotEqualDateColumn.class,
   })
 public class GenericUDFOPNotEqual extends GenericUDFBaseCompare {
   public GenericUDFOPNotEqual(){

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java Sat Mar 28 14:03:43 2015
@@ -37,7 +37,38 @@ import org.apache.hadoop.hive.ql.exec.ve
   LongColAddDoubleScalar.class, DoubleColAddLongScalar.class, DoubleColAddDoubleScalar.class,
   LongScalarAddLongColumn.class, LongScalarAddDoubleColumn.class, DoubleScalarAddLongColumn.class,
   DoubleScalarAddDoubleColumn.class, DecimalScalarAddDecimalColumn.class, DecimalColAddDecimalColumn.class,
-  DecimalColAddDecimalScalar.class})
+  DecimalColAddDecimalScalar.class,
+  IntervalYearMonthColAddIntervalYearMonthColumn.class,
+  IntervalYearMonthColAddIntervalYearMonthScalar.class,
+  IntervalYearMonthScalarAddIntervalYearMonthColumn.class,
+  IntervalDayTimeColAddIntervalDayTimeColumn.class,
+  IntervalDayTimeColAddIntervalDayTimeScalar.class,
+  IntervalDayTimeScalarAddIntervalDayTimeColumn.class,
+  IntervalDayTimeColAddTimestampColumn.class,
+  IntervalDayTimeColAddTimestampScalar.class,
+  IntervalDayTimeScalarAddTimestampColumn.class,
+  TimestampColAddIntervalDayTimeColumn.class,
+  TimestampColAddIntervalDayTimeScalar.class,
+  TimestampScalarAddIntervalDayTimeColumn.class,
+  DateColAddIntervalDayTimeColumn.class,
+  DateColAddIntervalDayTimeScalar.class,
+  DateScalarAddIntervalDayTimeColumn.class,
+  IntervalDayTimeColAddDateColumn.class,
+  IntervalDayTimeColAddDateScalar.class,
+  IntervalDayTimeScalarAddDateColumn.class,
+  IntervalYearMonthColAddDateColumn.class,
+  IntervalYearMonthColAddDateScalar.class,
+  IntervalYearMonthScalarAddDateColumn.class,
+  IntervalYearMonthColAddTimestampColumn.class,
+  IntervalYearMonthColAddTimestampScalar.class,
+  IntervalYearMonthScalarAddTimestampColumn.class,
+  DateColAddIntervalYearMonthColumn.class,
+  DateScalarAddIntervalYearMonthColumn.class,
+  DateColAddIntervalYearMonthScalar.class,
+  TimestampColAddIntervalYearMonthColumn.class,
+  TimestampScalarAddIntervalYearMonthColumn.class,
+  TimestampColAddIntervalYearMonthScalar.class
+})
 public class GenericUDFOPPlus extends GenericUDFBaseArithmetic {
 
   public GenericUDFOPPlus() {
@@ -52,7 +83,6 @@ public class GenericUDFOPPlus extends Ge
 
   @Override
   protected GenericUDF instantiateDTIUDF() {
-    // TODO: implement date-time/interval version of UDF
-    return new GenericUDFOPNumericPlus();
+    return new GenericUDFOPDTIPlus();
   }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java Sat Mar 28 14:03:43 2015
@@ -198,11 +198,17 @@ public final class GenericUDFUtils {
       return returnObjectInspector != null ? returnObjectInspector : defaultOI;
     }
 
+    public Object convertIfNecessary(Object o, ObjectInspector oi) {
+      return convertIfNecessary(o, oi, true);
+    }
+
     /**
      * Convert the return Object if necessary (when the ObjectInspectors of
-     * different possibilities are not all the same).
+     * different possibilities are not all the same). If reuse is true, 
+     * the result Object will be the same object as the last invocation 
+     * (as long as the oi is the same)
      */
-    public Object convertIfNecessary(Object o, ObjectInspector oi) {
+    public Object convertIfNecessary(Object o, ObjectInspector oi, boolean reuse) {
       Object converted = null;
       if (oi == returnObjectInspector) {
         converted = o;
@@ -212,15 +218,20 @@ public final class GenericUDFUtils {
           return null;
         }
 
-        if (converters == null) {
-          converters = new HashMap<ObjectInspector, Converter>();
+        Converter converter = null;
+        if (reuse) {
+	  if (converters == null) {
+	    converters = new HashMap<ObjectInspector, Converter>();
+	  }
+	  converter = converters.get(oi);
         }
 
-        Converter converter = converters.get(oi);
         if (converter == null) {
           converter = ObjectInspectorConverters.getConverter(oi,
               returnObjectInspector);
-          converters.put(oi, converter);
+          if (reuse) {
+            converters.put(oi, converter);
+          }
         }
         converted = converter.convert(o);
       }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFStack.java Sat Mar 28 14:03:43 2015
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.exec.De
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -60,13 +61,13 @@ public class GenericUDTFStack extends Ge
     if (args.length < 2)  {
       throw new UDFArgumentException("STACK() expects at least two arguments.");
     }
-    if (!(args[0] instanceof WritableConstantIntObjectInspector)) {
+    if (!(args[0] instanceof ConstantObjectInspector)) {
       throw new UDFArgumentException(
           "The first argument to STACK() must be a constant integer (got " +
           args[0].getTypeName() + " instead).");
     }
-    numRows =
-        ((WritableConstantIntObjectInspector)args[0]).getWritableConstantValue();
+    numRows = (IntWritable)
+        ((ConstantObjectInspector)args[0]).getWritableConstantValue();
 
     if (numRows == null || numRows.get() < 1) {
       throw new UDFArgumentException(

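As in the NAMED_STRUCT change above, the test is widened from the concrete WritableConstantIntObjectInspector to the ConstantObjectInspector interface, with an explicit cast of the constant value to IntWritable. One side effect: a constant of a non-integer type now fails the cast with a ClassCastException instead of the instanceof test. A stricter variant could check the primitive category first (illustrative sketch, not part of the commit):

    // Verify the constant is an INT before casting its value.
    if (!(args[0] instanceof ConstantObjectInspector)
        || !(args[0] instanceof PrimitiveObjectInspector)
        || ((PrimitiveObjectInspector) args[0]).getPrimitiveCategory()
            != PrimitiveObjectInspector.PrimitiveCategory.INT) {
      throw new UDFArgumentException(
          "The first argument to STACK() must be a constant integer (got "
          + args[0].getTypeName() + " instead).");
    }
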
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java Sat Mar 28 14:03:43 2015
@@ -208,7 +208,7 @@ public class NGramEstimator {
    *
    * @param other A serialized n-gram object created by the serialize() method
    */
-  public void merge(List<Text> other) throws HiveException {
+  public void merge(List other) throws HiveException {
     if(other == null) {
       return;
     }
@@ -240,8 +240,7 @@ public class NGramEstimator {
     for(int i = 3; i < other.size(); i++) {
       ArrayList<String> key = new ArrayList<String>();
       for(int j = 0; j < n; j++) {
-        Text word = other.get(i+j);
-        key.add(word.toString());
+        key.add(other.get(i+j).toString());
       }
       i += n;
       double val = Double.parseDouble( other.get(i).toString() );

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumericHistogram.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumericHistogram.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumericHistogram.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumericHistogram.java Sat Mar 28 14:03:43 2015
@@ -21,7 +21,9 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Collections;
 import java.util.Random;
+
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 
 
 /**
@@ -115,7 +117,7 @@ public class NumericHistogram {
    * @param other A serialized histogram created by the serialize() method
    * @see #merge
    */
-  public void merge(List<DoubleWritable> other) {
+  public void merge(List other, DoubleObjectInspector doi) {
     if(other == null) {
       return;
     }
@@ -123,13 +125,13 @@ public class NumericHistogram {
     if(nbins == 0 || nusedbins == 0)  {
       // Our aggregation buffer has nothing in it, so just copy over 'other'
       // by deserializing the ArrayList of (x,y) pairs into an array of Coord objects
-      nbins = (int) other.get(0).get();
+      nbins = (int) doi.get(other.get(0));
       nusedbins = (other.size()-1)/2;
       bins = new ArrayList<Coord>(nusedbins);
       for (int i = 1; i < other.size(); i+=2) {
         Coord bin = new Coord();
-        bin.x = other.get(i).get();
-        bin.y = other.get(i+1).get();
+        bin.x = doi.get(other.get(i));
+        bin.y = doi.get(other.get(i+1));
         bins.add(bin);
       }
     } else {
@@ -146,8 +148,8 @@ public class NumericHistogram {
       }
       for (int j = 1; j < other.size(); j += 2) {
         Coord bin = new Coord();
-        bin.x = other.get(j).get();
-        bin.y = other.get(j+1).get();
+        bin.x = doi.get(other.get(j));
+        bin.y = doi.get(other.get(j+1));
         tmp_bins.add(bin);
       }
       Collections.sort(tmp_bins);

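merge() now takes the list's element inspector from the caller rather than requiring DoubleWritable elements, matching the GenericUDAFHistogramNumeric and GenericUDAFPercentileApprox changes above. For reference, the flat layout the loops above consume is: element 0 holds nbins, followed by one (x, y) pair per used bin (sketch, with `other` and `doi` as in the hunk):

    // Flat layout decoded by merge(): [ nbins, x1, y1, x2, y2, ... ]
    // where x is a bin center and y its height (point count).
    int nbins = (int) doi.get(other.get(0));
    int nusedbins = (other.size() - 1) / 2;
    for (int i = 1; i < other.size(); i += 2) {
      double x = doi.get(other.get(i));
      double y = doi.get(other.get(i + 1));
      // ... collect (x, y) as a Coord, as in the hunk above
    }
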
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java Sat Mar 28 14:03:43 2015
@@ -1190,7 +1190,7 @@ public class WindowingTableFunction exte
           (PrimitiveObjectInspector) expressionDef.getOI());
       String s2 = PrimitiveObjectInspectorUtils.getString(v2,
           (PrimitiveObjectInspector) expressionDef.getOI());
-      return (s1 == null && s2 == null) || s1.equals(s2);
+      return (s1 == null && s2 == null) || (s1 != null && s1.equals(s2));
     }
   }
 

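The old predicate dereferenced s1 whenever the two strings were not both null, so a null s1 paired with a non-null s2 threw a NullPointerException. The fixed expression is plain null-safe equality; on Java 7+ the same predicate can be written with java.util.Objects (equivalent form shown for clarity only):

    import java.util.Objects;

    // Null-safe equality, equivalent to
    // (s1 == null && s2 == null) || (s1 != null && s1.equals(s2)).
    static boolean bothNullOrEqual(String s1, String s2) {
      return Objects.equals(s1, s2);
    }
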
Modified: hive/branches/spark/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto (original)
+++ hive/branches/spark/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto Sat Mar 28 14:03:43 2015
@@ -129,6 +129,7 @@ message ColumnEncoding {
 message StripeFooter {
   repeated Stream streams = 1;
   repeated ColumnEncoding columns = 2;
+  optional string writerTimezone = 3;
 }
 
 message Type {

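Adding writerTimezone as an optional field with a fresh tag number keeps the stripe footer wire-compatible in both directions: old readers skip the unknown field, and new readers can detect its absence in older files. With proto2, protoc generates has/get accessors on the generated Java message; a reader-side sketch (the OrcProto class location and the UTC fallback are assumptions for illustration):

    import org.apache.hadoop.hive.ql.io.orc.OrcProto;

    // Fall back when reading stripes written before this field existed.
    static String writerTimezone(OrcProto.StripeFooter footer) {
      return footer.hasWriterTimezone() ? footer.getWriterTimezone() : "UTC";
    }
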
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java Sat Mar 28 14:03:43 2015
@@ -319,7 +319,7 @@ public class TestFileSinkOperator {
   }
 
   private void processRows(FileSinkOperator op) throws HiveException {
-    for (TFSORow r : rows) op.processOp(r, 0);
+    for (TFSORow r : rows) op.process(r, 0);
     op.jobCloseOp(jc, true);
     op.close(false);
   }

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Sat Mar 28 14:03:43 2015
@@ -236,7 +236,7 @@ public class TestOperators extends TestC
 
       // evaluate on row
       for (int i = 0; i < 5; i++) {
-        op.processOp(r[i].o, 0);
+        op.process(r[i].o, 0);
       }
       op.close(false);
 

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java Sat Mar 28 14:03:43 2015
@@ -49,10 +49,10 @@ public class TestBytesBytesMultiHashMap
   public void testPutGetOne() throws Exception {
     BytesBytesMultiHashMap map = new BytesBytesMultiHashMap(CAPACITY, LOAD_FACTOR, WB_SIZE);
     RandomKvSource kv = new RandomKvSource(0, 0);
-    map.put(kv);
+    map.put(kv, -1);
     verifyResults(map, kv.getLastKey(), kv.getLastValue());
     kv = new RandomKvSource(10, 100);
-    map.put(kv);
+    map.put(kv, -1);
     verifyResults(map, kv.getLastKey(), kv.getLastValue());
   }
 
@@ -60,12 +60,12 @@ public class TestBytesBytesMultiHashMap
   public void testPutGetMultiple() throws Exception {
     BytesBytesMultiHashMap map = new BytesBytesMultiHashMap(CAPACITY, LOAD_FACTOR, WB_SIZE);
     RandomKvSource kv = new RandomKvSource(0, 100);
-    map.put(kv);
+    map.put(kv, -1);
     verifyResults(map, kv.getLastKey(), kv.getLastValue());
     FixedKeyKvSource kv2 = new FixedKeyKvSource(kv.getLastKey(), 0, 100);
     kv2.values.add(kv.getLastValue());
     for (int i = 0; i < 3; ++i) {
-      map.put(kv2);
+      map.put(kv2, -1);
       verifyResults(map, kv2.key, kv2.values.toArray(new byte[kv2.values.size()][]));
     }
   }
@@ -74,11 +74,11 @@ public class TestBytesBytesMultiHashMap
   public void testGetNonExistent() throws Exception {
     BytesBytesMultiHashMap map = new BytesBytesMultiHashMap(CAPACITY, LOAD_FACTOR, WB_SIZE);
     RandomKvSource kv = new RandomKvSource(1, 100);
-    map.put(kv);
+    map.put(kv, -1);
     byte[] key = kv.getLastKey();
     key[0] = (byte)(key[0] + 1);
     FixedKeyKvSource kv2 = new FixedKeyKvSource(kv.getLastKey(), 0, 100);
-    map.put(kv2);
+    map.put(kv2, -1);
     key[0] = (byte)(key[0] + 1);
     List<WriteBuffers.ByteSegmentRef> results = new ArrayList<WriteBuffers.ByteSegmentRef>(0);
     map.getValueRefs(key, key.length, results);
@@ -93,7 +93,7 @@ public class TestBytesBytesMultiHashMap
     BytesBytesMultiHashMap map = new BytesBytesMultiHashMap(CAPACITY, 1f, WB_SIZE);
     UniqueKeysKvSource kv = new UniqueKeysKvSource();
     for (int i = 0; i < CAPACITY; ++i) {
-      map.put(kv);
+      map.put(kv, -1);
     }
     for (int i = 0; i < kv.keys.size(); ++i) {
       verifyResults(map, kv.keys.get(i), kv.values.get(i));
@@ -111,7 +111,7 @@ public class TestBytesBytesMultiHashMap
     BytesBytesMultiHashMap map = new BytesBytesMultiHashMap(1, 0.0000001f, WB_SIZE);
     UniqueKeysKvSource kv = new UniqueKeysKvSource();
     for (int i = 0; i < 18; ++i) {
-      map.put(kv);
+      map.put(kv, -1);
       for (int j = 0; j <= i; ++j) {
         verifyResults(map, kv.keys.get(j), kv.values.get(j));
       }
@@ -165,7 +165,7 @@ public class TestBytesBytesMultiHashMap
 
     @Override
     public void writeKey(RandomAccessOutput dest) throws SerDeException {
-      lastKey += 465623573; // This number is certified to be random.
+      lastKey += 465623573;
       int len = LazyBinaryUtils.writeVLongToByteArray(buffer, lastKey);
       lastBuffer = Arrays.copyOf(buffer, len);
       keys.add(lastBuffer);

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java Sat Mar 28 14:03:43 2015
@@ -19,12 +19,11 @@
 package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 import junit.framework.Assert;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterExprAndExpr;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualDoubleScalar;
@@ -39,6 +38,8 @@ import org.junit.Test;
  */
 public class TestVectorFilterOperator {
 
+  HiveConf hconf = new HiveConf();
+
   /**
    * Fundamental logic and performance tests for vector filters belong here.
    *
@@ -96,6 +97,7 @@ public class TestVectorFilterOperator {
   @Test
   public void testBasicFilterOperator() throws HiveException {
     VectorFilterOperator vfo = getAVectorFilterOperator();
+    vfo.initialize(hconf, null);
     VectorExpression ve1 = new FilterLongColGreaterLongColumn(0,1);
     VectorExpression ve2 = new FilterLongColEqualDoubleScalar(2, 0);
     VectorExpression ve3 = new FilterExprAndExpr();
@@ -124,6 +126,7 @@ public class TestVectorFilterOperator {
   @Test
   public void testBasicFilterLargeData() throws HiveException {
     VectorFilterOperator vfo = getAVectorFilterOperator();
+    vfo.initialize(hconf, null);
     VectorExpression ve1 = new FilterLongColGreaterLongColumn(0,1);
     VectorExpression ve2 = new FilterLongColEqualDoubleScalar(2, 0);
     VectorExpression ve3 = new FilterExprAndExpr();
@@ -136,7 +139,7 @@ public class TestVectorFilterOperator {
     VectorizedRowBatch vrg = fdr.getNext();
 
     while (vrg.size > 0) {
-      vfo.processOp(vrg, 0);
+      vfo.process(vrg, 0);
       vrg = fdr.getNext();
     }
     long endTime = System.currentTimeMillis();

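The pattern in this file recurs across the vectorization tests below: operators are now initialized with a real HiveConf instead of null, and the per-batch entry point is process() rather than processOp(). A sketch of the resulting call sequence, assuming the helpers visible in the diff above (getAVectorFilterOperator() and the FakeDataReader fixture are defined in the test class, not here):

    HiveConf hconf = new HiveConf();
    VectorFilterOperator vfo = getAVectorFilterOperator(); // helper from this test class
    vfo.initialize(hconf, null);   // a null Configuration is no longer sufficient
    VectorizedRowBatch vrg = fdr.getNext(); // fdr: the test's FakeDataReader fixture
    while (vrg.size > 0) {
      vfo.process(vrg, 0);         // renamed from processOp()
      vrg = fdr.getNext();
    }
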
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java Sat Mar 28 14:03:43 2015
@@ -26,8 +26,6 @@ import static org.junit.Assert.assertTru
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryMXBean;
 import java.lang.reflect.Constructor;
-import java.math.BigDecimal;
-import java.math.BigInteger;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -39,6 +37,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromConcat;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromLongIterables;
@@ -70,6 +69,8 @@ import org.junit.Test;
  */
 public class TestVectorGroupByOperator {
 
+  HiveConf hconf = new HiveConf();
+
   private static ExprNodeDesc buildColumnDesc(
       VectorizationContext ctx,
       String column,
@@ -188,7 +189,7 @@ public class TestVectorGroupByOperator {
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
     FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-    vgo.initialize(null, null);
+    vgo.initialize(hconf, null);
 
     this.outputRowCount = 0;
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
@@ -233,7 +234,7 @@ public class TestVectorGroupByOperator {
     long countRowsProduced = 0;
     for (VectorizedRowBatch unit: data) {
       countRowsProduced += 100;
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
       if (0 < outputRowCount) {
         break;
       }
@@ -529,7 +530,7 @@ public class TestVectorGroupByOperator {
             new String[] {"int", "bigint"},
             Arrays.asList(new Object[]{  1,null, 1, null}),
             Arrays.asList(new Object[]{13L,null,7L, 19L})),
-        buildHashMap((int)1, 20L, null, 19L));
+        buildHashMap(1, 20L, null, 19L));
   }
 
   @Test
@@ -541,7 +542,7 @@ public class TestVectorGroupByOperator {
             new String[] {"bigint", "bigint"},
             Arrays.asList(new Object[]{  1,null, 1, null}),
             Arrays.asList(new Object[]{13L,null,7L, 19L})),
-        buildHashMap((long)1L, 20L, null, 19L));
+        buildHashMap(1L, 20L, null, 19L));
   }
 
   @Test
@@ -589,7 +590,7 @@ public class TestVectorGroupByOperator {
             new String[] {"double", "bigint"},
             Arrays.asList(new Object[]{  1,null, 1, null}),
             Arrays.asList(new Object[]{13L,null,7L, 19L})),
-        buildHashMap((double)1.0, 20L, null, 19L));
+        buildHashMap(1.0, 20L, null, 19L));
   }
 
   @Test
@@ -794,7 +795,7 @@ public class TestVectorGroupByOperator {
                 HiveDecimal.create(5),
                 HiveDecimal.create(7),
                 HiveDecimal.create(19)}),
-        (double) Math.sqrt(30));
+        Math.sqrt(30));
   }
 
   @Test
@@ -808,7 +809,7 @@ public class TestVectorGroupByOperator {
                 HiveDecimal.create(5),
                 HiveDecimal.create(7),
                 HiveDecimal.create(19)}),
-        (double) Math.sqrt(40));
+        Math.sqrt(40));
   }
 
   @Test
@@ -1546,7 +1547,7 @@ public class TestVectorGroupByOperator {
         "variance",
         2,
         Arrays.asList(new Long[]{97L}),
-        (double)0.0);
+        0.0);
   }
 
   @Test
@@ -1565,12 +1566,12 @@ public class TestVectorGroupByOperator {
         "variance",
         2,
         Arrays.asList(new Long[]{null,13L, 5L,7L,19L}),
-        (double) 30.0);
+        30.0);
     testAggregateLongAggregate(
         "variance",
         2,
         Arrays.asList(new Long[]{13L,null,5L, 7L,19L}),
-        (double) 30.0);
+        30.0);
     testAggregateLongAggregate(
         "variance",
         2,
@@ -1633,7 +1634,7 @@ public class TestVectorGroupByOperator {
         "std",
         2,
         Arrays.asList(new Long[]{13L,5L,7L,19L}),
-        (double) Math.sqrt(30));
+        Math.sqrt(30));
   }
 
   @Test
@@ -1673,7 +1674,7 @@ public class TestVectorGroupByOperator {
         "stddev_samp",
         2,
         Arrays.asList(new Long[]{13L,5L,7L,19L}),
-        (double) Math.sqrt(40));
+        Math.sqrt(40));
   }
 
   @Test
@@ -1731,7 +1732,7 @@ public class TestVectorGroupByOperator {
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
     FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-    vgo.initialize(null, null);
+    vgo.initialize(hconf, null);
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
 
       private int rowIndex;
@@ -1801,7 +1802,7 @@ public class TestVectorGroupByOperator {
     }.init(aggregateName, expected, keys));
 
     for (VectorizedRowBatch unit: data) {
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
     }
     vgo.close(false);
 
@@ -1845,7 +1846,7 @@ public class TestVectorGroupByOperator {
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
     FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-    vgo.initialize(null, null);
+    vgo.initialize(hconf, null);
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
 
       private int rowIndex;
@@ -1914,7 +1915,7 @@ public class TestVectorGroupByOperator {
     }.init(aggregateName, expected, keys));
 
     for (VectorizedRowBatch unit: data) {
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
     }
     vgo.close(false);
 
@@ -2079,18 +2080,18 @@ public class TestVectorGroupByOperator {
         assertEquals(key, null, arr[0]);
       } else if (arr[0] instanceof LongWritable) {
         LongWritable lw = (LongWritable) arr[0];
-        assertEquals(key, (Long) expected, (Long) lw.get());
+        assertEquals(key, expected, lw.get());
       } else if (arr[0] instanceof Text) {
         Text tx = (Text) arr[0];
         String sbw = tx.toString();
-        assertEquals(key, (String) expected, sbw);
+        assertEquals(key, expected, sbw);
       } else if (arr[0] instanceof DoubleWritable) {
         DoubleWritable dw = (DoubleWritable) arr[0];
-        assertEquals (key, (Double) expected, (Double) dw.get());
+        assertEquals (key, expected, dw.get());
       } else if (arr[0] instanceof Double) {
-        assertEquals (key, (Double) expected, (Double) arr[0]);
+        assertEquals (key, expected, arr[0]);
       } else if (arr[0] instanceof Long) {
-        assertEquals (key, (Long) expected, (Long) arr[0]);
+        assertEquals (key, expected, arr[0]);
       } else if (arr[0] instanceof HiveDecimalWritable) {
         HiveDecimalWritable hdw = (HiveDecimalWritable) arr[0];
         HiveDecimal hd = hdw.getHiveDecimal();
@@ -2126,10 +2127,10 @@ public class TestVectorGroupByOperator {
 
         if (vals[1] instanceof DoubleWritable) {
           DoubleWritable dw = (DoubleWritable) vals[1];
-          assertEquals (key, (Double) expected, (Double) (dw.get() / lw.get()));
+          assertEquals (key, expected, dw.get() / lw.get());
         } else if (vals[1] instanceof HiveDecimalWritable) {
           HiveDecimalWritable hdw = (HiveDecimalWritable) vals[1];
-          assertEquals (key, (HiveDecimal) expected, hdw.getHiveDecimal().divide(HiveDecimal.create(lw.get())));
+          assertEquals (key, expected, hdw.getHiveDecimal().divide(HiveDecimal.create(lw.get())));
         }
       }
     }
@@ -2241,10 +2242,10 @@ public class TestVectorGroupByOperator {
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
     FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-    vgo.initialize(null, null);
+    vgo.initialize(hconf, null);
 
     for (VectorizedRowBatch unit: data) {
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
     }
     vgo.close(false);
 
@@ -2272,10 +2273,10 @@ public class TestVectorGroupByOperator {
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
     FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-    vgo.initialize(null, null);
+    vgo.initialize(hconf, null);
 
     for (VectorizedRowBatch unit: data) {
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
     }
     vgo.close(false);
 
@@ -2303,10 +2304,10 @@ public class TestVectorGroupByOperator {
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
     FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-    vgo.initialize(null, null);
+    vgo.initialize(hconf, null);
 
     for (VectorizedRowBatch unit: data) {
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
     }
     vgo.close(false);
 
@@ -2321,35 +2322,34 @@ public class TestVectorGroupByOperator {
   }
 
   public void testAggregateDecimalIterable (
-          String aggregateName,
-          Iterable<VectorizedRowBatch> data,
-          Object expected) throws HiveException {
-          List<String> mapColumnNames = new ArrayList<String>();
-          mapColumnNames.add("A");
-          VectorizationContext ctx = new VectorizationContext(mapColumnNames);
-
-        GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
-            TypeInfoFactory.getDecimalTypeInfo(30, 4));
+String aggregateName, Iterable<VectorizedRowBatch> data,
+      Object expected) throws HiveException {
+    List<String> mapColumnNames = new ArrayList<String>();
+    mapColumnNames.add("A");
+    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
 
-        VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    GroupByDesc desc =
+        buildGroupByDescType(ctx, aggregateName, "A", TypeInfoFactory.getDecimalTypeInfo(30, 4));
 
-        FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-        vgo.initialize(null, null);
+    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
-        for (VectorizedRowBatch unit: data) {
-          vgo.processOp(unit,  0);
-        }
-        vgo.close(false);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    vgo.initialize(hconf, null);
 
-        List<Object> outBatchList = out.getCapturedRows();
-        assertNotNull(outBatchList);
-        assertEquals(1, outBatchList.size());
+    for (VectorizedRowBatch unit : data) {
+      vgo.process(unit, 0);
+    }
+    vgo.close(false);
 
-        Object result = outBatchList.get(0);
+    List<Object> outBatchList = out.getCapturedRows();
+    assertNotNull(outBatchList);
+    assertEquals(1, outBatchList.size());
 
-        Validator validator = getValidator(aggregateName);
-        validator.validate("_total", expected, result);
-      }
+    Object result = outBatchList.get(0);
+
+    Validator validator = getValidator(aggregateName);
+    validator.validate("_total", expected, result);
+  }
 
 
   public void testAggregateDoubleIterable (
@@ -2366,10 +2366,10 @@ public class TestVectorGroupByOperator {
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
     FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-    vgo.initialize(null, null);
+    vgo.initialize(hconf, null);
 
     for (VectorizedRowBatch unit: data) {
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
     }
     vgo.close(false);
 
@@ -2399,7 +2399,7 @@ public class TestVectorGroupByOperator {
     vgo.initialize(null, null);
 
     for (VectorizedRowBatch unit: data) {
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
     }
     vgo.close(false);
 
@@ -2430,7 +2430,7 @@ public class TestVectorGroupByOperator {
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
     FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-    vgo.initialize(null, null);
+    vgo.initialize(hconf, null);
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
 
       private int rowIndex;
@@ -2469,7 +2469,7 @@ public class TestVectorGroupByOperator {
     }.init(aggregateName, expected, keys));
 
     for (VectorizedRowBatch unit: data) {
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
     }
     vgo.close(false);
 
@@ -2496,7 +2496,7 @@ public class TestVectorGroupByOperator {
     VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
 
     FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
-    vgo.initialize(null, null);
+    vgo.initialize(hconf, null);
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
 
       private int rowIndex;
@@ -2536,7 +2536,7 @@ public class TestVectorGroupByOperator {
     }.init(aggregateName, expected, keys));
 
     for (VectorizedRowBatch unit: data) {
-      vgo.processOp(unit,  0);
+      vgo.process(unit,  0);
     }
     vgo.close(false);
 

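Besides the HiveConf and process() migration, this file drops a number of redundant casts around assertEquals. With an Object-typed expected value, the compiler cannot unbox it to match the primitive overload, so the Object overload is selected and the primitive actual value is auto-boxed whether or not a cast is written. An illustrative snippet:

    Object expected = 20L;   // boxed Long, as produced by buildHashMap(1, 20L, ...)
    long actual = 20L;
    // Resolves to assertEquals(String, Object, Object); 'actual' auto-boxes to Long.
    // The former (Long) casts on both arguments changed nothing.
    org.junit.Assert.assertEquals("key", expected, actual);
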
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java Sat Mar 28 14:03:43 2015
@@ -67,7 +67,7 @@ public class TestVectorLimitOperator {
     lo.initialize(new Configuration(), null);
 
     // Process the batch
-    lo.processOp(vrb, 0);
+    lo.process(vrb, 0);
 
     // Verify batch size
     Assert.assertEquals(vrb.size, expectedBatchSize);

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java Sat Mar 28 14:03:43 2015
@@ -119,7 +119,7 @@ public class TestVectorSelectOperator {
     VectorizedRowBatch vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
         VectorizedRowBatch.DEFAULT_SIZE, 4, 17);
 
-    vso.processOp(vrg, 0);
+    vso.process(vrg, 0);
   }
 
 }

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Sat Mar 28 14:03:43 2015
@@ -151,7 +151,7 @@ public class TestVectorizationContext {
     VectorUDFUnixTimeStampLong v1 = new VectorUDFUnixTimeStampLong();
     VectorExpressionDescriptor.Builder builder1 = new VectorExpressionDescriptor.Builder();
     VectorExpressionDescriptor.Descriptor d1 = builder1.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.INT_DATETIME_FAMILY)
+        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.INT_DATETIME_INTERVAL_FAMILY)
         .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
     assertTrue(d1.matches(v1.getDescriptor()));
 

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizedRowBatchCtx.java Sat Mar 28 14:03:43 2015
@@ -21,12 +21,11 @@ package org.apache.hadoop.hive.ql.exec.v
 import java.io.File;
 import java.io.IOException;
 import java.sql.Timestamp;
+import java.util.Arrays;
 import java.util.Calendar;
 import java.util.List;
 import java.util.Properties;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -58,6 +57,7 @@ import org.apache.hadoop.io.ObjectWritab
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.compress.DefaultCodec;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -100,7 +100,7 @@ public class TestVectorizedRowBatchCtx {
       serDe = new ColumnarSerDe();
       SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
     } catch (SerDeException e) {
-      new RuntimeException(e);
+      throw new RuntimeException(e);
     }
   }
 
@@ -292,7 +292,7 @@ public class TestVectorizedRowBatchCtx {
               BytesWritable batchBinary = (BytesWritable) bcv.getWritableObject(i);
               byte[] a = colBinary.getBytes();
               byte[] b = batchBinary.getBytes();
-              Assert.assertEquals(true, a.equals(b));
+              Assert.assertEquals(true, Arrays.equals(a, b));
           }
             break;
           case STRING: {
@@ -312,7 +312,7 @@ public class TestVectorizedRowBatchCtx {
           }
             break;
           default:
-            Assert.assertEquals("Unknown type", false);
+            Assert.assertTrue("Unknown type", false);
           }
         } else {
           Assert.assertEquals(true, batch.cols[j].isNull[i]);

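Two genuine bug fixes in this file are worth spelling out: the caught SerDeException previously built a RuntimeException without throwing it, silently swallowing the error, and the byte[] comparison used Object.equals, which for arrays compares references rather than contents. A runnable illustration of the array pitfall:

    import java.util.Arrays;

    public class ArrayEqualsPitfall {
      public static void main(String[] args) {
        byte[] a = {1, 2, 3};
        byte[] b = {1, 2, 3};
        System.out.println(a.equals(b));         // false: arrays inherit reference equality
        System.out.println(Arrays.equals(a, b)); // true: element-by-element comparison
      }
    }
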
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java Sat Mar 28 14:03:43 2015
@@ -46,9 +46,6 @@ public class TestVectorTypeCasts {
   // Number of nanoseconds in one second
   private static final long NANOS_PER_SECOND = 1000000000;
 
-  // Number of microseconds in one second
-  private static final long MICROS_PER_SECOND = 1000000;
-
   @Test
   public void testVectorCastLongToDouble() {
     VectorizedRowBatch b = TestVectorMathFunctions.getVectorizedRowBatchLongInDoubleOut();
@@ -111,8 +108,8 @@ public class TestVectorTypeCasts {
     b.cols[0].noNulls = true;
     VectorExpression expr = new CastLongToTimestampViaLongToLong(0, 1);
     expr.evaluate(b);
-    Assert.assertEquals(-2 * MICROS_PER_SECOND, resultV.vector[0]);
-    Assert.assertEquals(2 * MICROS_PER_SECOND, resultV.vector[1]);
+    Assert.assertEquals(-2 * NANOS_PER_SECOND, resultV.vector[0]);
+    Assert.assertEquals(2 * NANOS_PER_SECOND, resultV.vector[1]);
   }
 
   @Test

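The expected values here move from microseconds to nanoseconds, and the now-unused MICROS_PER_SECOND constant goes away; the cast from a long count of seconds to a timestamp evidently scales to nanosecond resolution. The arithmetic behind the new assertions:

    long NANOS_PER_SECOND = 1000000000L;
    System.out.println(2 * NANOS_PER_SECOND);  //  2000000000, the new expected value
    System.out.println(-2 * NANOS_PER_SECOND); // -2000000000
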
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java Sat Mar 28 14:03:43 2015
@@ -20,8 +20,9 @@ package org.apache.hadoop.hive.ql.exec.v
 
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collection;
 import java.util.List;
+import java.util.concurrent.Future;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -36,17 +37,17 @@ import org.apache.hadoop.hive.ql.plan.ap
 public class FakeCaptureOutputOperator extends Operator<FakeCaptureOutputDesc>
   implements Serializable {
   private static final long serialVersionUID = 1L;
-  
+
   public interface OutputInspector {
     public void inspectRow(Object row, int tag) throws HiveException;
   }
-  
+
   private OutputInspector outputInspector;
-  
+
   public void setOutputInspector(OutputInspector outputInspector) {
     this.outputInspector = outputInspector;
   }
-  
+
   public OutputInspector getOutputInspector() {
     return outputInspector;
   }
@@ -67,18 +68,20 @@ public class FakeCaptureOutputOperator e
     return out;
   }
 
-  
+
   public List<Object> getCapturedRows() {
     return rows;
   }
 
   @Override
-  public void initializeOp(Configuration conf) throws HiveException {
+  public Collection<Future<?>> initializeOp(Configuration conf) throws HiveException {
+    Collection<Future<?>> result = super.initializeOp(conf);
     rows = new ArrayList<Object>();
+    return result;
   }
 
   @Override
-  public void processOp(Object row, int tag) throws HiveException {
+  public void process(Object row, int tag) throws HiveException {
     rows.add(row);
     if (null != outputInspector) {
       outputInspector.inspectRow(row, tag);

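This hunk shows the shape of the Operator API change driving many edits in this commit: initializeOp() now returns a Collection<Future<?>> of asynchronous initialization handles, and overrides are expected to chain to super, do their local setup, and pass the futures through. Condensed from the diff above:

    @Override
    public Collection<Future<?>> initializeOp(Configuration conf) throws HiveException {
      Collection<Future<?>> result = super.initializeOp(conf); // parent may start async work
      rows = new ArrayList<Object>();                          // local setup
      return result;                                           // propagate the futures
    }
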
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java Sat Mar 28 14:03:43 2015
@@ -20,7 +20,9 @@ package org.apache.hadoop.hive.ql.exec.v
 
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
+import java.util.concurrent.Future;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -60,11 +62,12 @@ public class FakeVectorDataSourceOperato
   }
 
   @Override
-  public void initializeOp(Configuration conf) throws HiveException {
+  public Collection<Future<?>> initializeOp(Configuration conf) throws HiveException {
+    return super.initializeOp(conf);
   }
 
   @Override
-  public void processOp(Object row, int tag) throws HiveException {
+  public void process(Object row, int tag) throws HiveException {
     for (VectorizedRowBatch unit: source) {
       forward(unit, null);
     }

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java Sat Mar 28 14:03:43 2015
@@ -49,6 +49,23 @@ public class TestHooks {
   }
 
   @Test
+  public void testRedactLogString() throws Exception {
+    HiveConf conf = new HiveConf(TestHooks.class);
+    String str;
+
+    HiveConf.setVar(conf, HiveConf.ConfVars.QUERYREDACTORHOOKS, SimpleQueryRedactor.class.getName());
+
+    str = HookUtils.redactLogString(null, null);
+    assertEquals(str, null);
+
+    str = HookUtils.redactLogString(conf, null);
+    assertEquals(str, null);
+
+    str = HookUtils.redactLogString(conf, "select 'XXX' from t1");
+    assertEquals(str, "select 'AAA' from t1");
+  }
+
+  @Test
   public void testQueryRedactor() throws Exception {
     HiveConf conf = new HiveConf(TestHooks.class);
     HiveConf.setVar(conf, HiveConf.ConfVars.QUERYREDACTORHOOKS,

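The new test exercises HookUtils.redactLogString with the SimpleQueryRedactor already used by testQueryRedactor, which evidently rewrites the literal XXX to AAA. A guess at what such a redactor might look like; the redactQuery method name is an assumption about the Redactor hook interface, not something this diff confirms:

    // Hypothetical sketch; the actual SimpleQueryRedactor lives elsewhere in TestHooks.
    public static class SimpleQueryRedactor extends Redactor {
      @Override
      public String redactQuery(String query) {
        return query.replaceAll("XXX", "AAA"); // scrub the sensitive literal
      }
    }
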
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java Sat Mar 28 14:03:43 2015
@@ -27,7 +27,6 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.PrintStream;
-import java.math.BigDecimal;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.Arrays;
@@ -203,7 +202,7 @@ public class TestFileDump {
         10000L,
         4.0f,
         20.0,
-        HiveDecimal.create(new BigDecimal(4.2222)),
+        HiveDecimal.create("4.2222"),
         new Timestamp(1416967764000L),
         new Date(1416967764000L),
         "string",
@@ -222,7 +221,7 @@ public class TestFileDump {
         20000L,
         8.0f,
         40.0,
-        HiveDecimal.create(new BigDecimal(2.2222)),
+        HiveDecimal.create("2.2222"),
         new Timestamp(1416967364000L),
         new Date(1411967764000L),
         "abcd",
@@ -234,7 +233,6 @@ public class TestFileDump {
 
     writer.close();
     PrintStream origOut = System.out;
-    String outputFilename = "orc-file-dump.out";
     ByteArrayOutputStream myOut = new ByteArrayOutputStream();
 
     // replace stdout and run command
@@ -245,9 +243,8 @@ public class TestFileDump {
 
     String[] lines = myOut.toString().split("\n");
     // Don't be fooled by the big space in the middle, this line is quite long
-    assertEquals("{\"b\":true,\"bt\":10,\"s\":100,\"i\":1000,\"l\":10000,\"f\":4,\"d\":20,\"de\":\"4.222199999999999953\",\"t\":\"2014-11-25 18:09:24\",\"dt\":\"2014-11-25\",\"str\":\"string\",\"c\":\"hello                                                                                                                                                                                                                                                          \",\"vc\":\"hello\",\"m\":[{\"_key\":\"k1\",\"_value\":\"v1\"}],\"a\":[100,200],\"st\":{\"i\":10,\"s\":\"foo\"}}", lines[0]);
-    assertEquals("{\"b\":false,\"bt\":20,\"s\":200,\"i\":2000,\"l\":20000,\"f\":8,\"d\":40,\"de\":\"2.222199999999999953\",\"t\":\"2014-11-25 18:02:44\",\"dt\":\"2014-09-28\",\"str\":\"abcd\",\"c\":\"world                                                                                                                                                                                                                                                          \",\"vc\":\"world\",\"m\":[{\"_key\":\"k3\",\"_value\":\"v3\"}],\"a\":[200,300],\"st\":{\"i\":20,\"s\":\"bar\"}}", lines[1]);
-
+    assertEquals("{\"b\":true,\"bt\":10,\"s\":100,\"i\":1000,\"l\":10000,\"f\":4,\"d\":20,\"de\":\"4.2222\",\"t\":\"2014-11-25 18:09:24\",\"dt\":\"2014-11-25\",\"str\":\"string\",\"c\":\"hello                                                                                                                                                                                                                                                          \",\"vc\":\"hello\",\"m\":[{\"_key\":\"k1\",\"_value\":\"v1\"}],\"a\":[100,200],\"st\":{\"i\":10,\"s\":\"foo\"}}", lines[0]);
+    assertEquals("{\"b\":false,\"bt\":20,\"s\":200,\"i\":2000,\"l\":20000,\"f\":8,\"d\":40,\"de\":\"2.2222\",\"t\":\"2014-11-25 18:02:44\",\"dt\":\"2014-09-28\",\"str\":\"abcd\",\"c\":\"world                                                                                                                                                                                                                                                          \",\"vc\":\"world\",\"m\":[{\"_key\":\"k3\",\"_value\":\"v3\"}],\"a\":[200,300],\"st\":{\"i\":20,\"s\":\"bar\"}}", lines[1]);
   }
 
   // Test that if the fraction of rows that have distinct strings is greater than the configured

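The switch from HiveDecimal.create(new BigDecimal(4.2222)) to HiveDecimal.create("4.2222") explains the cleaner expected output below it: new BigDecimal(double) preserves the exact binary value of the double literal, which is not exactly 4.2222, whereas the String constructor is exact. A runnable demonstration:

    import java.math.BigDecimal;

    public class DecimalConstruction {
      public static void main(String[] args) {
        // Prints the full binary expansion, beginning 4.2221999999999999531...
        System.out.println(new BigDecimal(4.2222));
        // Prints exactly 4.2222
        System.out.println(new BigDecimal("4.2222"));
      }
    }
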
Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInStream.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInStream.java?rev=1669775&r1=1669774&r2=1669775&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInStream.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInStream.java Sat Mar 28 14:03:43 2015
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.ql.io.orc;
 
-import org.junit.Test;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.fail;
 
 import java.io.DataInputStream;
 import java.io.DataOutput;
@@ -28,8 +29,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.fail;
+import org.junit.Test;
 
 public class TestInStream {
 


