hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hashut...@apache.org
Subject svn commit: r1667850 [2/4] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hive/common/util/ common/src/test/org/apache/hadoop/hive/common/type/ common/src/test/org/apache/hive/common/util/ ql/src/java/o...
Date Thu, 19 Mar 2015 19:05:29 GMT
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Thu Mar 19 19:05:28 2015
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.math.BigDecimal;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
@@ -34,6 +35,8 @@ import org.apache.commons.lang.StringUti
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
@@ -75,6 +78,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hive.common.util.DateUtils;
 
 import com.google.common.collect.Lists;
 
@@ -175,9 +179,18 @@ public class TypeCheckProcFactory {
         + HiveParser.KW_FALSE + "%"), tf.getBoolExprProcessor());
     opRules.put(new RuleRegExp("R5", HiveParser.TOK_DATELITERAL + "%|"
         + HiveParser.TOK_TIMESTAMPLITERAL + "%"), tf.getDateTimeExprProcessor());
-    opRules.put(new RuleRegExp("R6", HiveParser.TOK_TABLE_OR_COL + "%"),
+    opRules.put(new RuleRegExp("R6",
+        HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_YEAR_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_MONTH_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_DAY_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_HOUR_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_MINUTE_LITERAL + "%|"
+        + HiveParser.TOK_INTERVAL_SECOND_LITERAL + "%"), tf.getIntervalExprProcessor());
+    opRules.put(new RuleRegExp("R7", HiveParser.TOK_TABLE_OR_COL + "%"),
         tf.getColumnExprProcessor());
-    opRules.put(new RuleRegExp("R7", HiveParser.TOK_SUBQUERY_OP + "%"),
+    opRules.put(new RuleRegExp("R8", HiveParser.TOK_SUBQUERY_OP + "%"),
         tf.getSubQueryExprProcessor());
 
     // The dispatcher fires the processor corresponding to the closest matching
@@ -472,6 +485,79 @@ public class TypeCheckProcFactory {
   }
 
   /**
+   * Processor for interval constants.
+   */
+  public static class IntervalExprProcessor implements NodeProcessor {
+
+    // Factor for converting the fractional part of a seconds literal to nanoseconds.
+    private static final BigDecimal NANOS_PER_SEC_BD = new BigDecimal(DateUtils.NANOS_PER_SEC);
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+
+      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
+      // Bail out if an earlier processor already recorded an error.
+      if (ctx.getError() != null) {
+        return null;
+      }
+
+      // Resolve against GROUP BY expressions first; if matched, reuse that descriptor.
+      ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
+      if (desc != null) {
+        return desc;
+      }
+
+      ASTNode expr = (ASTNode) nd;
+      String intervalString = BaseSemanticAnalyzer.stripQuotes(expr.getText());
+
+      // Get the string value and convert to a Interval value.
+      // Single-field literals (YEAR, MONTH, DAY, HOUR, MINUTE) parse as plain ints;
+      // YEAR_MONTH and DAY_TIME delegate to the interval types' own valueOf parsers.
+      try {
+        switch (expr.getType()) {
+          case HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
+                HiveIntervalYearMonth.valueOf(intervalString));
+          case HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                HiveIntervalDayTime.valueOf(intervalString));
+          case HiveParser.TOK_INTERVAL_YEAR_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
+                new HiveIntervalYearMonth(Integer.parseInt(intervalString), 0));
+          case HiveParser.TOK_INTERVAL_MONTH_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
+                new HiveIntervalYearMonth(0, Integer.parseInt(intervalString)));
+          case HiveParser.TOK_INTERVAL_DAY_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                new HiveIntervalDayTime(Integer.parseInt(intervalString), 0, 0, 0, 0));
+          case HiveParser.TOK_INTERVAL_HOUR_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                new HiveIntervalDayTime(0, Integer.parseInt(intervalString), 0, 0, 0));
+          case HiveParser.TOK_INTERVAL_MINUTE_LITERAL:
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                new HiveIntervalDayTime(0, 0, Integer.parseInt(intervalString), 0, 0));
+          case HiveParser.TOK_INTERVAL_SECOND_LITERAL:
+            // Split the decimal literal into whole seconds and fractional nanos,
+            // e.g. "1.5" -> 1 second + 500000000 nanos. intValueExact() throws on
+            // overflow, which is caught and reported below.
+            BigDecimal bd = new BigDecimal(intervalString);
+            BigDecimal bdSeconds = new BigDecimal(bd.toBigInteger());
+            BigDecimal bdNanos = bd.subtract(bdSeconds);
+            return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
+                new HiveIntervalDayTime(0, 0, 0, bdSeconds.intValueExact(),
+                    bdNanos.multiply(NANOS_PER_SEC_BD).intValue()));
+          default:
+            // Should be unreachable: the dispatch rule only matches the tokens above.
+            throw new IllegalArgumentException("Invalid time literal type " + expr.getType());
+        }
+      } catch (Exception err) {
+        // Wrap any parse/overflow failure with the offending literal text.
+        throw new SemanticException(
+            "Unable to convert interval literal '" + intervalString + "' to interval value.", err);
+      }
+    }
+  }
+
+  /**
+   * Creates the processor used for interval literal expressions.
+   *
+   * @return a new {@link IntervalExprProcessor}
+   */
+  public IntervalExprProcessor getIntervalExprProcessor() {
+    return new IntervalExprProcessor();
+  }
+
+  /**
    * Processor for table columns.
    */
   public static class ColumnExprProcessor implements NodeProcessor {
@@ -619,6 +705,10 @@ public class TypeCheckProcFactory {
           serdeConstants.DATE_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP,
           serdeConstants.TIMESTAMP_TYPE_NAME);
+      conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_YEAR_MONTH,
+          serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
+      conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_DAY_TIME,
+          serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
       conversionFunctionTextHashMap.put(HiveParser.TOK_DECIMAL,
           serdeConstants.DECIMAL_TYPE_NAME);
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseArithmetic.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseArithmetic.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseArithmetic.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseArithmetic.java Thu Mar 19 19:05:28 2015
@@ -58,12 +58,15 @@ public abstract class GenericUDFBaseArit
     // Determine if we are dealing with a numeric or date arithmetic operation
     boolean isDateTimeOp = false;
     for (int idx = 0; idx < 2; ++idx) {
-      if (arguments[idx].getCategory() == Category.PRIMITIVE) {
-        if (PrimitiveGrouping.DATE_GROUP == PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
-            ((PrimitiveObjectInspector) arguments[idx]).getPrimitiveCategory())) {
+      switch (((PrimitiveObjectInspector) arguments[idx]).getPrimitiveCategory()) {
+        case DATE:
+        case TIMESTAMP:
+        case INTERVAL_YEAR_MONTH:
+        case INTERVAL_DAY_TIME:
           isDateTimeOp = true;
           break;
-        }
+        default:
+          break;
       }
     }
 

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseDTI.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseDTI.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseDTI.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseDTI.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,17 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+
+/**
+ * Base class for binary UDFs operating on date/time and interval (DTI) operands.
+ * Holds the operand object inspectors and a helper for matching the
+ * (left, right) primitive-category combination of the two arguments.
+ */
+public abstract class GenericUDFBaseDTI extends GenericUDFBaseBinary {
+  // Object inspectors for the left (index 0) and right (index 1) operands;
+  // subclasses populate this in initialize().
+  protected transient PrimitiveObjectInspector[] inputOIs;
+
+  /**
+   * Checks whether the operands match the given type combination.
+   *
+   * @param leftType expected primitive category of the first operand
+   * @param rightType expected primitive category of the second operand
+   * @return true iff both operands match the expected categories
+   */
+  protected boolean checkArgs(PrimitiveCategory leftType, PrimitiveCategory rightType) {
+    return inputOIs[0].getPrimitiveCategory() == leftType
+        && inputOIs[1].getPrimitiveCategory() == rightType;
+  }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java Thu Mar 19 19:05:28 2015
@@ -24,6 +24,8 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -53,6 +55,10 @@ public abstract class GenericUDFBaseUnar
   protected FloatWritable floatWritable = new FloatWritable();
   protected DoubleWritable doubleWritable = new DoubleWritable();
   protected HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
+  protected HiveIntervalYearMonthWritable intervalYearMonthWritable =
+      new HiveIntervalYearMonthWritable();
+  protected HiveIntervalDayTimeWritable intervalDayTimeWritable =
+      new HiveIntervalDayTimeWritable();
 
   public GenericUDFBaseUnary() {
     opName = getClass().getSimpleName();
@@ -74,11 +80,13 @@ public abstract class GenericUDFBaseUnar
     }
 
     inputOI = (PrimitiveObjectInspector) arguments[0];
-    if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())) {
+    if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())
+        && (inputOI.getTypeInfo() != TypeInfoFactory.intervalDayTimeTypeInfo)
+        && (inputOI.getTypeInfo() != TypeInfoFactory.intervalYearMonthTypeInfo)) {
       throw new UDFArgumentTypeException(0, "The "
           + GenericUDFUtils.getOrdinal(1)
-          + " argument of " + opName + "  is expected to a "
-          + "numeric type, but "
+          + " argument of " + opName + "  is expected to be a "
+          + "numeric or interval type, but "
           + inputOI.getTypeName() + " is found");
     }
 

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,258 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.common.util.DateTimeMath;
+
+@Description(name = "-", value = "a _FUNC_ b - Returns the difference a-b")
+public class GenericUDFOPDTIMinus extends GenericUDFBaseDTI {
+
+  protected transient DateTimeMath dtm = new DateTimeMath();
+  // The (leftType - rightType) combination resolved in initialize().
+  protected transient OperationType minusOpType;
+  // Argument positions of the interval operand(s) and date/timestamp operand(s).
+  protected transient int intervalArg1Idx;
+  protected transient int intervalArg2Idx;
+  protected transient int dtArg1Idx;
+  protected transient int dtArg2Idx;
+  // NOTE(review): these converters are assigned in initialize() but evaluate()
+  // reads operands via PrimitiveObjectInspectorUtils instead — confirm whether
+  // they are still needed.
+  protected transient Converter dt1Converter;
+  protected transient Converter dt2Converter;
+
+  // Reusable writables for operation results.
+  protected transient DateWritable dateResult = new DateWritable();
+  protected transient TimestampWritable timestampResult = new TimestampWritable();
+  protected transient HiveIntervalYearMonthWritable intervalYearMonthResult =
+      new HiveIntervalYearMonthWritable();
+  protected transient HiveIntervalDayTimeWritable intervalDayTimeResult =
+      new HiveIntervalDayTimeWritable();
+
+  // Supported subtraction type combinations.
+  enum OperationType {
+    INTERVALYM_MINUS_INTERVALYM,
+    DATE_MINUS_INTERVALYM,
+    TIMESTAMP_MINUS_INTERVALYM,
+    INTERVALDT_MINUS_INTERVALDT,
+    TIMESTAMP_MINUS_INTERVALDT,
+    TIMESTAMP_MINUS_TIMESTAMP
+  };
+
+  public GenericUDFOPDTIMinus() {
+    this.opName = getClass().getSimpleName();
+    this.opDisplayName = "-";
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+
+    if (arguments.length != 2) {
+      throw new UDFArgumentException(opName + " requires two arguments.");
+    }
+
+    PrimitiveObjectInspector resultOI = null;
+
+    for (int i = 0; i < 2; i++) {
+      Category category = arguments[i].getCategory();
+      if (category != Category.PRIMITIVE) {
+        throw new UDFArgumentTypeException(i, "The "
+            + GenericUDFUtils.getOrdinal(i + 1)
+            + " argument of " + opName + "  is expected to be a "
+            + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
+            + category.toString().toLowerCase() + " is found");
+      }
+    }
+
+    inputOIs = new PrimitiveObjectInspector[] {
+      (PrimitiveObjectInspector) arguments[0],
+      (PrimitiveObjectInspector) arguments[1]
+    };
+    PrimitiveObjectInspector leftOI = inputOIs[0];
+    PrimitiveObjectInspector rightOI = inputOIs[1];
+
+    // Allowed operations:
+    // IntervalYearMonth - IntervalYearMonth = IntervalYearMonth
+    // Date - IntervalYearMonth = Date (operands not reversible)
+    // Timestamp - IntervalYearMonth = Timestamp (operands not reversible)
+    // IntervalDayTime - IntervalDayTime = IntervalDayTime
+    // Date - IntervalDayTime = Timestamp (operands not reversible)
+    // Timestamp - IntervalDayTime = Timestamp (operands not reversible)
+    // Timestamp - Timestamp = IntervalDayTime
+    // Date - Date = IntervalDayTime
+    // Timestamp - Date = IntervalDayTime (operands reversible)
+    if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
+      minusOpType = OperationType.INTERVALYM_MINUS_INTERVALYM;
+      intervalArg1Idx = 0;
+      intervalArg2Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.intervalYearMonthTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
+      minusOpType = OperationType.DATE_MINUS_INTERVALYM;
+      dtArg1Idx = 0;
+      intervalArg1Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.dateTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
+      minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALYM;
+      dtArg1Idx = 0;
+      intervalArg1Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.timestampTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) {
+      minusOpType = OperationType.INTERVALDT_MINUS_INTERVALDT;
+      intervalArg1Idx = 0;
+      intervalArg2Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.intervalDayTimeTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME)
+        || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) {
+      // Date operand is promoted to timestamp for the subtraction.
+      minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALDT;
+      dtArg1Idx = 0;
+      intervalArg1Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.timestampTypeInfo);
+      dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
+    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.DATE)
+        || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.TIMESTAMP)
+        || checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP)
+        || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.DATE)) {
+      // Operands converted to timestamp, result as interval day-time
+      minusOpType = OperationType.TIMESTAMP_MINUS_TIMESTAMP;
+      dtArg1Idx = 0;
+      dtArg2Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.intervalDayTimeTypeInfo);
+      dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
+      // Fixed: converter for the second operand must be built from rightOI,
+      // not leftOI (copy-paste error).
+      dt2Converter = ObjectInspectorConverters.getConverter(rightOI, resultOI);
+    } else {
+      // Unsupported types - error
+      List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
+      argTypeInfos.add(leftOI.getTypeInfo());
+      argTypeInfos.add(rightOI.getTypeInfo());
+      throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
+    }
+
+    return resultOI;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    // Dispatch on the operation type resolved in initialize(); each case reads
+    // its operands through the matching object inspectors and delegates the
+    // arithmetic to DateTimeMath.
+    switch (minusOpType) {
+      case INTERVALYM_MINUS_INTERVALYM: {
+        HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        HiveIntervalYearMonth iym2 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(
+            arguments[intervalArg2Idx].get(), inputOIs[intervalArg2Idx]);
+        return handleIntervalYearMonthResult(dtm.subtract(iym1, iym2));
+      }
+      case DATE_MINUS_INTERVALYM: {
+        HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        Date dt1 = PrimitiveObjectInspectorUtils.getDate(
+            arguments[dtArg1Idx].get(), inputOIs[dtArg1Idx]);
+        return handleDateResult(dtm.subtract(dt1, iym1));
+      }
+      case TIMESTAMP_MINUS_INTERVALYM: {
+        HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp(
+            arguments[dtArg1Idx].get(), inputOIs[dtArg1Idx]);
+        return handleTimestampResult(dtm.subtract(ts1, iym1));
+      }
+      case INTERVALDT_MINUS_INTERVALDT: {
+        HiveIntervalDayTime idt1 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        HiveIntervalDayTime idt2 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(
+            arguments[intervalArg2Idx].get(), inputOIs[intervalArg2Idx]);
+        return handleIntervalDayTimeResult(dtm.subtract(idt1, idt2));
+      }
+      case TIMESTAMP_MINUS_INTERVALDT: {
+        HiveIntervalDayTime idt1 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp(
+            arguments[dtArg1Idx].get(), inputOIs[dtArg1Idx]);
+        return handleTimestampResult(dtm.subtract(ts1, idt1));
+      }
+      case TIMESTAMP_MINUS_TIMESTAMP: {
+        Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp(
+            arguments[dtArg1Idx].get(), inputOIs[dtArg1Idx]);
+        Timestamp ts2 = PrimitiveObjectInspectorUtils.getTimestamp(
+            arguments[dtArg2Idx].get(), inputOIs[dtArg2Idx]);
+        return handleIntervalDayTimeResult(dtm.subtract(ts1, ts2));
+      }
+      default:
+        // Fixed message: this is the minus UDF (was "Unknown PlusOpType").
+        throw new HiveException("Unknown MinusOpType " + minusOpType);
+    }
+  }
+
+  /** Copies a date result into the reusable writable; null stays null. */
+  protected DateWritable handleDateResult(Date result) {
+    if (result == null) {
+      return null;
+    }
+    dateResult.set(result);
+    return dateResult;
+  }
+
+  /** Copies a timestamp result into the reusable writable; null stays null. */
+  protected TimestampWritable handleTimestampResult(Timestamp result) {
+    if (result == null) {
+      return null;
+    }
+    timestampResult.set(result);
+    return timestampResult;
+  }
+
+  /** Copies a year-month interval result into the reusable writable; null stays null. */
+  protected HiveIntervalYearMonthWritable handleIntervalYearMonthResult(
+      HiveIntervalYearMonth result) {
+    if (result == null) {
+      return null;
+    }
+    intervalYearMonthResult.set(result);
+    return intervalYearMonthResult;
+  }
+
+  /** Copies a day-time interval result into the reusable writable; null stays null. */
+  protected HiveIntervalDayTimeWritable handleIntervalDayTimeResult(
+      HiveIntervalDayTime result) {
+    if (result == null) {
+      return null;
+    }
+    intervalDayTimeResult.set(result);
+    return intervalDayTimeResult;
+  }
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.common.util.DateTimeMath;
+
+@Description(name = "+", value = "a _FUNC_ b - Returns a+b")
+public class GenericUDFOPDTIPlus extends GenericUDFBaseDTI {
+
+  protected transient DateTimeMath dtm = new DateTimeMath();
+  // The (leftType + rightType) combination resolved in initialize().
+  protected transient OperationType plusOpType;
+  // Argument positions of the interval operand(s) and the date/timestamp operand.
+  protected transient int intervalArg1Idx;
+  protected transient int intervalArg2Idx;
+  protected transient int dtArgIdx;
+  protected transient Converter dtConverter;
+
+  // Reusable writables for operation results.
+  protected transient TimestampWritable timestampResult = new TimestampWritable();
+  protected transient DateWritable dateResult = new DateWritable();
+  protected transient HiveIntervalDayTimeWritable intervalDayTimeResult =
+      new HiveIntervalDayTimeWritable();
+  protected transient HiveIntervalYearMonthWritable intervalYearMonthResult =
+      new HiveIntervalYearMonthWritable();
+
+  // Supported addition type combinations (addition is commutative, so a single
+  // constant covers both operand orders, e.g. date+interval and interval+date).
+  enum OperationType {
+    INTERVALYM_PLUS_INTERVALYM,
+    INTERVALYM_PLUS_DATE,
+    INTERVALYM_PLUS_TIMESTAMP,
+    INTERVALDT_PLUS_INTERVALDT,
+    INTERVALDT_PLUS_TIMESTAMP,
+  };
+
+  public GenericUDFOPDTIPlus() {
+    // Operator name is used in error messages; display name in query plans.
+    this.opName = getClass().getSimpleName();
+    this.opDisplayName = "+";
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+
+    if (arguments.length != 2) {
+      throw new UDFArgumentException(opName + " requires two arguments.");
+    }
+
+    PrimitiveObjectInspector resultOI = null;
+
+    for (int i = 0; i < 2; i++) {
+      Category category = arguments[i].getCategory();
+      if (category != Category.PRIMITIVE) {
+        throw new UDFArgumentTypeException(i, "The "
+            + GenericUDFUtils.getOrdinal(i + 1)
+            + " argument of " + opName + "  is expected to a "
+            + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
+            + category.toString().toLowerCase() + " is found");
+      }
+    }
+
+    inputOIs = new PrimitiveObjectInspector[] {
+      (PrimitiveObjectInspector) arguments[0],
+      (PrimitiveObjectInspector) arguments[1]
+    };
+    PrimitiveObjectInspector leftOI = inputOIs[0];
+    PrimitiveObjectInspector rightOI = inputOIs[1];
+
+    // Allowed operations:
+    // IntervalYearMonth + IntervalYearMonth = IntervalYearMonth
+    // IntervalYearMonth + Date = Date (operands reversible)
+    // IntervalYearMonth + Timestamp = Timestamp (operands reversible)
+    // IntervalDayTime + IntervalDayTime = IntervalDayTime
+    // IntervalDayTime + Date = Timestamp (operands reversible)
+    // IntervalDayTime + Timestamp = Timestamp (operands reversible)
+    if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
+      plusOpType = OperationType.INTERVALYM_PLUS_INTERVALYM;
+      intervalArg1Idx = 0;
+      intervalArg2Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.intervalYearMonthTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
+      plusOpType = OperationType.INTERVALYM_PLUS_DATE;
+      dtArgIdx = 0;
+      intervalArg1Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.dateTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.DATE)) {
+      plusOpType = OperationType.INTERVALYM_PLUS_DATE;
+      intervalArg1Idx = 0;
+      dtArgIdx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.dateTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
+      plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP;
+      dtArgIdx = 0;
+      intervalArg1Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.timestampTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.TIMESTAMP)) {
+      plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP;
+      intervalArg1Idx = 0;
+      dtArgIdx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.timestampTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) {
+      plusOpType = OperationType.INTERVALDT_PLUS_INTERVALDT;
+      intervalArg1Idx = 0;
+      intervalArg2Idx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.intervalDayTimeTypeInfo);
+    } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.DATE)
+        || checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.TIMESTAMP)) {
+      plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP;
+      intervalArg1Idx = 0;
+      dtArgIdx = 1;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.timestampTypeInfo);
+      dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
+    } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME)
+        || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) {
+      plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP;
+      intervalArg1Idx = 1;
+      dtArgIdx = 0;
+      resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+          TypeInfoFactory.timestampTypeInfo);
+      dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
+    } else {
+      // Unsupported types - error
+      List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
+      argTypeInfos.add(leftOI.getTypeInfo());
+      argTypeInfos.add(rightOI.getTypeInfo());
+      throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
+    }
+
+    return resultOI;
+  }
+
+  /**
+   * Evaluates the plus operation for the operand pairing chosen in initialize().
+   * Operand values are extracted through the cached input object inspectors
+   * (inputOIs) using the argument indexes recorded at initialize time, the
+   * arithmetic is delegated to dtm.add(...) (a date/time math helper declared
+   * outside this view), and the result is wrapped in a reusable writable by the
+   * corresponding handle*Result helper. A null operand propagates to a null
+   * result via those helpers.
+   */
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    switch (plusOpType) {
+      // interval year-month + interval year-month -> interval year-month
+      case INTERVALYM_PLUS_INTERVALYM: {
+        HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        HiveIntervalYearMonth iym2 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(
+            arguments[intervalArg2Idx].get(), inputOIs[intervalArg2Idx]);
+        return handleIntervalYearMonthResult(dtm.add(iym1, iym2));
+      }
+      // date + interval year-month -> date (dtArgIdx/intervalArg1Idx were set so
+      // either operand order maps here)
+      case INTERVALYM_PLUS_DATE: {
+        HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        Date dt1 = PrimitiveObjectInspectorUtils.getDate(
+            arguments[dtArgIdx].get(), inputOIs[dtArgIdx]);
+        return handleDateResult(dtm.add(dt1, iym1));
+      }
+      // timestamp + interval year-month -> timestamp
+      case INTERVALYM_PLUS_TIMESTAMP: {
+        HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp(
+            arguments[dtArgIdx].get(), inputOIs[dtArgIdx]);
+        return handleTimestampResult(dtm.add(ts1, iym1));
+      }
+      // interval day-time + interval day-time -> interval day-time
+      case INTERVALDT_PLUS_INTERVALDT: {
+        HiveIntervalDayTime idt1 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        HiveIntervalDayTime idt2 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(
+            arguments[intervalArg2Idx].get(), inputOIs[intervalArg2Idx]);
+        return handleIntervalDayTimeResult(dtm.add(idt1, idt2));
+      }
+      // (date|timestamp) + interval day-time -> timestamp; a date operand was
+      // routed through dtConverter/getTimestamp at the OI level in initialize()
+      case INTERVALDT_PLUS_TIMESTAMP: {
+        HiveIntervalDayTime idt1 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(
+            arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]);
+        Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp(
+            arguments[dtArgIdx].get(), inputOIs[dtArgIdx]);
+        return handleTimestampResult(dtm.add(ts1, idt1));
+      }
+      default:
+        // Should be unreachable: initialize() either sets plusOpType or throws.
+        throw new HiveException("Unknown PlusOpType " + plusOpType);
+    }
+  }
+
+  /**
+   * Wraps a Date result in the reusable DateWritable field, or returns null
+   * when the computed result is null (SQL NULL propagation).
+   */
+  protected DateWritable handleDateResult(Date result) {
+    if (result == null) {
+      return null;
+    }
+    dateResult.set(result);
+    return dateResult;
+  }
+
+  /**
+   * Wraps a Timestamp result in the reusable TimestampWritable field, or
+   * returns null when the computed result is null (SQL NULL propagation).
+   */
+  protected TimestampWritable handleTimestampResult(Timestamp result) {
+    if (result == null) {
+      return null;
+    }
+    timestampResult.set(result);
+    return timestampResult;
+  }
+
+  /**
+   * Wraps a HiveIntervalYearMonth result in the reusable writable field, or
+   * returns null when the computed result is null (SQL NULL propagation).
+   */
+  protected HiveIntervalYearMonthWritable handleIntervalYearMonthResult(
+      HiveIntervalYearMonth result) {
+    if (result == null) {
+      return null;
+    }
+    intervalYearMonthResult.set(result);
+    return intervalYearMonthResult;
+  }
+
+  /**
+   * Wraps a HiveIntervalDayTime result in the reusable writable field, or
+   * returns null when the computed result is null (SQL NULL propagation).
+   */
+  protected HiveIntervalDayTimeWritable handleIntervalDayTimeResult(
+      HiveIntervalDayTime result) {
+    if (result == null) {
+      return null;
+    }
+    intervalDayTimeResult.set(result);
+    return intervalDayTimeResult;
+  }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java Thu Mar 19 19:05:28 2015
@@ -46,7 +46,6 @@ public class GenericUDFOPMinus extends G
 
   @Override
   protected GenericUDF instantiateDTIUDF() {
-    // TODO: implement date-time/interval version of UDF
-    return new GenericUDFOPNumericMinus();
+    return new GenericUDFOPDTIMinus();
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java Thu Mar 19 19:05:28 2015
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus;
@@ -28,6 +30,8 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -81,6 +85,16 @@ public class GenericUDFOPNegative extend
       HiveDecimal dec = ((HiveDecimalWritable)input).getHiveDecimal();
       decimalWritable.set(dec.negate());
       return decimalWritable;
+    case INTERVAL_YEAR_MONTH:
+      HiveIntervalYearMonth intervalYearMonth =
+          ((HiveIntervalYearMonthWritable) input).getHiveIntervalYearMonth();
+      this.intervalYearMonthWritable.set(intervalYearMonth.negate());
+      return this.intervalYearMonthWritable;
+    case INTERVAL_DAY_TIME:
+      HiveIntervalDayTime intervalDayTime =
+          ((HiveIntervalDayTimeWritable) input).getHiveIntervalDayTime();
+      this.intervalDayTimeWritable.set(intervalDayTime.negate());
+      return intervalDayTimeWritable;
     default:
       // Should never happen.
       throw new RuntimeException("Unexpected type in evaluating " + opName + ": " +

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java Thu Mar 19 19:05:28 2015
@@ -52,7 +52,6 @@ public class GenericUDFOPPlus extends Ge
 
   @Override
   protected GenericUDF instantiateDTIUDF() {
-    // TODO: implement date-time/interval version of UDF
-    return new GenericUDFOPNumericPlus();
+    return new GenericUDFOPDTIPlus();
   }
 }

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.HiveIntervalDayTimeConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+*
+* GenericUDFToIntervalDayTime.
+*
+* Example usage:
+* ... CAST(<Interval string> as INTERVAL DAY TO SECOND) ...
+*
+* Creates a HiveIntervalDayTimeWritable object using a
+* PrimitiveObjectInspectorConverter.HiveIntervalDayTimeConverter built from the
+* first argument's object inspector.
+*
+*/
+@Description(name = "interval_day_time",
+  value = "CAST(<string> AS INTERVAL DAY TO SECOND) - Returns the day-time interval represented by the string")
+public class GenericUDFToIntervalDayTime extends GenericUDF {
+
+  private transient PrimitiveObjectInspector argumentOI;
+  private transient HiveIntervalDayTimeConverter tc;
+
+  /**
+   * Validates that at least one primitive argument is supplied and builds the
+   * converter to the writable interval inspector, which is also returned as the
+   * output inspector.
+   */
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    // NOTE(review): the check permits more than one argument; only
+    // arguments[0] is ever used. Confirm whether extra arguments should be
+    // rejected outright.
+    if (arguments.length < 1) {
+      throw new UDFArgumentLengthException(
+          "The function INTERVAL_DAY_TIME requires at least one argument, got "
+          + arguments.length);
+    }
+    try {
+      argumentOI = (PrimitiveObjectInspector) arguments[0];
+    } catch (ClassCastException e) {
+      throw new UDFArgumentException(
+          "The function INTERVAL_DAY_TIME takes only primitive types");
+    }
+
+    tc = new HiveIntervalDayTimeConverter(argumentOI,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
+    return PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector;
+  }
+
+  /** Converts the first argument to a day-time interval; null in, null out. */
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    Object o0 = arguments[0].get();
+    if (o0 == null) {
+      return null;
+    }
+
+    return tc.convert(o0);
+  }
+
+  /** Renders the expression as an explicit CAST for EXPLAIN output. */
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    StringBuilder sb = new StringBuilder();
+    sb.append("CAST( ");
+    sb.append(children[0]);
+    sb.append(" AS INTERVAL DAY TO SECOND)");
+    return sb.toString();
+  }
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.HiveIntervalYearMonthConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+*
+* GenericUDFToIntervalYearMonth.
+*
+* Example usage:
+* ... CAST(<Interval string> as INTERVAL YEAR TO MONTH) ...
+*
+* Creates a HiveIntervalYearMonthWritable object using a
+* PrimitiveObjectInspectorConverter.HiveIntervalYearMonthConverter built from
+* the first argument's object inspector.
+*
+*/
+@Description(name = "interval_year_month",
+  value = "CAST(<string> AS INTERVAL YEAR TO MONTH) - Returns the year-month interval represented by the string")
+public class GenericUDFToIntervalYearMonth extends GenericUDF {
+
+  private transient PrimitiveObjectInspector argumentOI;
+  private transient HiveIntervalYearMonthConverter tc;
+
+  /**
+   * Validates that at least one primitive argument is supplied and builds the
+   * converter to the writable interval inspector, which is also returned as the
+   * output inspector.
+   */
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    // NOTE(review): the check permits more than one argument; only
+    // arguments[0] is ever used. Confirm whether extra arguments should be
+    // rejected outright.
+    if (arguments.length < 1) {
+      throw new UDFArgumentLengthException(
+          "The function INTERVAL_YEAR_MONTH requires at least one argument, got "
+          + arguments.length);
+    }
+    try {
+      argumentOI = (PrimitiveObjectInspector) arguments[0];
+    } catch (ClassCastException e) {
+      throw new UDFArgumentException(
+          "The function INTERVAL_YEAR_MONTH takes only primitive types");
+    }
+
+    tc = new HiveIntervalYearMonthConverter(argumentOI,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector);
+    return PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector;
+  }
+
+  /** Converts the first argument to a year-month interval; null in, null out. */
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    Object o0 = arguments[0].get();
+    if (o0 == null) {
+      return null;
+    }
+
+    return tc.convert(o0);
+  }
+
+  /** Renders the expression as an explicit CAST for EXPLAIN output. */
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    StringBuilder sb = new StringBuilder();
+    sb.append("CAST( ");
+    sb.append(children[0]);
+    sb.append(" AS INTERVAL YEAR TO MONTH)");
+    return sb.toString();
+  }
+}

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java Thu Mar 19 19:05:28 2015
@@ -18,17 +18,26 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -244,4 +253,143 @@ public class TestGenericUDFOPMinus exten
 
     verifyReturnType(new GenericUDFOPMinus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)");
   }
+
+  // interval '3-1' - interval '1-2' = interval '1-11'; result type is
+  // interval_year_month (month borrow across the year boundary).
+  @Test
+  public void testIntervalYearMonthMinusIntervalYearMonth() throws Exception {
+    GenericUDFOPMinus udf = new GenericUDFOPMinus();
+
+    HiveIntervalYearMonthWritable left =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("3-1"));
+    HiveIntervalYearMonthWritable right =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-2"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.intervalYearMonthTypeInfo, oi.getTypeInfo());
+    HiveIntervalYearMonthWritable res = (HiveIntervalYearMonthWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveIntervalYearMonth.valueOf("1-11"), res.getHiveIntervalYearMonth());
+  }
+
+
+  // date '2004-02-15' - interval '2-8' = date '2001-06-15'; date minus a
+  // year-month interval stays a date.
+  @Test
+  public void testDateMinusIntervalYearMonth() throws Exception {
+    GenericUDFOPMinus udf = new GenericUDFOPMinus();
+
+    DateWritable left =
+        new DateWritable(Date.valueOf("2004-02-15"));
+    HiveIntervalYearMonthWritable right =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDateObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
+    DateWritable res = (DateWritable) udf.evaluate(args);
+    Assert.assertEquals(Date.valueOf("2001-06-15"), res.get());
+  }
+
+  // timestamp minus a year-month interval stays a timestamp; the nanosecond
+  // part (.123456789) must be preserved.
+  @Test
+  public void testTimestampMinusIntervalYearMonth() throws Exception {
+    GenericUDFOPMinus udf = new GenericUDFOPMinus();
+
+    TimestampWritable left =
+        new TimestampWritable(Timestamp.valueOf("2004-01-15 01:02:03.123456789"));
+    HiveIntervalYearMonthWritable right =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    Assert.assertEquals(Timestamp.valueOf("2001-11-15 01:02:03.123456789"), res.getTimestamp());
+  }
+
+  // interval '2 2:3:4.567' - interval '1 2:3:4' = interval '1 0:0:0.567';
+  // result type is interval_day_time.
+  @Test
+  public void testIntervalDayTimeMinusIntervalDayTime() throws Exception {
+    GenericUDFOPMinus udf = new GenericUDFOPMinus();
+
+    HiveIntervalDayTimeWritable left =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("2 2:3:4.567"));
+    HiveIntervalDayTimeWritable right =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo, oi.getTypeInfo());
+    HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveIntervalDayTime.valueOf("1 0:0:0.567"), res.getHiveIntervalDayTime());
+  }
+
+  // timestamp minus a day-time interval stays a timestamp; subtracting the
+  // exact fractional part lands on a whole second.
+  @Test
+  public void testTimestampMinusIntervalDayTime() throws Exception {
+    GenericUDFOPMinus udf = new GenericUDFOPMinus();
+
+    TimestampWritable left =
+        new TimestampWritable(Timestamp.valueOf("2001-01-02 2:3:4.567"));
+    HiveIntervalDayTimeWritable right =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    Assert.assertEquals(Timestamp.valueOf("2001-01-01 00:00:00"), res.getTimestamp());
+  }
+
+  // date minus a day-time interval widens the result to timestamp (the
+  // sub-day part of the interval cannot be represented by a date).
+  @Test
+  public void testDateMinusIntervalDayTime() throws Exception {
+    GenericUDFOPMinus udf = new GenericUDFOPMinus();
+
+    DateWritable left =
+        new DateWritable(Date.valueOf("2001-01-01"));
+    HiveIntervalDayTimeWritable right =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 0:0:0.555"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDateObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    Assert.assertEquals(Timestamp.valueOf("2000-12-30 23:59:59.445"), res.getTimestamp());
+  }
 }

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java Thu Mar 19 19:05:28 2015
@@ -18,17 +18,26 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Date;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -250,4 +259,236 @@ public class TestGenericUDFOPPlus extend
 
     verifyReturnType(new GenericUDFOPPlus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)");
   }
+
+  @Test
+  public void testIntervalYearMonthPlusIntervalYearMonth() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    HiveIntervalYearMonthWritable left =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-2"));
+    HiveIntervalYearMonthWritable right =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-11"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.intervalYearMonthTypeInfo, oi.getTypeInfo());
+    HiveIntervalYearMonthWritable res = (HiveIntervalYearMonthWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveIntervalYearMonth.valueOf("3-1"), res.getHiveIntervalYearMonth());
+  }
+
+  @Test
+  public void testIntervalYearMonthPlusDate() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    HiveIntervalYearMonthWritable left =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
+    DateWritable right =
+        new DateWritable(Date.valueOf("2001-06-15"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
+        PrimitiveObjectInspectorFactory.writableDateObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
+    DateWritable res = (DateWritable) udf.evaluate(args);
+    Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
+  }
+
+  @Test
+  public void testDatePlusIntervalYearMonth() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    DateWritable left =
+        new DateWritable(Date.valueOf("2001-06-15"));
+    HiveIntervalYearMonthWritable right =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDateObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
+    DateWritable res = (DateWritable) udf.evaluate(args);
+    Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
+  }
+
+  @Test
+  public void testIntervalYearMonthPlusTimestamp() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    HiveIntervalYearMonthWritable left =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
+    TimestampWritable right =
+        new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
+        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"), res.getTimestamp());
+  }
+
+  @Test
+  public void testTimestampPlusIntervalYearMonth() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    TimestampWritable left =
+        new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
+    HiveIntervalYearMonthWritable right =
+        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"), res.getTimestamp());
+  }
+
+  @Test
+  public void testIntervalDayTimePlusIntervalDayTime() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    HiveIntervalDayTimeWritable left =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 0:0:0.567"));
+    HiveIntervalDayTimeWritable right =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo, oi.getTypeInfo());
+    HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) udf.evaluate(args);
+    Assert.assertEquals(HiveIntervalDayTime.valueOf("2 2:3:4.567"), res.getHiveIntervalDayTime());
+  }
+
+  @Test
+  public void testIntervalDayTimePlusTimestamp() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    HiveIntervalDayTimeWritable left =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
+    TimestampWritable right =
+        new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
+        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
+  }
+
+  @Test
+  public void testTimestampPlusIntervalDayTime() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    TimestampWritable left =
+        new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
+    HiveIntervalDayTimeWritable right =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
+  }
+
+  @Test
+  public void testIntervalDayTimePlusDate() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    HiveIntervalDayTimeWritable left =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
+    DateWritable right =
+        new DateWritable(Date.valueOf("2001-01-01"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
+        PrimitiveObjectInspectorFactory.writableDateObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    // Date + day-time interval = timestamp
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
+  }
+
+  @Test
+  public void testDatePlusIntervalDayTime() throws Exception {
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
+
+    DateWritable left =
+        new DateWritable(Date.valueOf("2001-01-01"));
+    HiveIntervalDayTimeWritable right =
+        new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
+    ObjectInspector[] inputOIs = {
+        PrimitiveObjectInspectorFactory.writableDateObjectInspector,
+        PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector
+    };
+    DeferredObject[] args = {
+        new DeferredJavaObject(left),
+        new DeferredJavaObject(right),
+    };
+
+    // Date + day-time interval = timestamp
+    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+    Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
+    TimestampWritable res = (TimestampWritable) udf.evaluate(args);
+    Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
+  }
 }

Added: hive/trunk/ql/src/test/queries/clientnegative/interval_1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/interval_1.q?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/interval_1.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/interval_1.q Thu Mar 19 19:05:28 2015
@@ -0,0 +1,2 @@
+-- year-month/day-time intervals not compatible
+select interval_day_time(interval '1' year) from src limit 1;

Added: hive/trunk/ql/src/test/queries/clientnegative/interval_2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/interval_2.q?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/interval_2.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/interval_2.q Thu Mar 19 19:05:28 2015
@@ -0,0 +1,3 @@
+-- year-month/day-time intervals not compatible
+select interval '1' year - interval '365' day from src limit 1;
+

Added: hive/trunk/ql/src/test/queries/clientnegative/interval_3.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/interval_3.q?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/interval_3.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/interval_3.q Thu Mar 19 19:05:28 2015
@@ -0,0 +1,3 @@
+-- year-month/day-time intervals not compatible
+select interval '1' year + interval '365' day from src limit 1;
+

Added: hive/trunk/ql/src/test/queries/clientpositive/interval_1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/interval_1.q?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/interval_1.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/interval_1.q Thu Mar 19 19:05:28 2015
@@ -0,0 +1,42 @@
+select
+  interval '10-11' year to month,
+  interval '10' year,
+  interval '11' month
+from src limit 1;
+
+select
+  interval_year_month('10-11'),
+  interval_year_month(cast('10-11' as string)),
+  interval_year_month(cast('10-11' as varchar(10))),
+  interval_year_month(cast('10-11' as char(10))),
+  interval_year_month('10-11') = interval '10-11' year to month
+from src limit 1;
+
+-- Test normalization of interval values
+select
+  interval '49' month
+from src limit 1;
+
+select
+  interval '10 9:8:7.987654321' day to second,
+  interval '10' day,
+  interval '11' hour,
+  interval '12' minute,
+  interval '13' second,
+  interval '13.123456789' second
+from src limit 1;
+
+select
+  interval_day_time('2 1:2:3'),
+  interval_day_time(cast('2 1:2:3' as string)),
+  interval_day_time(cast('2 1:2:3' as varchar(10))),
+  interval_day_time(cast('2 1:2:3' as char(10))),
+  interval_day_time('2 1:2:3') = interval '2 1:2:3' day to second
+from src limit 1;
+
+-- Test normalization of interval values
+select
+  interval '49' hour,
+  interval '1470' minute,
+  interval '90061.111111111' second
+from src limit 1;

Added: hive/trunk/ql/src/test/queries/clientpositive/interval_2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/interval_2.q?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/interval_2.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/interval_2.q Thu Mar 19 19:05:28 2015
@@ -0,0 +1,87 @@
+-- group-by/order-by/aggregation functions
+
+select
+  iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt)
+from (
+  select
+    key,
+    interval_year_month(concat(key, '-1')) as iym,
+    interval_day_time(concat(key, ' 1:1:1')) as idt
+  from src) q1
+group by iym 
+order by iym asc
+limit 5;
+
+select
+  iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt)
+from (
+  select
+    key,
+    interval_year_month(concat(key, '-1')) as iym,
+    interval_day_time(concat(key, ' 1:1:1')) as idt
+  from src) q1
+group by iym 
+order by iym desc
+limit 5;
+
+-- same query as previous, with having clause
+select
+  iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt)
+from (
+  select
+    key,
+    interval_year_month(concat(key, '-1')) as iym,
+    interval_day_time(concat(key, ' 1:1:1')) as idt
+  from src) q1
+group by iym 
+having max(idt) > interval '496 0:0:0' day to second
+order by iym desc
+limit 5;
+
+select
+  idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt)
+from (
+  select
+    key,
+    interval_year_month(concat(key, '-1')) as iym,
+    interval_day_time(concat(key, ' 1:1:1')) as idt
+  from src) q1
+group by idt 
+order by idt asc
+limit 5;
+
+select
+  idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt)
+from (
+  select
+    key,
+    interval_year_month(concat(key, '-1')) as iym,
+    interval_day_time(concat(key, ' 1:1:1')) as idt
+  from src) q1
+group by idt 
+order by idt desc
+limit 5;
+
+-- same query as previous, with having clause
+select
+  idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt)
+from (
+  select
+    key,
+    interval_year_month(concat(key, '-1')) as iym,
+    interval_day_time(concat(key, ' 1:1:1')) as idt
+  from src) q1
+group by idt 
+having max(iym) < interval '496-0' year to month
+order by idt desc
+limit 5;
+
+select
+  count(iym), count(idt), min(key), max(key), min(iym), max(iym), min(idt), max(idt)
+from (
+  select
+    key,
+    interval_year_month(concat(key, '-1')) as iym,
+    interval_day_time(concat(key, ' 1:1:1')) as idt
+  from src) q1;
+

Added: hive/trunk/ql/src/test/queries/clientpositive/interval_3.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/interval_3.q?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/interval_3.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/interval_3.q Thu Mar 19 19:05:28 2015
@@ -0,0 +1,38 @@
+-- where clause
+select
+  l_orderkey, l_shipdate, l_receiptdate
+from lineitem
+  where (cast(l_shipdate as date) - date '1992-01-01') < interval '365 0:0:0' day to second
+order by l_orderkey;
+
+select
+  l_orderkey, l_shipdate, l_receiptdate
+from lineitem
+  where (cast(l_shipdate as date) + interval '1-0' year to month) <= date '1994-01-01'
+order by l_orderkey;
+
+select
+  l_orderkey, l_shipdate, l_receiptdate
+from lineitem
+  where (cast(l_shipdate as date) + interval '1-0' year to month) <= date '1994-01-01'
+    and (cast(l_receiptdate as date) - cast(l_shipdate as date)) < interval '10' day
+order by l_orderkey;
+
+
+-- joins
+select
+  a.l_orderkey, b.l_orderkey, a.interval1
+from
+  (
+    select
+      l_orderkey, l_shipdate, l_receiptdate, (cast(l_receiptdate as date) - cast(l_shipdate as date)) as interval1
+    from lineitem
+  ) a 
+  join
+  (
+    select
+      l_orderkey, l_shipdate, l_receiptdate, (cast(l_receiptdate as date) - date '1992-07-02') as interval2
+    from lineitem
+  ) b
+  on a.interval1 = b.interval2 and a.l_orderkey = b.l_orderkey
+order by a.l_orderkey;

Added: hive/trunk/ql/src/test/queries/clientpositive/interval_arithmetic.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/interval_arithmetic.q?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/interval_arithmetic.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/interval_arithmetic.q Thu Mar 19 19:05:28 2015
@@ -0,0 +1,162 @@
+create table interval_arithmetic_1 (dateval date, tsval timestamp);
+insert overwrite table interval_arithmetic_1
+  select cast(ctimestamp1 as date), ctimestamp1 from alltypesorc;
+
+-- interval year-month arithmetic
+explain
+select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+limit 2;
+
+select
+  dateval,
+  dateval - interval '2-2' year to month,
+  dateval - interval '-2-2' year to month,
+  dateval + interval '2-2' year to month,
+  dateval + interval '-2-2' year to month,
+  - interval '2-2' year to month + dateval,
+  interval '2-2' year to month + dateval
+from interval_arithmetic_1
+limit 2;
+
+explain
+select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+limit 2;
+
+select
+  dateval,
+  dateval - date '1999-06-07',
+  date '1999-06-07' - dateval,
+  dateval - dateval
+from interval_arithmetic_1
+limit 2;
+
+explain
+select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+limit 2;
+
+select
+  tsval,
+  tsval - interval '2-2' year to month,
+  tsval - interval '-2-2' year to month,
+  tsval + interval '2-2' year to month,
+  tsval + interval '-2-2' year to month,
+  - interval '2-2' year to month + tsval,
+  interval '2-2' year to month + tsval
+from interval_arithmetic_1
+limit 2;
+
+explain
+select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+limit 2;
+
+select
+  interval '2-2' year to month + interval '3-3' year to month,
+  interval '2-2' year to month - interval '3-3' year to month
+from interval_arithmetic_1
+limit 2;
+
+
+-- interval day-time arithmetic
+explain
+select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+limit 2;
+
+select
+  dateval,
+  dateval - interval '99 11:22:33.123456789' day to second,
+  dateval - interval '-99 11:22:33.123456789' day to second,
+  dateval + interval '99 11:22:33.123456789' day to second,
+  dateval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + dateval,
+  interval '99 11:22:33.123456789' day to second + dateval
+from interval_arithmetic_1
+limit 2;
+
+explain
+select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+limit 2;
+
+select
+  dateval,
+  tsval,
+  dateval - tsval,
+  tsval - dateval,
+  tsval - tsval
+from interval_arithmetic_1
+limit 2;
+
+explain
+select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+limit 2;
+
+select
+  tsval,
+  tsval - interval '99 11:22:33.123456789' day to second,
+  tsval - interval '-99 11:22:33.123456789' day to second,
+  tsval + interval '99 11:22:33.123456789' day to second,
+  tsval + interval '-99 11:22:33.123456789' day to second,
+  -interval '99 11:22:33.123456789' day to second + tsval,
+  interval '99 11:22:33.123456789' day to second + tsval
+from interval_arithmetic_1
+limit 2;
+
+explain
+select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2;
+
+select
+  interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
+  interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second
+from interval_arithmetic_1
+limit 2;
+
+drop table interval_arithmetic_1;

Added: hive/trunk/ql/src/test/queries/clientpositive/interval_comparison.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/interval_comparison.q?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/interval_comparison.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/interval_comparison.q Thu Mar 19 19:05:28 2015
@@ -0,0 +1,85 @@
+
+-- should all be true
+select
+  i1 = i1,
+  i1 = i2,
+  i1 >= i2,
+  i1 <= i2,
+  i3 = i3,
+  i3 = i4,
+  i3 <= i4,
+  i3 >= i4,
+  i1 < i3,
+  i3 > i1,
+  i1 != i3
+from (
+  select
+    interval '2-0' year to month as i1,
+    interval '2' year as i2,
+    interval '2-1' year to month as i3,
+    interval '25' month as i4
+  from src limit 1
+) q1;
+
+-- should all be false
+select
+  i1 != i1,
+  i1 != i2,
+  i1 < i2,
+  i1 > i2,
+  i1 = i3,
+  i1 > i3,
+  i1 >= i3,
+  i3 < i1,
+  i3 <= i1
+from (
+  select
+    interval '2-0' year to month as i1,
+    interval '2' year as i2,
+    interval '2-1' year to month as i3,
+    interval '25' month as i4
+  from src limit 1
+) q1;
+
+-- should all be true
+select
+  i1 = i1,
+  i1 = i2,
+  i1 >= i2,
+  i1 <= i2,
+  i3 = i3,
+  i3 = i4,
+  i3 <= i4,
+  i3 >= i4,
+  i1 < i3,
+  i3 > i1,
+  i1 != i3
+from (
+  select
+    interval '1 0:0:0' day to second as i1,
+    interval '24' hour as i2,
+    interval '1 0:0:1' day to second as i3,
+    interval '86401' second as i4
+  from src limit 1
+) q1;
+
+-- should all be false
+select
+  i1 != i1,
+  i1 != i2,
+  i1 < i2,
+  i1 > i2,
+  i1 = i3,
+  i1 > i3,
+  i1 >= i3,
+  i3 < i1,
+  i3 <= i1
+from (
+  select
+    interval '1 0:0:0' day to second as i1,
+    interval '24' hour as i2,
+    interval '1 0:0:1' day to second as i3,
+    interval '86401' second as i4
+  from src limit 1
+) q1;
+

Added: hive/trunk/ql/src/test/results/clientnegative/interval_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/interval_1.q.out?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/interval_1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/interval_1.q.out Thu Mar 19 19:05:28 2015
@@ -0,0 +1 @@
+FAILED: RuntimeException Cannot convert to IntervalDayTime from: interval_year_month

Added: hive/trunk/ql/src/test/results/clientnegative/interval_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/interval_2.q.out?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/interval_2.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/interval_2.q.out Thu Mar 19 19:05:28 2015
@@ -0,0 +1 @@
+FAILED: SemanticException Line 0:-1 Wrong arguments ''365'': No matching method for class org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDTIMinus with (interval_year_month, interval_day_time)

Added: hive/trunk/ql/src/test/results/clientnegative/interval_3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/interval_3.q.out?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/interval_3.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/interval_3.q.out Thu Mar 19 19:05:28 2015
@@ -0,0 +1 @@
+FAILED: SemanticException Line 0:-1 Wrong arguments ''365'': No matching method for class org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDTIPlus with (interval_year_month, interval_day_time)

Added: hive/trunk/ql/src/test/results/clientpositive/interval_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/interval_1.q.out?rev=1667850&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/interval_1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/interval_1.q.out Thu Mar 19 19:05:28 2015
@@ -0,0 +1,116 @@
+PREHOOK: query: select
+  interval '10-11' year to month,
+  interval '10' year,
+  interval '11' month
+from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  interval '10-11' year to month,
+  interval '10' year,
+  interval '11' month
+from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+10-11	10-0	0-11
+PREHOOK: query: select
+  interval_year_month('10-11'),
+  interval_year_month(cast('10-11' as string)),
+  interval_year_month(cast('10-11' as varchar(10))),
+  interval_year_month(cast('10-11' as char(10))),
+  interval_year_month('10-11') = interval '10-11' year to month
+from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  interval_year_month('10-11'),
+  interval_year_month(cast('10-11' as string)),
+  interval_year_month(cast('10-11' as varchar(10))),
+  interval_year_month(cast('10-11' as char(10))),
+  interval_year_month('10-11') = interval '10-11' year to month
+from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+10-11	10-11	10-11	10-11	true
+PREHOOK: query: -- Test normalization of interval values
+select
+  interval '49' month
+from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test normalization of interval values
+select
+  interval '49' month
+from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+4-1
+PREHOOK: query: select
+  interval '10 9:8:7.987654321' day to second,
+  interval '10' day,
+  interval '11' hour,
+  interval '12' minute,
+  interval '13' second,
+  interval '13.123456789' second
+from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  interval '10 9:8:7.987654321' day to second,
+  interval '10' day,
+  interval '11' hour,
+  interval '12' minute,
+  interval '13' second,
+  interval '13.123456789' second
+from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+10 09:08:07.987654321	10 00:00:00.000000000	0 11:00:00.000000000	0 00:12:00.000000000	0 00:00:13.000000000	0 00:00:13.123456789
+PREHOOK: query: select
+  interval_day_time('2 1:2:3'),
+  interval_day_time(cast('2 1:2:3' as string)),
+  interval_day_time(cast('2 1:2:3' as varchar(10))),
+  interval_day_time(cast('2 1:2:3' as char(10))),
+  interval_day_time('2 1:2:3') = interval '2 1:2:3' day to second
+from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  interval_day_time('2 1:2:3'),
+  interval_day_time(cast('2 1:2:3' as string)),
+  interval_day_time(cast('2 1:2:3' as varchar(10))),
+  interval_day_time(cast('2 1:2:3' as char(10))),
+  interval_day_time('2 1:2:3') = interval '2 1:2:3' day to second
+from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+2 01:02:03.000000000	2 01:02:03.000000000	2 01:02:03.000000000	2 01:02:03.000000000	true
+PREHOOK: query: -- Test normalization of interval values
+select
+  interval '49' hour,
+  interval '1470' minute,
+  interval '90061.111111111' second
+from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test normalization of interval values
+select
+  interval '49' hour,
+  interval '1470' minute,
+  interval '90061.111111111' second
+from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+2 01:00:00.000000000	1 00:30:00.000000000	1 01:01:01.111111111



Mime
View raw message