hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hashut...@apache.org
Subject svn commit: r1667850 [1/4] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hive/common/util/ common/src/test/org/apache/hadoop/hive/common/type/ common/src/test/org/apache/hive/common/util/ ql/src/java/o...
Date Thu, 19 Mar 2015 19:05:29 GMT
Author: hashutosh
Date: Thu Mar 19 19:05:28 2015
New Revision: 1667850

URL: http://svn.apache.org/r1667850
Log:
HIVE-9792 : Support interval type in expressions/predicates (Jason Dere via Ashutosh Chauhan)

Added:
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java
    hive/trunk/common/src/java/org/apache/hive/common/util/DateTimeMath.java
    hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java
    hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java
    hive/trunk/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseDTI.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java
    hive/trunk/ql/src/test/queries/clientnegative/interval_1.q
    hive/trunk/ql/src/test/queries/clientnegative/interval_2.q
    hive/trunk/ql/src/test/queries/clientnegative/interval_3.q
    hive/trunk/ql/src/test/queries/clientpositive/interval_1.q
    hive/trunk/ql/src/test/queries/clientpositive/interval_2.q
    hive/trunk/ql/src/test/queries/clientpositive/interval_3.q
    hive/trunk/ql/src/test/queries/clientpositive/interval_arithmetic.q
    hive/trunk/ql/src/test/queries/clientpositive/interval_comparison.q
    hive/trunk/ql/src/test/results/clientnegative/interval_1.q.out
    hive/trunk/ql/src/test/results/clientnegative/interval_2.q.out
    hive/trunk/ql/src/test/results/clientnegative/interval_3.q.out
    hive/trunk/ql/src/test/results/clientpositive/interval_1.q.out
    hive/trunk/ql/src/test/results/clientpositive/interval_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/interval_3.q.out
    hive/trunk/ql/src/test/results/clientpositive/interval_arithmetic.q.out
    hive/trunk/ql/src/test/results/clientpositive/interval_comparison.q.out
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalDayTimeWritable.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalYearMonthWritable.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveIntervalDayTime.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveIntervalYearMonth.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveIntervalDayTimeObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveIntervalYearMonthObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalDayTime.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalYearMonth.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveIntervalDayTimeObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveIntervalYearMonthObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveIntervalDayTimeObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveIntervalYearMonthObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveIntervalDayTimeObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveIntervalYearMonthObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveIntervalDayTimeObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveIntervalYearMonthObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveIntervalDayTimeObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveIntervalYearMonthObjectInspector.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveIntervalDayTimeWritable.java
    hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveIntervalYearMonthWritable.java
Removed:
    hive/trunk/ql/src/test/queries/clientnegative/invalid_arithmetic_type.q
    hive/trunk/ql/src/test/results/clientnegative/invalid_arithmetic_type.q.out
Modified:
    hive/trunk/common/src/java/org/apache/hive/common/util/DateUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseArithmetic.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
    hive/trunk/serde/if/serde.thrift
    hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
    hive/trunk/serde/src/gen/thrift/gen-cpp/serde_constants.h
    hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
    hive/trunk/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
    hive/trunk/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
    hive/trunk/serde/src/gen/thrift/gen-rb/serde_constants.rb
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,234 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import java.math.BigDecimal;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.hive.common.util.DateUtils;
+
+/**
+ * Day-time interval type representing an offset in days/hours/minutes/seconds,
+ * with nanosecond precision.
+ * 1 day = 24 hours = 1440 minutes = 86400 seconds
+ */
+public class HiveIntervalDayTime implements Comparable<HiveIntervalDayTime> {
+
+  // days/hours/minutes/seconds all represented as seconds
+  protected long totalSeconds;
+  protected int nanos;
+
+  public HiveIntervalDayTime() {
+  }
+
+  public HiveIntervalDayTime(int days, int hours, int minutes, int seconds, int nanos) {
+    set(days, hours, minutes, seconds, nanos);
+  }
+
+  public HiveIntervalDayTime(long seconds, int nanos) {
+    set(seconds, nanos);
+  }
+
+  public HiveIntervalDayTime(BigDecimal seconds) {
+    set(seconds);
+  }
+
+  public HiveIntervalDayTime(HiveIntervalDayTime other) {
+    set(other.totalSeconds, other.nanos);
+  }
+
+  public int getDays() {
+    return (int) TimeUnit.SECONDS.toDays(totalSeconds);
+  }
+
+  public int getHours() {
+    return (int) (TimeUnit.SECONDS.toHours(totalSeconds) % TimeUnit.DAYS.toHours(1));
+  }
+
+  public int getMinutes() {
+    return (int) (TimeUnit.SECONDS.toMinutes(totalSeconds) % TimeUnit.HOURS.toMinutes(1));
+  }
+
+  public int getSeconds() {
+    return (int) (totalSeconds % TimeUnit.MINUTES.toSeconds(1));
+  }
+
+  public int getNanos() {
+    return nanos;
+  }
+
+  /**
+   * Returns days/hours/minutes all converted into seconds.
+   * Nanos still need to be retrieved using getNanos()
+   * @return
+   */
+  public long getTotalSeconds() {
+    return totalSeconds;
+  }
+
+  /**
+   * Ensures that the seconds and nanoseconds fields have consistent sign
+   */
+  protected void normalizeSecondsAndNanos() {
+    if (totalSeconds > 0 && nanos < 0) {
+      --totalSeconds;
+      nanos += DateUtils.NANOS_PER_SEC;
+    } else if (totalSeconds < 0 && nanos > 0) {
+      ++totalSeconds;
+      nanos -= DateUtils.NANOS_PER_SEC;
+    }
+  }
+
+  public void set(int days, int hours, int minutes, int seconds, int nanos) {
+    long totalSeconds = seconds;
+    totalSeconds += TimeUnit.DAYS.toSeconds(days);
+    totalSeconds += TimeUnit.HOURS.toSeconds(hours);
+    totalSeconds += TimeUnit.MINUTES.toSeconds(minutes);
+    totalSeconds += TimeUnit.NANOSECONDS.toSeconds(nanos);
+    nanos = nanos % DateUtils.NANOS_PER_SEC;
+
+    this.totalSeconds = totalSeconds;
+    this.nanos = nanos;
+
+    normalizeSecondsAndNanos();
+  }
+
+  public void set(long seconds, int nanos) {
+    this.totalSeconds = seconds;
+    this.nanos = nanos;
+    normalizeSecondsAndNanos();
+  }
+
+  public void set(BigDecimal totalSecondsBd) {
+    long totalSeconds = totalSecondsBd.longValue();
+    BigDecimal fractionalSecs = totalSecondsBd.remainder(BigDecimal.ONE);
+    int nanos = fractionalSecs.multiply(DateUtils.NANOS_PER_SEC_BD).intValue();
+    set(totalSeconds, nanos);
+  }
+
+  public void set(HiveIntervalDayTime other) {
+    set(other.getTotalSeconds(), other.getNanos());
+  }
+
+  public HiveIntervalDayTime negate() {
+    return new HiveIntervalDayTime(-getTotalSeconds(), -getNanos());
+  }
+
+  @Override
+  public int compareTo(HiveIntervalDayTime other) {
+    long cmp = this.totalSeconds - other.totalSeconds;
+    if (cmp == 0) {
+      cmp = this.nanos - other.nanos;
+    }
+    if (cmp != 0) {
+      cmp = cmp > 0 ? 1 : -1;
+    }
+    return (int) cmp;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (!(obj instanceof HiveIntervalDayTime)) {
+      return false;
+    }
+    return 0 == compareTo((HiveIntervalDayTime) obj);
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder().append(totalSeconds).append(nanos).toHashCode();
+  }
+
+  @Override
+  public String toString() {
+    // If normalize() was used, then day-hour-minute-second-nanos should have the same sign.
+    // This is currently working with that assumption.
+    boolean isNegative = (totalSeconds < 0 || nanos < 0);
+    String daySecondSignStr = isNegative ? "-" : "";
+
+    return String.format("%s%d %02d:%02d:%02d.%09d",
+        daySecondSignStr, Math.abs(getDays()),
+        Math.abs(getHours()), Math.abs(getMinutes()),
+        Math.abs(getSeconds()), Math.abs(getNanos()));
+  }
+
+  public static HiveIntervalDayTime valueOf(String strVal) {
+    HiveIntervalDayTime result = null;
+    if (strVal == null) {
+      throw new IllegalArgumentException("Interval day-time string was null");
+    }
+    Matcher patternMatcher = PATTERN_MATCHER.get();
+    patternMatcher.reset(strVal);
+    if (patternMatcher.matches()) {
+      // Parse out the individual parts
+      try {
+        // Sign - whether interval is positive or negative
+        int sign = 1;
+        String field = patternMatcher.group(1);
+        if (field != null && field.equals("-")) {
+          sign = -1;
+        }
+        int days = sign *
+            DateUtils.parseNumericValueWithRange("day", patternMatcher.group(2),
+                0, Integer.MAX_VALUE);
+        byte hours = (byte) (sign *
+            DateUtils.parseNumericValueWithRange("hour", patternMatcher.group(3), 0, 23));
+        byte minutes = (byte) (sign *
+            DateUtils.parseNumericValueWithRange("minute", patternMatcher.group(4), 0, 59));
+        int seconds = 0;
+        int nanos = 0;
+        field = patternMatcher.group(5);
+        if (field != null) {
+          BigDecimal bdSeconds = new BigDecimal(field);
+          if (bdSeconds.compareTo(DateUtils.MAX_INT_BD) > 0) {
+            throw new IllegalArgumentException("seconds value of " + bdSeconds + " too large");
+          }
+          seconds = sign * bdSeconds.intValue();
+          nanos = sign * bdSeconds.subtract(new BigDecimal(bdSeconds.toBigInteger()))
+              .multiply(DateUtils.NANOS_PER_SEC_BD).intValue();
+        }
+
+        result = new HiveIntervalDayTime(days, hours, minutes, seconds, nanos);
+      } catch (Exception err) {
+        throw new IllegalArgumentException("Error parsing interval day-time string: " + strVal, err);
+      }
+    } else {
+      throw new IllegalArgumentException(
+          "Interval string does not match day-time format of 'd h:m:s.n': " + strVal);
+    }
+
+    return result;
+  }
+
+  // Simple pattern: D H:M:S.nnnnnnnnn
+  private final static String PARSE_PATTERN =
+      "([+|-])?(\\d+) (\\d+):(\\d+):((\\d+)(\\.(\\d+))?)";
+
+  private static final ThreadLocal<Matcher> PATTERN_MATCHER = new ThreadLocal<Matcher>() {
+      @Override
+      protected Matcher initialValue() {
+        return Pattern.compile(PARSE_PATTERN).matcher("");
+      }
+  };
+}

Added: hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java (added)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.hive.common.util.DateUtils;
+
+public class HiveIntervalYearMonth implements Comparable<HiveIntervalYearMonth> {
+
+  // years/months represented in months
+  protected int totalMonths;
+
+  protected final static int MONTHS_PER_YEAR = 12;
+
+  public HiveIntervalYearMonth() {
+  }
+
+  public HiveIntervalYearMonth(int years, int months) {
+    set(years, months);
+  }
+
+  public HiveIntervalYearMonth(int totalMonths) {
+    set(totalMonths);
+  }
+
+  public HiveIntervalYearMonth(HiveIntervalYearMonth hiveInterval) {
+    set(hiveInterval.getTotalMonths());
+  }
+
+  //
+  // Getters
+  //
+
+  public int getYears() {
+    return totalMonths / MONTHS_PER_YEAR;
+  }
+
+  public int getMonths() {
+    return totalMonths % MONTHS_PER_YEAR;
+  }
+
+  public int getTotalMonths() {
+    return totalMonths;
+  }
+
+  public void set(int years, int months) {
+    this.totalMonths = months;
+    this.totalMonths += years * MONTHS_PER_YEAR;
+  }
+
+  public void set(int totalMonths) {
+    this.totalMonths = totalMonths;
+  }
+
+  public void set(HiveIntervalYearMonth other) {
+    set(other.getTotalMonths());
+  }
+
+  public HiveIntervalYearMonth negate() {
+    return new HiveIntervalYearMonth(-getTotalMonths());
+  }
+
+  //
+  // Comparison
+  //
+
+  @Override
+  public int compareTo(HiveIntervalYearMonth other) {
+    int cmp = this.getTotalMonths() - other.getTotalMonths();
+
+    if (cmp != 0) {
+      cmp = cmp > 0 ? 1 : -1;
+    }
+    return cmp;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (!(obj instanceof HiveIntervalYearMonth)) {
+      return false;
+    }
+    return 0 == compareTo((HiveIntervalYearMonth) obj);
+  }
+
+  @Override
+  public int hashCode() {
+    return totalMonths;
+  }
+
+  @Override
+  public String toString() {
+    String yearMonthSignStr = totalMonths >= 0 ? "" : "-";
+
+    return String.format("%s%d-%d",
+        yearMonthSignStr, Math.abs(getYears()), Math.abs(getMonths()));
+  }
+
+  public static HiveIntervalYearMonth valueOf(String strVal) {
+    HiveIntervalYearMonth result = null;
+    if (strVal == null) {
+      throw new IllegalArgumentException("Interval year-month string was null");
+    }
+    Matcher patternMatcher = PATTERN_MATCHER.get();
+    patternMatcher.reset(strVal);
+    if (patternMatcher.matches()) {
+      // Parse out the individual parts
+      try {
+        // Sign - whether interval is positive or negative
+        int sign = 1;
+        String field = patternMatcher.group(1);
+        if (field != null && field.equals("-")) {
+          sign = -1;
+        }
+        int years = sign *
+            DateUtils.parseNumericValueWithRange("year", patternMatcher.group(2),
+                0, Integer.MAX_VALUE);
+        byte months = (byte) (sign *
+            DateUtils.parseNumericValueWithRange("month", patternMatcher.group(3), 0, 11));
+        result = new HiveIntervalYearMonth(years, months);
+      } catch (Exception err) {
+        throw new IllegalArgumentException("Error parsing interval year-month string: " + strVal, err);
+      }
+    } else {
+      throw new IllegalArgumentException(
+          "Interval string does not match year-month format of 'y-m': " + strVal);
+    }
+
+    return result;
+  }
+
+  // Simple pattern: Y-M
+  private final static String PARSE_PATTERN =
+      "([+|-])?(\\d+)-(\\d+)";
+
+  private static final ThreadLocal<Matcher> PATTERN_MATCHER = new ThreadLocal<Matcher>() {
+      @Override
+      protected Matcher initialValue() {
+        return Pattern.compile(PARSE_PATTERN).matcher("");
+      }
+  };
+}

Added: hive/trunk/common/src/java/org/apache/hive/common/util/DateTimeMath.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hive/common/util/DateTimeMath.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hive/common/util/DateTimeMath.java (added)
+++ hive/trunk/common/src/java/org/apache/hive/common/util/DateTimeMath.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.common.util;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Calendar;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+
+
+/**
+ * Arithmetic helpers combining java.sql.Timestamp/Date values with Hive's
+ * year-month and day-time interval types.
+ *
+ * NOTE(review): calUtc/calLocal/nanosResult are mutable instance fields reused
+ * across calls, so a DateTimeMath instance does not look safe for concurrent
+ * use by multiple threads — confirm callers keep one instance per thread.
+ */
+public class DateTimeMath {
+
+  // Sums two nanosecond values, carrying any overflow/underflow into a +/-1
+  // second adjustment so that nanos ends up in [0, NANOS_PER_SEC).
+  private static class NanosResult {
+    public int seconds;
+    public int nanos;
+
+    public void addNanos(int leftNanos, int rightNanos) {
+      seconds = 0;
+      nanos = leftNanos + rightNanos;
+      if (nanos < 0) {
+        // Borrow one second so the nanos component is non-negative.
+        seconds = -1;
+        nanos += DateUtils.NANOS_PER_SEC;
+      } else if (nanos >= DateUtils.NANOS_PER_SEC) {
+        // Carry one second out of the nanos component.
+        seconds = 1;
+        nanos -= DateUtils.NANOS_PER_SEC;
+      }
+    }
+  }
+
+  // Reused scratch state; see thread-safety note on the class.
+  protected Calendar calUtc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  protected Calendar calLocal = Calendar.getInstance();
+  protected NanosResult nanosResult = new NanosResult();
+
+  //
+  // Operations involving/returning year-month intervals
+  //
+
+  /**
+   * Perform month arithmetic to millis value using UTC time zone.
+   * @param millis epoch millis to adjust
+   * @param months number of months to add (may be negative)
+   * @return adjusted epoch millis
+   */
+  public long addMonthsToMillisUtc(long millis, int months) {
+    calUtc.setTimeInMillis(millis);
+    calUtc.add(Calendar.MONTH, months);
+    return calUtc.getTimeInMillis();
+  }
+
+  /**
+   * Perform month arithmetic to millis value using local time zone.
+   * @param millis epoch millis to adjust
+   * @param months number of months to add (may be negative)
+   * @return adjusted epoch millis
+   */
+  public long addMonthsToMillisLocal(long millis, int months) {
+    calLocal.setTimeInMillis(millis);
+    calLocal.add(Calendar.MONTH, months);
+    return calLocal.getTimeInMillis();
+  }
+
+  // Adds a year-month interval to a timestamp; nanos are carried over unchanged.
+  // Returns null if either argument is null.
+  public Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    // Attempt to match Oracle semantics for timestamp arithmetic,
+    // where timestamp arithmetic is done in UTC, then converted back to local timezone
+    long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
+    Timestamp tsResult = new Timestamp(resultMillis);
+    tsResult.setNanos(ts.getNanos());
+
+    return tsResult;
+  }
+
+  // Adds a year-month interval to a date. Returns null if either argument is null.
+  public Date add(Date dt, HiveIntervalYearMonth interval) {
+    if (dt == null || interval == null) {
+      return null;
+    }
+
+    // Since Date millis value is in local timezone representation, do date arithmetic
+    // using local timezone so the time remains at the start of the day.
+    long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
+    return new Date(resultMillis);
+  }
+
+  // Sums two year-month intervals. Returns null if either argument is null.
+  public HiveIntervalYearMonth add(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
+    HiveIntervalYearMonth result = null;
+    if (left == null || right == null) {
+      return null;
+    }
+
+    result = new HiveIntervalYearMonth(left.getTotalMonths() + right.getTotalMonths());
+    return result;
+  }
+
+  // Subtraction is implemented as addition of the negated interval.
+  public Timestamp subtract(Timestamp left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public Date subtract(Date left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public HiveIntervalYearMonth subtract(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  //
+  // Operations involving/returning day-time intervals
+  //
+
+  // Adds a day-time interval to a timestamp. Returns null if either argument is null.
+  public Timestamp add(Timestamp ts, HiveIntervalDayTime interval) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    // Combine the two nanos components first; any carry/borrow lands in
+    // nanosResult.seconds and is folded into the millis computation below.
+    nanosResult.addNanos(ts.getNanos(), interval.getNanos());
+
+    long newMillis = ts.getTime()
+        + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
+    Timestamp tsResult = new Timestamp(newMillis);
+    tsResult.setNanos(nanosResult.nanos);
+    return tsResult;
+  }
+
+  // Sums two day-time intervals. Returns null if either argument is null.
+  public HiveIntervalDayTime add(HiveIntervalDayTime left, HiveIntervalDayTime right) {
+    HiveIntervalDayTime result = null;
+    if (left == null || right == null) {
+      return null;
+    }
+
+    nanosResult.addNanos(left.getNanos(), right.getNanos());
+
+    long totalSeconds = left.getTotalSeconds() + right.getTotalSeconds() + nanosResult.seconds;
+    result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos);
+    return result;
+  }
+
+  public Timestamp subtract(Timestamp left, HiveIntervalDayTime right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public HiveIntervalDayTime subtract(HiveIntervalDayTime left, HiveIntervalDayTime right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  // Difference of two timestamps as a day-time interval.
+  // Returns null if either argument is null.
+  public HiveIntervalDayTime subtract(Timestamp left, Timestamp right) {
+    HiveIntervalDayTime result = null;
+    if (left == null || right == null) {
+      return null;
+    }
+
+    nanosResult.addNanos(left.getNanos(), -(right.getNanos()));
+
+    // NOTE(review): MILLISECONDS.toSeconds truncates toward zero, so the
+    // result for pre-epoch (negative millis) timestamps should be verified.
+    long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime())
+        - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds;
+    result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos);
+    return result;
+  }
+}

Modified: hive/trunk/common/src/java/org/apache/hive/common/util/DateUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hive/common/util/DateUtils.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hive/common/util/DateUtils.java (original)
+++ hive/trunk/common/src/java/org/apache/hive/common/util/DateUtils.java Thu Mar 19 19:05:28 2015
@@ -18,6 +18,7 @@
 
 package org.apache.hive.common.util;
 
+import java.math.BigDecimal;
 import java.text.SimpleDateFormat;
 
 /**
@@ -36,4 +37,21 @@ public class DateUtils {
   public static SimpleDateFormat getDateFormat() {
     return dateFormatLocal.get();
   }
+
+  public static final int NANOS_PER_SEC = 1000000000;
+  public static final BigDecimal MAX_INT_BD = new BigDecimal(Integer.MAX_VALUE);
+  public static final BigDecimal NANOS_PER_SEC_BD = new BigDecimal(NANOS_PER_SEC);
+
+  public static int parseNumericValueWithRange(String fieldName,
+      String strVal, int minValue, int maxValue) throws IllegalArgumentException {
+    int result = 0;
+    if (strVal != null) {
+      result = Integer.parseInt(strVal);
+      if (result < minValue || result > maxValue) {
+        throw new IllegalArgumentException(String.format("%s value %d outside range [%d, %d]",
+            fieldName, result, minValue, maxValue));
+      }
+    }
+    return result;
+  }
 }
\ No newline at end of file

Added: hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java (added)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import org.junit.*;
+
+import static org.junit.Assert.*;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+
+public class TestHiveIntervalDayTime {
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testGetters() throws Exception {
+    HiveIntervalDayTime i1 = new HiveIntervalDayTime(3, 4, 5, 6, 7);
+
+    assertEquals(3, i1.getDays());
+    assertEquals(4, i1.getHours());
+    assertEquals(5, i1.getMinutes());
+    assertEquals(6, i1.getSeconds());
+    assertEquals(7, i1.getNanos());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testCompare() throws Exception {
+    HiveIntervalDayTime i1 = new HiveIntervalDayTime(3, 4, 5, 6, 7);
+    HiveIntervalDayTime i2 = new HiveIntervalDayTime(3, 4, 5, 6, 7);
+    HiveIntervalDayTime i3 = new HiveIntervalDayTime(3, 4, 8, 9, 10);
+    HiveIntervalDayTime i4 = new HiveIntervalDayTime(3, 4, 8, 9, 5);
+
+    // compareTo()
+    assertEquals(i1 + " compareTo " + i1, 0, i1.compareTo(i1));
+    assertEquals(i1 + " compareTo " + i2, 0, i1.compareTo(i2));
+    assertEquals(i2 + " compareTo " + i1, 0, i2.compareTo(i1));
+    assertEquals(i3 + " compareTo " + i3, 0, i3.compareTo(i3));
+
+    assertTrue(i1 + " compareTo " + i3, 0 > i1.compareTo(i3));
+    assertTrue(i3 + " compareTo " + i1, 0 < i3.compareTo(i1));
+
+    // equals()
+    assertTrue(i1 + " equals " + i1, i1.equals(i1));
+    assertTrue(i1 + " equals " + i2, i1.equals(i2));
+    assertFalse(i1 + " equals " + i3, i1.equals(i3));
+    assertFalse(i3 + " equals " + i1, i3.equals(i1));
+    assertFalse(i3 + " equals " + i4, i3.equals(i4));
+
+    // hashCode()
+    assertEquals(i1 + " hashCode " + i1, i1.hashCode(), i1.hashCode());
+    assertEquals(i1 + " hashCode " + i1, i1.hashCode(), i2.hashCode());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testValueOf() throws Exception {
+    HiveIntervalDayTime i1 = HiveIntervalDayTime.valueOf("3 04:05:06.123456");
+    assertEquals(3, i1.getDays());
+    assertEquals(4, i1.getHours());
+    assertEquals(5, i1.getMinutes());
+    assertEquals(6, i1.getSeconds());
+    assertEquals(123456000, i1.getNanos());
+
+    HiveIntervalDayTime i2 = HiveIntervalDayTime.valueOf("+3 04:05:06");
+    assertEquals(3, i2.getDays());
+    assertEquals(4, i2.getHours());
+    assertEquals(5, i2.getMinutes());
+    assertEquals(6, i2.getSeconds());
+    assertEquals(0, i2.getNanos());
+
+    HiveIntervalDayTime i3 = HiveIntervalDayTime.valueOf("-12 13:14:15.987654321");
+    assertEquals(-12, i3.getDays());
+    assertEquals(-13, i3.getHours());
+    assertEquals(-14, i3.getMinutes());
+    assertEquals(-15, i3.getSeconds());
+    assertEquals(-987654321, i3.getNanos());
+
+    HiveIntervalDayTime i4 = HiveIntervalDayTime.valueOf("-0 0:0:0.000000012");
+    assertEquals(0, i4.getDays());
+    assertEquals(0, i4.getHours());
+    assertEquals(0, i4.getMinutes());
+    assertEquals(0, i4.getSeconds());
+    assertEquals(-12, i4.getNanos());
+
+    // Invalid values
+    String[] invalidValues = {
+      null,
+      "abc",
+      "0-11",
+      "0 60:0:0",
+      "0 0:60:0"
+    };
+    for (String invalidValue : invalidValues) {
+      boolean caughtException = false;
+      try {
+        HiveIntervalDayTime.valueOf(invalidValue);
+        fail("Expected exception");
+      } catch (IllegalArgumentException err) {
+        caughtException = true;
+      }
+      assertTrue("Expected exception", caughtException);
+    }
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testToString() throws Exception {
+    assertEquals("0 00:00:00.000000000", HiveIntervalDayTime.valueOf("0 00:00:00").toString());
+    assertEquals("3 04:05:06.123456000", HiveIntervalDayTime.valueOf("3 04:05:06.123456").toString());
+    assertEquals("-3 04:05:06.123456000", HiveIntervalDayTime.valueOf("-3 04:05:06.123456").toString());
+    assertEquals("1 00:00:00.000000000", HiveIntervalDayTime.valueOf("1 00:00:00").toString());
+    assertEquals("-1 00:00:00.000000000", HiveIntervalDayTime.valueOf("-1 00:00:00").toString());
+    assertEquals("0 00:00:00.880000000", HiveIntervalDayTime.valueOf("0 00:00:00.88").toString());
+    assertEquals("-0 00:00:00.880000000", HiveIntervalDayTime.valueOf("-0 00:00:00.88").toString());
+
+    // Mixed sign cases
+    assertEquals("-3 04:05:06.000000007",
+        new HiveIntervalDayTime(-3, -4, -5, -6, -7).toString());
+    assertEquals("3 04:05:06.000000007",
+        new HiveIntervalDayTime(3, 4, 5, 6, 7).toString());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testNormalize() throws Exception {
+    HiveIntervalDayTime i1 = new HiveIntervalDayTime(50, 48, 3, 5400, 2000000123);
+    assertEquals(HiveIntervalDayTime.valueOf("52 1:33:2.000000123"), i1);
+    assertEquals(52, i1.getDays());
+    assertEquals(1, i1.getHours());
+    assertEquals(33, i1.getMinutes());
+    assertEquals(2, i1.getSeconds());
+    assertEquals(123, i1.getNanos());
+
+    assertEquals(HiveIntervalDayTime.valueOf("0 0:0:0"),
+        new HiveIntervalDayTime(0, 0, 0, 0, 0));
+    assertEquals(HiveIntervalDayTime.valueOf("0 0:0:0"),
+        new HiveIntervalDayTime(2, -48, 0, 1, -1000000000));
+    assertEquals(HiveIntervalDayTime.valueOf("0 0:0:0"),
+        new HiveIntervalDayTime(-2, 48, 0, -1, 1000000000));
+    assertEquals(HiveIntervalDayTime.valueOf("1 0:0:0"),
+        new HiveIntervalDayTime(-1, 48, 0, 0, 0));
+    assertEquals(HiveIntervalDayTime.valueOf("-1 0:0:0"),
+        new HiveIntervalDayTime(1, -48, 0, 0, 0));
+    assertEquals(HiveIntervalDayTime.valueOf("0 23:59:59.999999999"),
+        new HiveIntervalDayTime(1, 0, 0, 0, -1));
+    assertEquals(HiveIntervalDayTime.valueOf("-0 23:59:59.999999999"),
+        new HiveIntervalDayTime(-1, 0, 0, 0, 1));
+
+    // -1 day 10 hrs 11 mins 172800 secs = -1 day 10 hrs 11 mins + 2 days = 1 day 10 hrs 11 mins
+    assertEquals(HiveIntervalDayTime.valueOf("1 10:11:0"),
+        new HiveIntervalDayTime(-1, 10, 11, 172800, 0));
+
+    i1 = new HiveIntervalDayTime(480, 480, 0, 5400, 2000000123);
+    assertEquals(500, i1.getDays());
+    assertEquals(1, i1.getHours());
+    assertEquals(30, i1.getMinutes());
+    assertEquals(2, i1.getSeconds());
+    assertEquals(123, i1.getNanos());
+  }
+}

Added: hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java (added)
+++ hive/trunk/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import org.junit.*;
+import static org.junit.Assert.*;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+
+public class TestHiveIntervalYearMonth {
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testGetters() throws Exception {
+    HiveIntervalYearMonth i1 = new HiveIntervalYearMonth(1, 2);
+    assertEquals(1, i1.getYears());
+    assertEquals(2, i1.getMonths());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testCompare() throws Exception {
+    HiveIntervalYearMonth i1 = new HiveIntervalYearMonth(1, 2);
+    HiveIntervalYearMonth i2 = new HiveIntervalYearMonth(1, 2);
+    HiveIntervalYearMonth i3 = new HiveIntervalYearMonth(1, 3);
+
+    // compareTo()
+    assertEquals(i1 + " compareTo " + i1, 0, i1.compareTo(i1));
+    assertEquals(i1 + " compareTo " + i2, 0, i1.compareTo(i2));
+    assertEquals(i2 + " compareTo " + i1, 0, i2.compareTo(i1));
+    assertEquals(i3 + " compareTo " + i3, 0, i3.compareTo(i3));
+
+    assertTrue(i1 + " compareTo " + i3, 0 > i1.compareTo(i3));
+    assertTrue(i3 + " compareTo " + i1, 0 < i3.compareTo(i1));
+
+    // equals()
+    assertTrue(i1 + " equals " + i1, i1.equals(i1));
+    assertTrue(i1 + " equals " + i2, i1.equals(i2));
+    assertFalse(i1 + " equals " + i3, i1.equals(i3));
+    assertFalse(i3 + " equals " + i1, i3.equals(i1));
+
+    // hashCode()
+    assertEquals(i1 + " hashCode " + i1, i1.hashCode(), i1.hashCode());
+    assertEquals(i1 + " hashCode " + i1, i1.hashCode(), i2.hashCode());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testValueOf() throws Exception {
+    HiveIntervalYearMonth i1 = HiveIntervalYearMonth.valueOf("1-2");
+    assertEquals(1, i1.getYears());
+    assertEquals(2, i1.getMonths());
+
+    HiveIntervalYearMonth i2 = HiveIntervalYearMonth.valueOf("+8-9");
+    assertEquals(8, i2.getYears());
+    assertEquals(9, i2.getMonths());
+
+    HiveIntervalYearMonth i3 = HiveIntervalYearMonth.valueOf("-10-11");
+    assertEquals(-10, i3.getYears());
+    assertEquals(-11, i3.getMonths());
+
+    HiveIntervalYearMonth i4 = HiveIntervalYearMonth.valueOf("-0-0");
+    assertEquals(0, i4.getYears());
+    assertEquals(0, i4.getMonths());
+
+    // Invalid values
+    String[] invalidValues = {
+      null,
+      "abc",
+      "0-12",
+      "0 1:2:3"
+    };
+    for (String invalidValue : invalidValues) {
+      boolean caughtException = false;
+      try {
+        HiveIntervalYearMonth.valueOf(invalidValue);
+        fail("Expected exception");
+      } catch (IllegalArgumentException err) {
+        caughtException = true;
+      }
+      assertTrue("Expected exception", caughtException);
+    }
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testToString() throws Exception {
+    assertEquals("0-0", HiveIntervalYearMonth.valueOf("0-0").toString());
+    assertEquals("1-2", HiveIntervalYearMonth.valueOf("1-2").toString());
+    assertEquals("-1-2", HiveIntervalYearMonth.valueOf("-1-2").toString());
+    assertEquals("1-0", HiveIntervalYearMonth.valueOf("1-0").toString());
+    assertEquals("-1-0", HiveIntervalYearMonth.valueOf("-1-0").toString());
+    assertEquals("0-0", HiveIntervalYearMonth.valueOf("-0-0").toString());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testNormalize() throws Exception {
+    HiveIntervalYearMonth i1 = new HiveIntervalYearMonth(1, -6);
+    assertEquals(HiveIntervalYearMonth.valueOf("0-6"), i1);
+    assertEquals(0, i1.getYears());
+    assertEquals(6, i1.getMonths());
+
+    assertEquals(HiveIntervalYearMonth.valueOf("0-0"), new HiveIntervalYearMonth(0, 0));
+    assertEquals(HiveIntervalYearMonth.valueOf("0-0"), new HiveIntervalYearMonth(-1, 12));
+    assertEquals(HiveIntervalYearMonth.valueOf("0-4"), new HiveIntervalYearMonth(-1, 16));
+    assertEquals(HiveIntervalYearMonth.valueOf("0-11"), new HiveIntervalYearMonth(1, -1));
+    assertEquals(HiveIntervalYearMonth.valueOf("-0-11"), new HiveIntervalYearMonth(-1, 1));
+
+    // -5 years + 121 months = -5 years + 10 years + 1 month = 5 years 1 month
+    assertEquals(HiveIntervalYearMonth.valueOf("5-1"), new HiveIntervalYearMonth(-5, 121));
+  }
+}

Added: hive/trunk/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java?rev=1667850&view=auto
==============================================================================
--- hive/trunk/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java (added)
+++ hive/trunk/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java Thu Mar 19 19:05:28 2015
@@ -0,0 +1,463 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.common.util;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.TimeZone;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.junit.*;
+
+import static org.junit.Assert.*;
+
+public class TestDateTimeMath {
+
+  @Test
+  public void testTimestampIntervalYearMonthArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-0",
+        "2001-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "1-1",
+        "2002-02-01 01:02:03.456");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "10-0",
+        "2011-01-01 01:02:03.456");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "0-11",
+        "2001-12-01 01:02:03.456");
+    checkTimestampIntervalYearMonthArithmetic("2001-03-01 01:02:03.500", plus, "1-11",
+        "2003-02-01 01:02:03.500");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.500", plus, "-1-1",
+        "1999-12-01 01:02:03.500");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.500", plus, "-0-0",
+        "2001-01-01 01:02:03.500");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.123456789", plus, "-0-0",
+        "2001-01-01 01:02:03.123456789");
+
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "0-0",
+        "2001-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "10-0",
+        "1991-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "-10-0",
+        "2011-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "8-2",
+        "1992-11-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "-8-2",
+        "2009-03-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.123456789", minus, "8-2",
+        "1992-11-01 01:02:03.123456789");
+
+    checkTimestampIntervalYearMonthArithmetic(null, plus, "1-1",
+        null);
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, null,
+        null);
+    checkTimestampIntervalYearMonthArithmetic(null, minus, "1-1",
+        null);
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, null,
+        null);
+
+    // End of the month behavior
+    checkTimestampIntervalYearMonthArithmetic("2001-01-28 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-29 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-30 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-31 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-02-28 01:02:03", plus, "0-1",
+        "2001-03-28 01:02:03");
+
+    // Test that timestamp arithmetic is done in UTC and then converted back to local timezone,
+    // matching Oracle behavior.
+    TimeZone originalTz = TimeZone.getDefault();
+    try {
+      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
+      checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
+          "2001-07-01 02:02:03");
+      checkTimestampIntervalYearMonthArithmetic("2001-07-01 01:02:03", plus, "0-6",
+          "2002-01-01 00:02:03");
+
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
+          "2001-07-01 01:02:03");
+      checkTimestampIntervalYearMonthArithmetic("2001-07-01 01:02:03", plus, "0-6",
+          "2002-01-01 01:02:03");
+    } finally {
+      TimeZone.setDefault(originalTz);
+    }
+  }
+
+  @Test
+  public void testDateIntervalYearMonthArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "0-0", "2001-01-01");
+    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "0-1", "2001-02-01");
+    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "0-6", "2001-07-01");
+    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "1-0", "2002-01-01");
+    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "1-1", "2002-02-01");
+    checkDateIntervalDayTimeArithmetic("2001-10-10", plus, "1-6", "2003-04-10");
+    checkDateIntervalDayTimeArithmetic("2003-04-10", plus, "-1-6", "2001-10-10");
+
+    checkDateIntervalDayTimeArithmetic("2001-01-01", minus, "0-0", "2001-01-01");
+    checkDateIntervalDayTimeArithmetic("2001-01-01", minus, "0-1", "2000-12-01");
+    checkDateIntervalDayTimeArithmetic("2001-01-01", minus, "1-0", "2000-01-01");
+    checkDateIntervalDayTimeArithmetic("2001-01-01", minus, "1-1", "1999-12-01");
+    checkDateIntervalDayTimeArithmetic("2001-10-10", minus, "1-6", "2000-04-10");
+    checkDateIntervalDayTimeArithmetic("2003-04-10", minus, "-1-6", "2004-10-10");
+
+    // end of month behavior
+    checkDateIntervalDayTimeArithmetic("2001-01-28", plus, "0-1", "2001-02-28");
+    checkDateIntervalDayTimeArithmetic("2001-01-29", plus, "0-1", "2001-02-28");
+    checkDateIntervalDayTimeArithmetic("2001-01-30", plus, "0-1", "2001-02-28");
+    checkDateIntervalDayTimeArithmetic("2001-01-31", plus, "0-1", "2001-02-28");
+    checkDateIntervalDayTimeArithmetic("2001-01-31", plus, "0-2", "2001-03-31");
+    checkDateIntervalDayTimeArithmetic("2001-02-28", plus, "0-1", "2001-03-28");
+    // leap year
+    checkDateIntervalDayTimeArithmetic("2004-01-28", plus, "0-1", "2004-02-28");
+    checkDateIntervalDayTimeArithmetic("2004-01-29", plus, "0-1", "2004-02-29");
+    checkDateIntervalDayTimeArithmetic("2004-01-30", plus, "0-1", "2004-02-29");
+    checkDateIntervalDayTimeArithmetic("2004-01-31", plus, "0-1", "2004-02-29");
+  }
+
+  @Test
+  public void testIntervalYearMonthArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkIntervalYearMonthArithmetic("0-0", plus, "0-0", "0-0");
+    checkIntervalYearMonthArithmetic("0-0", plus, "4-5", "4-5");
+    checkIntervalYearMonthArithmetic("4-5", plus, "0-0", "4-5");
+    checkIntervalYearMonthArithmetic("0-0", plus, "1-1", "1-1");
+    checkIntervalYearMonthArithmetic("1-1", plus, "0-0", "1-1");
+
+    checkIntervalYearMonthArithmetic("0-0", minus, "0-0", "0-0");
+    checkIntervalYearMonthArithmetic("0-0", minus, "1-0", "-1-0");
+    checkIntervalYearMonthArithmetic("1-2", minus, "1-1", "0-1");
+    checkIntervalYearMonthArithmetic("0-0", minus, "1-1", "-1-1");
+    checkIntervalYearMonthArithmetic("-1-1", minus, "1-1", "-2-2");
+    checkIntervalYearMonthArithmetic("-1-1", minus, "-1-1", "0-0");
+
+    checkIntervalYearMonthArithmetic(null, plus, "1-1", null);
+    checkIntervalYearMonthArithmetic("1-1", plus, null, null);
+    checkIntervalYearMonthArithmetic(null, minus, "1-1", null);
+    checkIntervalYearMonthArithmetic("1-1", minus, null, null);
+  }
+
+  @Test
+  public void testTimestampIntervalDayTimeArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, "1 1:1:1",
+        "2001-01-02 02:03:04");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1",
+        "2001-01-02 02:03:04.456");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1.555",
+        "2001-01-02 02:03:05.011");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, "1 1:1:1.555555555",
+        "2001-01-02 02:03:04.555555555");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1.555555555",
+        "2001-01-02 02:03:05.011555555");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.499",
+        "2001-01-02 02:03:04.999");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.500",
+        "2001-01-02 02:03:05.0");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.501",
+        "2001-01-02 02:03:05.001");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.4999999999",
+        "2001-01-02 02:03:04.999999999");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.500",
+        "2001-01-02 02:03:05.0");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.500000001",
+        "2001-01-02 02:03:05.000000001");
+
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, "0 01:02:03",
+        "2001-01-01 00:00:00");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, "0 0:0:0",
+        "2001-01-01 01:02:03");
+
+    checkTsIntervalDayTimeArithmetic(null, plus, "1 1:1:1.555555555",
+        null);
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, null,
+        null);
+    checkTsIntervalDayTimeArithmetic(null, minus, "1 1:1:1.555555555",
+        null);
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, null,
+        null);
+
+    // Try some time zone boundaries
+    TimeZone originalTz = TimeZone.getDefault();
+    try {
+      // America/Los_Angeles DST dates - 2015-03-08 02:00:00/2015-11-01 02:00:00
+      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
+
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:58", plus, "0 0:0:01",
+          "2015-03-08 01:59:59");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59", plus, "0 0:0:01",
+          "2015-03-08 03:00:00");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:01",
+          "2015-03-08 01:59:59");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.005",
+          "2015-03-08 03:00:00");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.0051",
+          "2015-03-08 03:00:00.0001");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:0.005",
+          "2015-03-08 01:59:59.995");
+      checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:58", plus, "0 0:0:01",
+          "2015-11-01 01:59:59");
+      checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:59", plus, "0 0:0:01",
+          "2015-11-01 02:00:00");
+
+      // UTC has no such adjustment
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:58", plus, "0 0:0:01",
+          "2015-03-08 01:59:59");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59", plus, "0 0:0:01",
+          "2015-03-08 02:00:00");
+    } finally {
+      TimeZone.setDefault(originalTz);
+    }
+  }
+
+  @Test
+  public void testIntervalDayTimeArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkIntervalDayTimeArithmetic("0 0:0:0", plus, "0 0:0:0", "0 0:0:0");
+    checkIntervalDayTimeArithmetic("0 01:02:03", plus, "6 0:0:0.0001", "6 01:02:03.0001");
+    checkIntervalDayTimeArithmetic("6 0:0:0.0001", plus, "0 01:02:03", "6 01:02:03.0001");
+    checkIntervalDayTimeArithmetic("0 01:02:03", plus, "1 10:10:10.0001", "1 11:12:13.0001");
+    checkIntervalDayTimeArithmetic("1 10:10:10.0001", plus, "0 01:02:03", "1 11:12:13.0001");
+    checkIntervalDayTimeArithmetic("0 0:0:0.900000000", plus, "0 0:0:0.099999999", "0 0:0:0.999999999");
+    checkIntervalDayTimeArithmetic("0 0:0:0.900000001", plus, "0 0:0:0.099999999", "0 0:0:1");
+    checkIntervalDayTimeArithmetic("0 0:0:0.900000002", plus, "0 0:0:0.099999999", "0 0:0:1.000000001");
+
+    checkIntervalDayTimeArithmetic("0 0:0:0", minus, "0 0:0:0", "0 0:0:0");
+    checkIntervalDayTimeArithmetic("0 0:0:0", minus, "0 0:0:0.123", "-0 0:0:0.123");
+    checkIntervalDayTimeArithmetic("3 4:5:6.789", minus, "1 1:1:1.111", "2 3:4:5.678");
+    checkIntervalDayTimeArithmetic("0 0:0:0.0", minus, "1 1:1:1.111", "-1 1:1:1.111");
+    checkIntervalDayTimeArithmetic("-1 1:1:1.222", minus, "1 1:1:1.111", "-2 2:2:2.333");
+    checkIntervalDayTimeArithmetic("-1 1:1:1.111", minus, "-1 1:1:1.111", "0 0:0:0");
+
+    checkIntervalDayTimeArithmetic(null, plus, "1 1:1:1.111", null);
+    checkIntervalDayTimeArithmetic("1 1:1:1.111", plus, null, null);
+    checkIntervalDayTimeArithmetic(null, minus, "1 1:1:1.111", null);
+    checkIntervalDayTimeArithmetic("1 1:1:1.111", minus, null, null);
+  }
+
+  @Test
+  public void testTimestampSubtraction() throws Exception {
+    checkTsArithmetic("2001-01-01 00:00:00", "2001-01-01 00:00:00", "0 0:0:0");
+    checkTsArithmetic("2002-02-02 01:01:01", "2001-01-01 00:00:00", "397 1:1:1");
+    checkTsArithmetic("2001-01-01 00:00:00", "2002-02-02 01:01:01", "-397 1:1:1");
+    checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 00:00:00", "1 0:0:0");
+    checkTsArithmetic("2014-12-31 00:00:00", "2015-01-01 00:00:00", "-1 0:0:0");
+    checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 23:59:59", "0 0:0:01");
+    checkTsArithmetic("2014-12-31 23:59:59", "2015-01-01 00:00:00", "-0 0:0:01");
+    checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 23:59:59.9999", "0 0:0:00.0001");
+    checkTsArithmetic("2014-12-31 23:59:59.9999", "2015-01-01 00:00:00", "-0 0:0:00.0001");
+    checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 11:12:13.000000001", "0 12:47:46.999999999");
+    checkTsArithmetic("2014-12-31 11:12:13.000000001", "2015-01-01 00:00:00", "-0 12:47:46.999999999");
+
+    // Test that timestamp arithmetic is done in UTC and then converted back to local timezone,
+    // matching Oracle behavior.
+    TimeZone originalTz = TimeZone.getDefault();
+    try {
+      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
+      checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 1:0:0");
+      checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 1:0:0");
+      checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 1:0:0");
+      checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 1:0:0");
+
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 0:0:0");
+      checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 0:0:0");
+      checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 0:0:0");
+      checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 0:0:0");
+    } finally {
+      TimeZone.setDefault(originalTz);
+    }
+  }
+
+  private static void checkTimestampIntervalYearMonthArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    Timestamp leftTs = null;
+    if (left != null) {
+      leftTs = Timestamp.valueOf(left);
+    }
+    HiveIntervalYearMonth rightInterval = null;
+    if (right != null) {
+      rightInterval = HiveIntervalYearMonth.valueOf(right);
+    }
+    Timestamp expectedResult = null;
+    if (expected != null) {
+      expectedResult = Timestamp.valueOf(expected);
+    }
+    Timestamp testResult = null;
+
+    DateTimeMath dtm = new DateTimeMath();
+    switch (operationType) {
+      case '-':
+        testResult = dtm.subtract(leftTs, rightInterval);
+        break;
+      case '+':
+        testResult = dtm.add(leftTs, rightInterval);
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftTs, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkDateIntervalDayTimeArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    Date leftDt = null;
+    if (left != null) {
+      leftDt = Date.valueOf(left);
+    }
+    HiveIntervalYearMonth rightInterval = null;
+    if (right != null) {
+      rightInterval = HiveIntervalYearMonth.valueOf(right);
+    }
+    Date expectedResult = null;
+    if (expected != null) {
+      expectedResult = Date.valueOf(expected);
+    }
+    Date testResult = null;
+
+    DateTimeMath dtm = new DateTimeMath();
+    switch (operationType) {
+      case '-':
+        testResult = dtm.subtract(leftDt, rightInterval);
+        break;
+      case '+':
+        testResult = dtm.add(leftDt, rightInterval);
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftDt, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkIntervalYearMonthArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    HiveIntervalYearMonth leftInterval = left == null ? null: HiveIntervalYearMonth.valueOf(left);
+    HiveIntervalYearMonth rightInterval = right == null ? null : HiveIntervalYearMonth.valueOf(right);
+    HiveIntervalYearMonth expectedResult = expected == null ? null : HiveIntervalYearMonth.valueOf(expected);
+    HiveIntervalYearMonth testResult = null;
+
+    DateTimeMath dtm = new DateTimeMath();
+    switch (operationType) {
+      case '-':
+        testResult = dtm.subtract(leftInterval, rightInterval);
+        break;
+      case '+':
+        testResult = dtm.add(leftInterval, rightInterval);
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftInterval, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkTsIntervalDayTimeArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    Timestamp leftTs = null;
+    if (left != null) {
+      leftTs = Timestamp.valueOf(left);
+    }
+    HiveIntervalDayTime rightInterval = right == null ? null : HiveIntervalDayTime.valueOf(right);
+    Timestamp expectedResult = null;
+    if (expected != null) {
+      expectedResult = Timestamp.valueOf(expected);
+    }
+    Timestamp testResult = null;
+
+    DateTimeMath dtm = new DateTimeMath();
+    switch (operationType) {
+      case '-':
+        testResult = dtm.subtract(leftTs, rightInterval);
+        break;
+      case '+':
+        testResult = dtm.add(leftTs, rightInterval);
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftTs, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkIntervalDayTimeArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    HiveIntervalDayTime leftInterval = left == null ? null : HiveIntervalDayTime.valueOf(left);
+    HiveIntervalDayTime rightInterval = right == null ? null : HiveIntervalDayTime.valueOf(right);
+    HiveIntervalDayTime expectedResult = expected == null ? null : HiveIntervalDayTime.valueOf(expected);
+    HiveIntervalDayTime testResult = null;
+
+    DateTimeMath dtm = new DateTimeMath();
+    switch (operationType) {
+      case '-':
+        testResult = dtm.subtract(leftInterval, rightInterval);
+        break;
+      case '+':
+        testResult = dtm.add(leftInterval, rightInterval);
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftInterval, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkTsArithmetic(
+      String left, String right, String expected) throws Exception {
+    Timestamp leftTs = null;
+    if (left != null) {
+      leftTs = Timestamp.valueOf(left);
+    }
+    Timestamp rightTs = null;
+    if (right != null) {
+      rightTs = Timestamp.valueOf(right);
+    }
+    HiveIntervalDayTime expectedResult = null;
+    if (expected != null) {
+      expectedResult = HiveIntervalDayTime.valueOf(expected);
+    }
+    DateTimeMath dtm = new DateTimeMath();
+    HiveIntervalDayTime testResult =
+        dtm.subtract(leftTs, rightTs);
+
+    assertEquals(String.format("%s - %s", leftTs, rightTs),
+        expectedResult, testResult);
+  }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Thu Mar 19 19:05:28 2015
@@ -337,6 +337,8 @@ public final class FunctionRegistry {
 
     system.registerGenericUDF(serdeConstants.DATE_TYPE_NAME, GenericUDFToDate.class);
     system.registerGenericUDF(serdeConstants.TIMESTAMP_TYPE_NAME, GenericUDFTimestamp.class);
+    system.registerGenericUDF(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, GenericUDFToIntervalYearMonth.class);
+    system.registerGenericUDF(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME, GenericUDFToIntervalDayTime.class);
     system.registerGenericUDF(serdeConstants.BINARY_TYPE_NAME, GenericUDFToBinary.class);
     system.registerGenericUDF(serdeConstants.DECIMAL_TYPE_NAME, GenericUDFToDecimal.class);
     system.registerGenericUDF(serdeConstants.VARCHAR_TYPE_NAME, GenericUDFToVarchar.class);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java Thu Mar 19 19:05:28 2015
@@ -82,6 +82,8 @@ public abstract class MapJoinKey {
     SUPPORTED_PRIMITIVES.add(PrimitiveCategory.STRING);
     SUPPORTED_PRIMITIVES.add(PrimitiveCategory.DATE);
     SUPPORTED_PRIMITIVES.add(PrimitiveCategory.TIMESTAMP);
+    SUPPORTED_PRIMITIVES.add(PrimitiveCategory.INTERVAL_YEAR_MONTH);
+    SUPPORTED_PRIMITIVES.add(PrimitiveCategory.INTERVAL_DAY_TIME);
     SUPPORTED_PRIMITIVES.add(PrimitiveCategory.BINARY);
     SUPPORTED_PRIMITIVES.add(PrimitiveCategory.VARCHAR);
     SUPPORTED_PRIMITIVES.add(PrimitiveCategory.CHAR);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java Thu Mar 19 19:05:28 2015
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.optimizer.calcite.translator;
 
+import java.math.BigDecimal;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
@@ -26,11 +27,14 @@ import org.apache.calcite.rel.core.JoinR
 import org.apache.calcite.rel.core.TableScan;
 import org.apache.calcite.rex.RexLiteral;
 import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hive.common.util.DateTimeMath;
 
 class ASTBuilder {
 
@@ -218,6 +222,23 @@ class ASTBuilder {
       val = "'" + val + "'";
     }
       break;
+    case INTERVAL_YEAR_MONTH: {
+      type = HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL;
+      BigDecimal monthsBd = (BigDecimal) literal.getValue();
+      HiveIntervalYearMonth intervalYearMonth = new HiveIntervalYearMonth(monthsBd.intValue());
+      val = "'" + intervalYearMonth.toString() + "'";
+      break;
+    }
+    case INTERVAL_DAY_TIME: {
+      type = HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL;
+      BigDecimal millisBd = (BigDecimal) literal.getValue();
+
+      // Calcite literal is in millis, convert to seconds
+      BigDecimal secsBd = millisBd.divide(BigDecimal.valueOf(1000));
+      HiveIntervalDayTime intervalDayTime = new HiveIntervalDayTime(secsBd);
+      val = "'" + intervalDayTime.toString() + "'";
+      break;
+    }
     case NULL:
       type = HiveParser.TOK_NULL;
       break;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java Thu Mar 19 19:05:28 2015
@@ -29,6 +29,7 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.calcite.avatica.util.TimeUnit;
 import org.apache.calcite.plan.RelOptCluster;
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.type.RelDataType;
@@ -37,14 +38,18 @@ import org.apache.calcite.rex.RexBuilder
 import org.apache.calcite.rex.RexCall;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexUtil;
+import org.apache.calcite.sql.SqlIntervalQualifier;
 import org.apache.calcite.sql.SqlOperator;
 import org.apache.calcite.sql.fun.SqlCastFunction;
+import org.apache.calcite.sql.parser.SqlParserPos;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
@@ -371,13 +376,15 @@ public class RexNodeConverter {
       calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Double) value), calciteDataType);
       break;
     case CHAR:
-      if (value instanceof HiveChar)
+      if (value instanceof HiveChar) {
         value = ((HiveChar) value).getValue();
+      }
       calciteLiteral = rexBuilder.makeLiteral((String) value);
       break;
     case VARCHAR:
-      if (value instanceof HiveVarchar)
+      if (value instanceof HiveVarchar) {
         value = ((HiveVarchar) value).getValue();
+      }
       calciteLiteral = rexBuilder.makeLiteral((String) value);
       break;
     case STRING:
@@ -398,6 +405,21 @@ public class RexNodeConverter {
       }
       calciteLiteral = rexBuilder.makeTimestampLiteral(c, RelDataType.PRECISION_NOT_SPECIFIED);
       break;
+    case INTERVAL_YEAR_MONTH:
+      // Calcite year-month literal value is months as BigDecimal
+      BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
+      calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths,
+          new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1,1)));
+      break;
+    case INTERVAL_DAY_TIME:
+      // Calcite day-time interval is millis value as BigDecimal
+      // Seconds converted to millis
+      BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
+      // Nanos converted to millis
+      BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
+      calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd),
+          new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1,1)));
+      break;
     case VOID:
       calciteLiteral = cluster.getRexBuilder().makeLiteral(null,
           cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java Thu Mar 19 19:05:28 2015
@@ -22,11 +22,14 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.calcite.avatica.util.TimeUnit;
 import org.apache.calcite.plan.RelOptCluster;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
 import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.SqlIntervalQualifier;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
@@ -65,6 +68,8 @@ public class TypeConverter {
     b.put(SqlTypeName.DOUBLE.getName(), new HiveToken(HiveParser.TOK_DOUBLE, "TOK_DOUBLE"));
     b.put(SqlTypeName.DATE.getName(), new HiveToken(HiveParser.TOK_DATE, "TOK_DATE"));
     b.put(SqlTypeName.TIMESTAMP.getName(), new HiveToken(HiveParser.TOK_TIMESTAMP, "TOK_TIMESTAMP"));
+    b.put(SqlTypeName.INTERVAL_YEAR_MONTH.getName(), new HiveToken(HiveParser.TOK_INTERVAL_YEAR_MONTH, "TOK_INTERVAL_YEAR_MONTH"));
+    b.put(SqlTypeName.INTERVAL_DAY_TIME.getName(), new HiveToken(HiveParser.TOK_INTERVAL_DAY_TIME, "TOK_INTERVAL_DAY_TIME"));
     b.put(SqlTypeName.BINARY.getName(), new HiveToken(HiveParser.TOK_BINARY, "TOK_BINARY"));
     calciteToHiveTypeNameMap = b.build();
   };
@@ -162,6 +167,14 @@ public class TypeConverter {
     case TIMESTAMP:
       convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP);
       break;
+    case INTERVAL_YEAR_MONTH:
+      convertedType = dtFactory.createSqlIntervalType(
+          new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1,1)));
+      break;
+    case INTERVAL_DAY_TIME:
+      convertedType = dtFactory.createSqlIntervalType(
+          new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1,1)));
+      break;
     case BINARY:
       convertedType = dtFactory.createSqlType(SqlTypeName.BINARY);
       break;
@@ -277,6 +290,10 @@ public class TypeConverter {
       return TypeInfoFactory.dateTypeInfo;
     case TIMESTAMP:
       return TypeInfoFactory.timestampTypeInfo;
+    case INTERVAL_YEAR_MONTH:
+      return TypeInfoFactory.intervalYearMonthTypeInfo;
+    case INTERVAL_DAY_TIME:
+      return TypeInfoFactory.intervalDayTimeTypeInfo;
     case BINARY:
       return TypeInfoFactory.binaryTypeInfo;
     case DECIMAL:

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g Thu Mar 19 19:05:28 2015
@@ -122,6 +122,7 @@ KW_DOUBLE: 'DOUBLE';
 KW_DATE: 'DATE';
 KW_DATETIME: 'DATETIME';
 KW_TIMESTAMP: 'TIMESTAMP';
+KW_INTERVAL: 'INTERVAL';
 KW_DECIMAL: 'DECIMAL';
 KW_STRING: 'STRING';
 KW_CHAR: 'CHAR';
@@ -297,6 +298,12 @@ KW_AUTHORIZATION: 'AUTHORIZATION';
 KW_CONF: 'CONF';
 KW_VALUES: 'VALUES';
 KW_RELOAD: 'RELOAD';
+KW_YEAR: 'YEAR';
+KW_MONTH: 'MONTH';
+KW_DAY: 'DAY';
+KW_HOUR: 'HOUR';
+KW_MINUTE: 'MINUTE';
+KW_SECOND: 'SECOND';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Thu Mar 19 19:05:28 2015
@@ -111,6 +111,16 @@ TOK_DATELITERAL;
 TOK_DATETIME;
 TOK_TIMESTAMP;
 TOK_TIMESTAMPLITERAL;
+TOK_INTERVAL_YEAR_MONTH;
+TOK_INTERVAL_YEAR_MONTH_LITERAL;
+TOK_INTERVAL_DAY_TIME;
+TOK_INTERVAL_DAY_TIME_LITERAL;
+TOK_INTERVAL_YEAR_LITERAL;
+TOK_INTERVAL_MONTH_LITERAL;
+TOK_INTERVAL_DAY_LITERAL;
+TOK_INTERVAL_HOUR_LITERAL;
+TOK_INTERVAL_MINUTE_LITERAL;
+TOK_INTERVAL_SECOND_LITERAL;
 TOK_STRING;
 TOK_CHAR;
 TOK_VARCHAR;
@@ -2034,6 +2044,9 @@ primitiveType
     | KW_DATE          ->    TOK_DATE
     | KW_DATETIME      ->    TOK_DATETIME
     | KW_TIMESTAMP     ->    TOK_TIMESTAMP
+    // Uncomment to allow intervals as table column types
+    //| KW_INTERVAL KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH
+    //| KW_INTERVAL KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME
     | KW_STRING        ->    TOK_STRING
     | KW_BINARY        ->    TOK_BINARY
     | KW_DECIMAL (LPAREN prec=Number (COMMA scale=Number)? RPAREN)? -> ^(TOK_DECIMAL $prec? $scale?)

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g?rev=1667850&r1=1667849&r2=1667850&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g Thu Mar 19 19:05:28 2015
@@ -248,6 +248,7 @@ constant
     Number
     | dateLiteral
     | timestampLiteral
+    | intervalLiteral
     | StringLiteral
     | stringLiteralSequence
     | BigintLiteral
@@ -292,6 +293,26 @@ timestampLiteral
     KW_CURRENT_TIMESTAMP -> ^(TOK_FUNCTION KW_CURRENT_TIMESTAMP)
     ;
 
+intervalLiteral
+    :
+    KW_INTERVAL StringLiteral qualifiers=intervalQualifiers ->
+    {
+      adaptor.create(qualifiers.tree.token.getType(), $StringLiteral.text)
+    }
+    ;
+
+intervalQualifiers
+    :
+    KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH_LITERAL
+    | KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME_LITERAL
+    | KW_YEAR -> TOK_INTERVAL_YEAR_LITERAL
+    | KW_MONTH -> TOK_INTERVAL_MONTH_LITERAL
+    | KW_DAY -> TOK_INTERVAL_DAY_LITERAL
+    | KW_HOUR -> TOK_INTERVAL_HOUR_LITERAL
+    | KW_MINUTE -> TOK_INTERVAL_MINUTE_LITERAL
+    | KW_SECOND -> TOK_INTERVAL_SECOND_LITERAL
+    ;
+
 expression
 @init { gParent.pushMsg("expression specification", state); }
 @after { gParent.popMsg(state); }
@@ -592,22 +613,22 @@ nonReserved
     :
     KW_ADD | KW_ADMIN | KW_AFTER | KW_ANALYZE | KW_ARCHIVE | KW_ASC | KW_BEFORE | KW_BUCKET | KW_BUCKETS
     | KW_CASCADE | KW_CHANGE | KW_CLUSTER | KW_CLUSTERED | KW_CLUSTERSTATUS | KW_COLLECTION | KW_COLUMNS
-    | KW_COMMENT | KW_COMPACT | KW_COMPACTIONS | KW_COMPUTE | KW_CONCATENATE | KW_CONTINUE | KW_DATA 
+    | KW_COMMENT | KW_COMPACT | KW_COMPACTIONS | KW_COMPUTE | KW_CONCATENATE | KW_CONTINUE | KW_DATA | KW_DAY
     | KW_DATABASES | KW_DATETIME | KW_DBPROPERTIES | KW_DEFERRED | KW_DEFINED | KW_DELIMITED | KW_DEPENDENCY 
     | KW_DESC | KW_DIRECTORIES | KW_DIRECTORY | KW_DISABLE | KW_DISTRIBUTE | KW_ELEM_TYPE 
     | KW_ENABLE | KW_ESCAPED | KW_EXCLUSIVE | KW_EXPLAIN | KW_EXPORT | KW_FIELDS | KW_FILE | KW_FILEFORMAT
-    | KW_FIRST | KW_FORMAT | KW_FORMATTED | KW_FUNCTIONS | KW_HOLD_DDLTIME | KW_IDXPROPERTIES | KW_IGNORE
+    | KW_FIRST | KW_FORMAT | KW_FORMATTED | KW_FUNCTIONS | KW_HOLD_DDLTIME | KW_HOUR | KW_IDXPROPERTIES | KW_IGNORE
     | KW_INDEX | KW_INDEXES | KW_INPATH | KW_INPUTDRIVER | KW_INPUTFORMAT | KW_ITEMS | KW_JAR
     | KW_KEYS | KW_KEY_TYPE | KW_LIMIT | KW_LINES | KW_LOAD | KW_LOCATION | KW_LOCK | KW_LOCKS | KW_LOGICAL | KW_LONG
-    | KW_MAPJOIN | KW_MATERIALIZED | KW_MINUS | KW_MSCK | KW_NOSCAN | KW_NO_DROP | KW_OFFLINE | KW_OPTION
+    | KW_MAPJOIN | KW_MATERIALIZED | KW_MINUS | KW_MINUTE | KW_MONTH | KW_MSCK | KW_NOSCAN | KW_NO_DROP | KW_OFFLINE | KW_OPTION
     | KW_OUTPUTDRIVER | KW_OUTPUTFORMAT | KW_OVERWRITE | KW_OWNER | KW_PARTITIONED | KW_PARTITIONS | KW_PLUS | KW_PRETTY | KW_PRINCIPALS
     | KW_PROTECTION | KW_PURGE | KW_READ | KW_READONLY | KW_REBUILD | KW_RECORDREADER | KW_RECORDWRITER
     | KW_REGEXP | KW_RELOAD | KW_RENAME | KW_REPAIR | KW_REPLACE | KW_RESTRICT | KW_REWRITE | KW_RLIKE
-    | KW_ROLE | KW_ROLES | KW_SCHEMA | KW_SCHEMAS | KW_SEMI | KW_SERDE | KW_SERDEPROPERTIES | KW_SERVER | KW_SETS | KW_SHARED
+    | KW_ROLE | KW_ROLES | KW_SCHEMA | KW_SCHEMAS | KW_SECOND | KW_SEMI | KW_SERDE | KW_SERDEPROPERTIES | KW_SERVER | KW_SETS | KW_SHARED
     | KW_SHOW | KW_SHOW_DATABASE | KW_SKEWED | KW_SORT | KW_SORTED | KW_SSL | KW_STATISTICS | KW_STORED
     | KW_STREAMTABLE | KW_STRING | KW_STRUCT | KW_TABLES | KW_TBLPROPERTIES | KW_TEMPORARY | KW_TERMINATED
     | KW_TINYINT | KW_TOUCH | KW_TRANSACTIONS | KW_UNARCHIVE | KW_UNDO | KW_UNIONTYPE | KW_UNLOCK | KW_UNSET
-    | KW_UNSIGNED | KW_URI | KW_USE | KW_UTC | KW_UTCTIMESTAMP | KW_VALUE_TYPE | KW_VIEW | KW_WHILE
+    | KW_UNSIGNED | KW_URI | KW_USE | KW_UTC | KW_UTCTIMESTAMP | KW_VALUE_TYPE | KW_VIEW | KW_WHILE | KW_YEAR
     ;
 
 //The following SQL2011 reserved keywords are used as cast function name only, it is a subset of the sql11ReservedKeywordsUsedAsIdentifier.



Mime
View raw message