hive-commits mailing list archives

From hashut...@apache.org
Subject svn commit: r1670462 [2/26] - in /hive/branches/cbo: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/ common/src/java/org/apache/hadoop/hive/conf/ common/src/java/org/apache/hive/common/util/ common/src/test/org/apache/h...
Date Tue, 31 Mar 2015 20:14:07 GMT
Modified: hive/branches/cbo/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java (original)
+++ hive/branches/cbo/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java Tue Mar 31 20:14:02 2015
@@ -35,6 +35,89 @@ public class GenVectorCode extends Task
 
   private static String [][] templateExpansions =
     {
+      // The following datetime/interval arithmetic operations can be done using the vectorized values
+      {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "interval_year_month", "interval_year_month", "+"},
+      {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "interval_year_month", "interval_year_month", "+"},
+      {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "interval_year_month", "interval_year_month", "+"},
+
+      {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "interval_year_month", "interval_year_month", "-"},
+      {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "interval_year_month", "interval_year_month", "-"},
+      {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "interval_year_month", "interval_year_month", "-"},
+
+      {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "interval_day_time", "interval_day_time", "+"},
+      {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "interval_day_time", "+"},
+      {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "interval_day_time", "+"},
+
+      {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "interval_day_time", "interval_day_time", "-"},
+      {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "interval_day_time", "interval_day_time", "-"},
+      {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "interval_day_time", "interval_day_time", "-"},
+
+      {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "interval_day_time", "timestamp", "+"},
+      {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "timestamp", "+"},
+      {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "timestamp", "+"},
+
+      {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "timestamp", "interval_day_time", "+"},
+      {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "timestamp", "interval_day_time", "+"},
+      {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "timestamp", "interval_day_time", "+"},
+
+      {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "timestamp", "interval_day_time", "-"},
+      {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "interval_day_time", "-"},
+      {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "interval_day_time", "-"},
+
+      {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "timestamp", "timestamp", "-"},
+      {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "timestamp", "-"},
+      {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "timestamp", "-"},
+
+      // The following datetime/interval arithmetic functions require type conversion for one or both operands
+      {"ColumnArithmeticColumnWithConvert", "Subtract", "date", "date", "-", "TimestampUtils.daysToNanoseconds", "TimestampUtils.daysToNanoseconds"},
+      {"ScalarArithmeticColumnWithConvert", "Subtract", "date", "date", "-", "TimestampUtils.daysToNanoseconds", "TimestampUtils.daysToNanoseconds"},
+      {"ColumnArithmeticScalarWithConvert", "Subtract", "date", "date", "-", "TimestampUtils.daysToNanoseconds", "TimestampUtils.daysToNanoseconds"},
+
+      {"ColumnArithmeticColumnWithConvert", "Subtract", "date", "timestamp", "-", "TimestampUtils.daysToNanoseconds", ""},
+      {"ScalarArithmeticColumnWithConvert", "Subtract", "date", "timestamp", "-", "TimestampUtils.daysToNanoseconds", ""},
+      {"ColumnArithmeticScalarWithConvert", "Subtract", "date", "timestamp", "-", "TimestampUtils.daysToNanoseconds", ""},
+
+      {"ColumnArithmeticColumnWithConvert", "Subtract", "timestamp", "date", "-", "", "TimestampUtils.daysToNanoseconds"},
+      {"ScalarArithmeticColumnWithConvert", "Subtract", "timestamp", "date", "-", "", "TimestampUtils.daysToNanoseconds"},
+      {"ColumnArithmeticScalarWithConvert", "Subtract", "timestamp", "date", "-", "", "TimestampUtils.daysToNanoseconds"},
+
+      {"ColumnArithmeticColumnWithConvert", "Add", "date", "interval_day_time", "+", "TimestampUtils.daysToNanoseconds", ""},
+      {"ScalarArithmeticColumnWithConvert", "Add", "date", "interval_day_time", "+", "TimestampUtils.daysToNanoseconds", ""},
+      {"ColumnArithmeticScalarWithConvert", "Add", "date", "interval_day_time", "+", "TimestampUtils.daysToNanoseconds", ""},
+
+      {"ColumnArithmeticColumnWithConvert", "Subtract", "date", "interval_day_time", "-", "TimestampUtils.daysToNanoseconds", ""},
+      {"ScalarArithmeticColumnWithConvert", "Subtract", "date", "interval_day_time", "-", "TimestampUtils.daysToNanoseconds", ""},
+      {"ColumnArithmeticScalarWithConvert", "Subtract", "date", "interval_day_time", "-", "TimestampUtils.daysToNanoseconds", ""},
+
+      {"ColumnArithmeticColumnWithConvert", "Add", "interval_day_time", "date", "+", "", "TimestampUtils.daysToNanoseconds"},
+      {"ScalarArithmeticColumnWithConvert", "Add", "interval_day_time", "date", "+", "", "TimestampUtils.daysToNanoseconds"},
+      {"ColumnArithmeticScalarWithConvert", "Add", "interval_day_time", "date", "+", "", "TimestampUtils.daysToNanoseconds"},
+
+      // Most year-month interval arithmetic needs its own generation
+      {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Add", "date", "interval_year_month", "+", "", "dtm.addMonthsToDays"},
+      {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Add", "date", "interval_year_month", "+", "", "dtm.addMonthsToDays"},
+      {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Add", "date", "interval_year_month", "+", "", "dtm.addMonthsToDays"},
+
+      {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Subtract", "date", "interval_year_month", "-", "", "dtm.addMonthsToDays"},
+      {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Subtract", "date", "interval_year_month", "-", "", "dtm.addMonthsToDays"},
+      {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Subtract", "date", "interval_year_month", "-", "", "dtm.addMonthsToDays"},
+
+      {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Add", "timestamp", "interval_year_month", "+", "", "dtm.addMonthsToNanosUtc"},
+      {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Add", "timestamp", "interval_year_month", "+", "", "dtm.addMonthsToNanosUtc"},
+      {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Add", "timestamp", "interval_year_month", "+", "", "dtm.addMonthsToNanosUtc"},
+
+      {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Subtract", "timestamp", "interval_year_month", "-", "", "dtm.addMonthsToNanosUtc"},
+      {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Subtract", "timestamp", "interval_year_month", "-", "", "dtm.addMonthsToNanosUtc"},
+      {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Subtract", "timestamp", "interval_year_month", "-", "", "dtm.addMonthsToNanosUtc"},
+
+      {"IntervalColumnArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "date", "+", "", "dtm.addMonthsToDays"},
+      {"IntervalScalarArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "date", "+", "", "dtm.addMonthsToDays"},
+      {"IntervalColumnArithmeticDateTimeScalarWithConvert", "Add", "interval_year_month", "date", "+", "", "dtm.addMonthsToDays"},
+
+      {"IntervalColumnArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "timestamp", "+", "", "dtm.addMonthsToNanosUtc"},
+      {"IntervalScalarArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "timestamp", "+", "", "dtm.addMonthsToNanosUtc"},
+      {"IntervalColumnArithmeticDateTimeScalarWithConvert", "Add", "interval_year_month", "timestamp", "+", "", "dtm.addMonthsToNanosUtc"},
+
       {"ColumnArithmeticScalar", "Add", "long", "long", "+"},
       {"ColumnArithmeticScalar", "Subtract", "long", "long", "-"},
       {"ColumnArithmeticScalar", "Multiply", "long", "long", "*"},
@@ -528,6 +611,88 @@ public class GenVectorCode extends Task
       {"ColumnCompareColumn", "GreaterEqual", "long", "long", ">="},
       {"ColumnCompareColumn", "GreaterEqual", "double", "long", ">="},
 
+      // Interval comparisons
+      {"DTIScalarCompareColumn", "Equal", "interval_year_month"},
+      {"DTIScalarCompareColumn", "Equal", "interval_day_time"},
+      {"DTIScalarCompareColumn", "NotEqual", "interval_year_month"},
+      {"DTIScalarCompareColumn", "NotEqual", "interval_day_time"},
+      {"DTIScalarCompareColumn", "Less", "interval_year_month"},
+      {"DTIScalarCompareColumn", "Less", "interval_day_time"},
+      {"DTIScalarCompareColumn", "LessEqual", "interval_year_month"},
+      {"DTIScalarCompareColumn", "LessEqual", "interval_day_time"},
+      {"DTIScalarCompareColumn", "Greater", "interval_year_month"},
+      {"DTIScalarCompareColumn", "Greater", "interval_day_time"},
+      {"DTIScalarCompareColumn", "GreaterEqual", "interval_year_month"},
+      {"DTIScalarCompareColumn", "GreaterEqual", "interval_day_time"},
+
+      {"DTIColumnCompareScalar", "Equal", "interval_year_month"},
+      {"DTIColumnCompareScalar", "Equal", "interval_day_time"},
+      {"DTIColumnCompareScalar", "NotEqual", "interval_year_month"},
+      {"DTIColumnCompareScalar", "NotEqual", "interval_day_time"},
+      {"DTIColumnCompareScalar", "Less", "interval_year_month"},
+      {"DTIColumnCompareScalar", "Less", "interval_day_time"},
+      {"DTIColumnCompareScalar", "LessEqual", "interval_year_month"},
+      {"DTIColumnCompareScalar", "LessEqual", "interval_day_time"},
+      {"DTIColumnCompareScalar", "Greater", "interval_year_month"},
+      {"DTIColumnCompareScalar", "Greater", "interval_day_time"},
+      {"DTIColumnCompareScalar", "GreaterEqual", "interval_year_month"},
+      {"DTIColumnCompareScalar", "GreaterEqual", "interval_day_time"},
+
+      {"FilterDTIScalarCompareColumn", "Equal", "interval_year_month"},
+      {"FilterDTIScalarCompareColumn", "Equal", "interval_day_time"},
+      {"FilterDTIScalarCompareColumn", "NotEqual", "interval_year_month"},
+      {"FilterDTIScalarCompareColumn", "NotEqual", "interval_day_time"},
+      {"FilterDTIScalarCompareColumn", "Less", "interval_year_month"},
+      {"FilterDTIScalarCompareColumn", "Less", "interval_day_time"},
+      {"FilterDTIScalarCompareColumn", "LessEqual", "interval_year_month"},
+      {"FilterDTIScalarCompareColumn", "LessEqual", "interval_day_time"},
+      {"FilterDTIScalarCompareColumn", "Greater", "interval_year_month"},
+      {"FilterDTIScalarCompareColumn", "Greater", "interval_day_time"},
+      {"FilterDTIScalarCompareColumn", "GreaterEqual", "interval_year_month"},
+      {"FilterDTIScalarCompareColumn", "GreaterEqual", "interval_day_time"},
+
+      {"FilterDTIColumnCompareScalar", "Equal", "interval_year_month"},
+      {"FilterDTIColumnCompareScalar", "Equal", "interval_day_time"},
+      {"FilterDTIColumnCompareScalar", "NotEqual", "interval_year_month"},
+      {"FilterDTIColumnCompareScalar", "NotEqual", "interval_day_time"},
+      {"FilterDTIColumnCompareScalar", "Less", "interval_year_month"},
+      {"FilterDTIColumnCompareScalar", "Less", "interval_day_time"},
+      {"FilterDTIColumnCompareScalar", "LessEqual", "interval_year_month"},
+      {"FilterDTIColumnCompareScalar", "LessEqual", "interval_day_time"},
+      {"FilterDTIColumnCompareScalar", "Greater", "interval_year_month"},
+      {"FilterDTIColumnCompareScalar", "Greater", "interval_day_time"},
+      {"FilterDTIColumnCompareScalar", "GreaterEqual", "interval_year_month"},
+      {"FilterDTIColumnCompareScalar", "GreaterEqual", "interval_day_time"},
+
+      // Date comparisons
+      {"DTIScalarCompareColumn", "Equal", "date"},
+      {"DTIScalarCompareColumn", "NotEqual", "date"},
+      {"DTIScalarCompareColumn", "Less", "date"},
+      {"DTIScalarCompareColumn", "LessEqual", "date"},
+      {"DTIScalarCompareColumn", "Greater", "date"},
+      {"DTIScalarCompareColumn", "GreaterEqual", "date"},
+
+      {"DTIColumnCompareScalar", "Equal", "date"},
+      {"DTIColumnCompareScalar", "NotEqual", "date"},
+      {"DTIColumnCompareScalar", "Less", "date"},
+      {"DTIColumnCompareScalar", "LessEqual", "date"},
+      {"DTIColumnCompareScalar", "Greater", "date"},
+      {"DTIColumnCompareScalar", "GreaterEqual", "date"},
+
+      {"FilterDTIScalarCompareColumn", "Equal", "date"},
+      {"FilterDTIScalarCompareColumn", "NotEqual", "date"},
+      {"FilterDTIScalarCompareColumn", "Less", "date"},
+      {"FilterDTIScalarCompareColumn", "LessEqual", "date"},
+      {"FilterDTIScalarCompareColumn", "Greater", "date"},
+      {"FilterDTIScalarCompareColumn", "GreaterEqual", "date"},
+
+      {"FilterDTIColumnCompareScalar", "Equal", "date"},
+      {"FilterDTIColumnCompareScalar", "NotEqual", "date"},
+      {"FilterDTIColumnCompareScalar", "Less", "date"},
+      {"FilterDTIColumnCompareScalar", "LessEqual", "date"},
+      {"FilterDTIColumnCompareScalar", "Greater", "date"},
+      {"FilterDTIColumnCompareScalar", "GreaterEqual", "date"},
+
       // template, <ClassNamePrefix>, <ReturnType>, <OperandType>, <FuncName>, <OperandCast>,
       //   <ResultCast>, <Cleanup> <VectorExprArgType>
       {"ColumnUnaryFunc", "FuncRound", "double", "double", "MathExpr.round", "", "", "", ""},
@@ -896,6 +1061,38 @@ public class GenVectorCode extends Task
         generateFilterDecimalScalarCompareColumn(tdesc);
       } else if (tdesc[0].equals("FilterDecimalColumnCompareColumn")) {
         generateFilterDecimalColumnCompareColumn(tdesc);
+      } else if (tdesc[0].equals("FilterDTIScalarCompareColumn")) {
+        generateFilterDTIScalarCompareColumn(tdesc);
+      } else if (tdesc[0].equals("FilterDTIColumnCompareScalar")) {
+        generateFilterDTIColumnCompareScalar(tdesc);
+      } else if (tdesc[0].equals("DTIScalarCompareColumn")) {
+        generateDTIScalarCompareColumn(tdesc);
+      } else if (tdesc[0].equals("DTIColumnCompareScalar")) {
+        generateDTIColumnCompareScalar(tdesc);
+      } else if (tdesc[0].equals("DTIColumnArithmeticDTIScalarNoConvert")) {
+        generateColumnArithmeticScalar(tdesc);
+      } else if (tdesc[0].equals("DTIScalarArithmeticDTIColumnNoConvert")) {
+        generateScalarArithmeticColumn(tdesc);
+      } else if (tdesc[0].equals("DTIColumnArithmeticDTIColumnNoConvert")) {
+        generateColumnArithmeticColumn(tdesc);
+      } else if (tdesc[0].equals("ColumnArithmeticColumnWithConvert")) {
+        generateColumnArithmeticColumnWithConvert(tdesc);
+      } else if (tdesc[0].equals("ScalarArithmeticColumnWithConvert")) {
+        generateScalarArithmeticColumnWithConvert(tdesc);
+      } else if (tdesc[0].equals("ColumnArithmeticScalarWithConvert")) {
+        generateColumnArithmeticScalarWithConvert(tdesc);
+      } else if (tdesc[0].equals("DateTimeColumnArithmeticIntervalColumnWithConvert")) {
+        generateDateTimeColumnArithmeticIntervalColumnWithConvert(tdesc);
+      } else if (tdesc[0].equals("DateTimeScalarArithmeticIntervalColumnWithConvert")) {
+        generateDateTimeScalarArithmeticIntervalColumnWithConvert(tdesc);
+      } else if (tdesc[0].equals("DateTimeColumnArithmeticIntervalScalarWithConvert")) {
+        generateDateTimeColumnArithmeticIntervalScalarWithConvert(tdesc);
+      } else if (tdesc[0].equals("IntervalColumnArithmeticDateTimeColumnWithConvert")) {
+        generateDateTimeColumnArithmeticIntervalColumnWithConvert(tdesc);
+      } else if (tdesc[0].equals("IntervalScalarArithmeticDateTimeColumnWithConvert")) {
+        generateDateTimeScalarArithmeticIntervalColumnWithConvert(tdesc);
+      } else if (tdesc[0].equals("IntervalColumnArithmeticDateTimeScalarWithConvert")) {
+        generateDateTimeColumnArithmeticIntervalScalarWithConvert(tdesc);
       } else {
         continue;
       }
@@ -1324,12 +1521,18 @@ public class GenVectorCode extends Task
     String className = getCamelCaseType(operandType) + "ColUnaryMinus";
         File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
     String templateString = readFile(templateFile);
+    String vectorExprArgType = operandType;
+    if (operandType.equals("long")) {
+      // interval types can use long version
+      vectorExprArgType = "int_interval_family";
+    }
     // Expand, and write result
     templateString = templateString.replaceAll("<ClassName>", className);
     templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
     templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
     templateString = templateString.replaceAll("<OperandType>", operandType);
     templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorExprArgType>", vectorExprArgType);
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
   }
@@ -1353,7 +1556,7 @@ public class GenVectorCode extends Task
     // Toss in timestamp and date.
     if (operandType.equals("long")) {
       // Let comparisons occur for DATE and TIMESTAMP, too.
-      vectorExprArgType = "int_datetime_family";
+      vectorExprArgType = "int_datetime_interval_family";
     }
     templateString = templateString.replaceAll("<VectorExprArgType>", vectorExprArgType);
 
@@ -1385,8 +1588,8 @@ public class GenVectorCode extends Task
 
     // Toss in timestamp and date.
     if (operandType2.equals("long") && operandType3.equals("long")) {
-      vectorExprArgType2 = "int_datetime_family";
-      vectorExprArgType3 = "int_datetime_family";
+      vectorExprArgType2 = "int_datetime_interval_family";
+      vectorExprArgType3 = "int_datetime_interval_family";
     }
     templateString = templateString.replaceAll("<VectorExprArgType2>", vectorExprArgType2);
     templateString = templateString.replaceAll("<VectorExprArgType3>", vectorExprArgType3);
@@ -1418,8 +1621,8 @@ public class GenVectorCode extends Task
 
     // Toss in timestamp and date.
     if (operandType2.equals("long") && operandType3.equals("long")) {
-      vectorExprArgType2 = "int_datetime_family";
-      vectorExprArgType3 = "int_datetime_family";
+      vectorExprArgType2 = "int_datetime_interval_family";
+      vectorExprArgType3 = "int_datetime_interval_family";
     }
     templateString = templateString.replaceAll("<VectorExprArgType2>", vectorExprArgType2);
     templateString = templateString.replaceAll("<VectorExprArgType3>", vectorExprArgType3);
@@ -1450,8 +1653,8 @@ public class GenVectorCode extends Task
 
     // Toss in timestamp and date.
     if (operandType2.equals("long") && operandType3.equals("long")) {
-      vectorExprArgType2 = "int_datetime_family";
-      vectorExprArgType3 = "int_datetime_family";
+      vectorExprArgType2 = "int_datetime_interval_family";
+      vectorExprArgType3 = "int_datetime_interval_family";
     }
     templateString = templateString.replaceAll("<VectorExprArgType2>", vectorExprArgType2);
     templateString = templateString.replaceAll("<VectorExprArgType3>", vectorExprArgType3);
@@ -1586,8 +1789,8 @@ public class GenVectorCode extends Task
     // But {timestamp|date} and scalar must be handled separately.
     if (operandType1.equals("long") && operandType2.equals("long")) {
       // Let comparisons occur for DATE and TIMESTAMP, too.
-      vectorExprArgType1 = "int_datetime_family";
-      vectorExprArgType2 = "int_datetime_family";
+      vectorExprArgType1 = "int_datetime_interval_family";
+      vectorExprArgType2 = "int_datetime_interval_family";
     }
     templateString = templateString.replaceAll("<VectorExprArgType1>", vectorExprArgType1);
     templateString = templateString.replaceAll("<VectorExprArgType2>", vectorExprArgType2);
@@ -1738,6 +1941,7 @@ public class GenVectorCode extends Task
 
   private void generateColumnArithmeticOperatorColumn(String[] tdesc, String returnType,
          String className) throws Exception {
+    String operatorName = tdesc[1];
     String operandType1 = tdesc[2];
     String operandType2 = tdesc[3];
     String outputColumnVectorType = this.getColumnVectorType(returnType);
@@ -1752,6 +1956,7 @@ public class GenVectorCode extends Task
     templateString = templateString.replaceAll("<InputColumnVectorType1>", inputColumnVectorType1);
     templateString = templateString.replaceAll("<InputColumnVectorType2>", inputColumnVectorType2);
     templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
     templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
     templateString = templateString.replaceAll("<OperandType1>", operandType1);
     templateString = templateString.replaceAll("<OperandType2>", operandType2);
@@ -1813,6 +2018,7 @@ public class GenVectorCode extends Task
 
   private void generateColumnArithmeticOperatorScalar(String[] tdesc, String returnType,
      String className) throws Exception {
+    String operatorName = tdesc[1];
     String operandType1 = tdesc[2];
     String operandType2 = tdesc[3];
     String outputColumnVectorType = this.getColumnVectorType(returnType);
@@ -1825,6 +2031,7 @@ public class GenVectorCode extends Task
     templateString = templateString.replaceAll("<ClassName>", className);
     templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
     templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
     templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
     templateString = templateString.replaceAll("<OperandType1>", operandType1);
     templateString = templateString.replaceAll("<OperandType2>", operandType2);
@@ -1832,12 +2039,17 @@ public class GenVectorCode extends Task
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
 
+    String testScalarType = operandType2;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
     testCodeGen.addColumnScalarOperationTestCases(
           true,
           className,
           inputColumnVectorType,
           outputColumnVectorType,
-          operandType2);
+          testScalarType);
   }
 
   private void generateScalarCompareOperatorColumn(String[] tdesc, boolean filter,
@@ -1886,6 +2098,7 @@ public class GenVectorCode extends Task
 
   private void generateScalarArithmeticOperatorColumn(String[] tdesc, String returnType,
      String className) throws Exception {
+     String operatorName = tdesc[1];
      String operandType1 = tdesc[2];
      String operandType2 = tdesc[3];
      String outputColumnVectorType = this.getColumnVectorType(
@@ -1899,6 +2112,7 @@ public class GenVectorCode extends Task
      templateString = templateString.replaceAll("<ClassName>", className);
      templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
      templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+     templateString = templateString.replaceAll("<OperatorName>", operatorName);
      templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
      templateString = templateString.replaceAll("<OperandType1>", operandType1);
      templateString = templateString.replaceAll("<OperandType2>", operandType2);
@@ -1907,12 +2121,17 @@ public class GenVectorCode extends Task
      writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
 
+     String testScalarType = operandType1;
+     if (isDateTimeIntervalType(testScalarType)) {
+       testScalarType = "long";
+     }
+
      testCodeGen.addColumnScalarOperationTestCases(
            false,
            className,
            inputColumnVectorType,
            outputColumnVectorType,
-           operandType1);
+           testScalarType);
   }
 
   //Binary arithmetic operator
@@ -2053,6 +2272,378 @@ public class GenVectorCode extends Task
         className, templateString);
   }
 
+  // TODO: These can eventually be used to replace generateTimestampScalarCompareTimestampColumn()
+  private void generateDTIScalarCompareColumn(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType = tdesc[2];
+    String className = getCamelCaseType(operandType) + "Scalar" + operatorName
+        + getCamelCaseType(operandType) + "Column";
+    String baseClassName = "LongScalar" + operatorName + "LongColumn";
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<VectorExprArgType>", operandType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateFilterDTIScalarCompareColumn(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType = tdesc[2];
+    String className = "Filter" + getCamelCaseType(operandType) + "Scalar" + operatorName
+        + getCamelCaseType(operandType) + "Column";
+    String baseClassName = "FilterLongScalar" + operatorName + "LongColumn";
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<VectorExprArgType>", operandType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateDTIColumnCompareScalar(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType = tdesc[2];
+    String className = getCamelCaseType(operandType) + "Col" + operatorName
+        + getCamelCaseType(operandType) + "Scalar";
+    String baseClassName = "LongCol" + operatorName + "LongScalar";
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<VectorExprArgType>", operandType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateFilterDTIColumnCompareScalar(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType = tdesc[2];
+    String className = "Filter" + getCamelCaseType(operandType) + "Col" + operatorName
+        + getCamelCaseType(operandType) + "Scalar";
+    String baseClassName = "FilterLongCol" + operatorName + "LongScalar";
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<VectorExprArgType>", operandType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateColumnArithmeticColumnWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion1 = tdesc[5];
+    String typeConversion2 = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Col" + operatorName + getCamelCaseType(operandType2) + "Column";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(returnType);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType1>", inputColumnVectorType1);
+    templateString = templateString.replaceAll("<InputColumnVectorType2>", inputColumnVectorType2);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversion1>", typeConversion1);
+    templateString = templateString.replaceAll("<TypeConversion2>", typeConversion2);
+    templateString = templateString.replaceAll("<CamelReturnType>", getCamelCaseType(vectorReturnType));
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    testCodeGen.addColumnColumnOperationTestCases(
+          className,
+          inputColumnVectorType1,
+          inputColumnVectorType2,
+          outputColumnVectorType);
+  }
+
+  private void generateScalarArithmeticColumnWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion1 = tdesc[5];
+    String typeConversion2 = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Scalar" + operatorName + getCamelCaseType(operandType2) + "Column";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(
+        returnType == null ? "long" : returnType);
+    String inputColumnVectorType = this.getColumnVectorType(operandType2);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversion1>", typeConversion1);
+    templateString = templateString.replaceAll("<TypeConversion2>", typeConversion2);
+    templateString = templateString.replaceAll("<CamelReturnType>", getCamelCaseType(vectorReturnType));
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+       className, templateString);
+
+    String testScalarType = operandType1;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
+    testCodeGen.addColumnScalarOperationTestCases(
+          false,
+          className,
+          inputColumnVectorType,
+          outputColumnVectorType,
+          testScalarType);
+  }
+
+  private void generateColumnArithmeticScalarWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion1 = tdesc[5];
+    String typeConversion2 = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Col" + operatorName + getCamelCaseType(operandType2) + "Scalar";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(returnType);
+    String inputColumnVectorType = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversion1>", typeConversion1);
+    templateString = templateString.replaceAll("<TypeConversion2>", typeConversion2);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    String testScalarType = operandType2;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
+    testCodeGen.addColumnScalarOperationTestCases(
+          true,
+          className,
+          inputColumnVectorType,
+          outputColumnVectorType,
+          testScalarType);
+  }
+
+  private void generateDateTimeColumnArithmeticIntervalColumnWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion = tdesc[5];
+    String operatorFunction = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Col" + operatorName + getCamelCaseType(operandType2) + "Column";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(returnType);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType1>", inputColumnVectorType1);
+    templateString = templateString.replaceAll("<InputColumnVectorType2>", inputColumnVectorType2);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversionToMillis>", typeConversion);
+    templateString = templateString.replaceAll("<OperatorFunction>", operatorFunction);
+    templateString = templateString.replaceAll("<CamelReturnType>", getCamelCaseType(vectorReturnType));
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    testCodeGen.addColumnColumnOperationTestCases(
+          className,
+          inputColumnVectorType1,
+          inputColumnVectorType2,
+          outputColumnVectorType);
+  }
+
+  private void generateDateTimeScalarArithmeticIntervalColumnWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion = tdesc[5];
+    String operatorFunction = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Scalar" + operatorName + getCamelCaseType(operandType2) + "Column";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(
+        returnType == null ? "long" : returnType);
+    String inputColumnVectorType = this.getColumnVectorType(operandType2);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversionToMillis>", typeConversion);
+    templateString = templateString.replaceAll("<OperatorFunction>", operatorFunction);
+    templateString = templateString.replaceAll("<CamelReturnType>", getCamelCaseType(vectorReturnType));
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+       className, templateString);
+
+    String testScalarType = operandType1;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
+    testCodeGen.addColumnScalarOperationTestCases(
+          false,
+          className,
+          inputColumnVectorType,
+          outputColumnVectorType,
+          testScalarType);
+  }
+
+  private void generateDateTimeColumnArithmeticIntervalScalarWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion = tdesc[5];
+    String operatorFunction = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Col" + operatorName + getCamelCaseType(operandType2) + "Scalar";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(returnType);
+    String inputColumnVectorType = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversionToMillis>", typeConversion);
+    templateString = templateString.replaceAll("<OperatorFunction>", operatorFunction);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    String testScalarType = operandType2;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
+    testCodeGen.addColumnScalarOperationTestCases(
+          true,
+          className,
+          inputColumnVectorType,
+          outputColumnVectorType,
+          testScalarType);
+  }
+
+  private static boolean isDateTimeIntervalType(String type) {
+    return (type.equals("date")
+        || type.equals("timestamp")
+        || type.equals("interval_year_month")
+        || type.equals("interval_day_time"));
+  }
+
   static void writeFile(long templateTime, String outputDir, String classesDir,
        String className, String str) throws IOException {
     File outputFile = new File(outputDir, className + ".java");
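
All of the new generate*() methods above follow the same recipe: derive a class name from the descriptor, read the matching expression template, substitute the <...> placeholders with String.replaceAll(), and hand the result to writeFile(). A self-contained sketch of the substitution step; the one-line template text below is invented for illustration, while the real templates live under the expression template directory:

// Sketch only: placeholder substitution as performed by the generators above.
public class TemplateSubstitutionSketch {
  public static void main(String[] args) {
    // Invented stand-in for an expression template file
    String templateString =
        "public class <ClassName> { /* result = <TypeConversion1>(v1) <OperatorSymbol> v2 */ }";
    templateString = templateString.replaceAll("<ClassName>", "DateColSubtractTimestampColumn");
    templateString = templateString.replaceAll("<TypeConversion1>", "TimestampUtils.daysToNanoseconds");
    templateString = templateString.replaceAll("<OperatorSymbol>", "-");
    System.out.println(templateString);
  }
}
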
@@ -2098,6 +2689,14 @@ public class GenVectorCode extends Task
       return "Double";
     } else if (type.equals("decimal")) {
       return "Decimal";
+    } else if (type.equals("interval_year_month")) {
+      return "IntervalYearMonth";
+    } else if (type.equals("interval_day_time")) {
+      return "IntervalDayTime";
+    } else if (type.equals("timestamp")) {
+      return "Timestamp";
+    } else if (type.equals("date")) {
+      return "Date";
     } else {
       return type;
     }
@@ -2111,20 +2710,60 @@ public class GenVectorCode extends Task
     return firstLetterAsCap + word.substring(1);
   }
 
+  private static final String ARITHMETIC_RETURN_TYPES[][] = {
+    { "interval_year_month", "interval_year_month", "interval_year_month"},
+    { "interval_year_month", "date", "date"},
+    { "date", "interval_year_month", "date"},
+    { "interval_year_month", "timestamp", "timestamp"},
+    { "timestamp", "interval_year_month", "timestamp"},
+    { "interval_day_time", "interval_day_time", "interval_day_time"},
+    { "interval_day_time", "date", "timestamp"},
+    { "date", "interval_day_time", "timestamp"},
+    { "interval_day_time", "timestamp", "timestamp"},
+    { "timestamp", "interval_day_time", "timestamp"},
+    { "date", "date", "interval_day_time"},
+    { "timestamp", "timestamp", "interval_day_time"},
+    { "timestamp", "date", "interval_day_time"},
+    { "date", "timestamp", "interval_day_time"},
+    { "*", "double", "double"},
+    { "double", "*", "double"},
+  };
+
   private String getArithmeticReturnType(String operandType1,
       String operandType2) {
+/*
     if (operandType1.equals("double") ||
         operandType2.equals("double")) {
       return "double";
+    } else if (operandType1.equals("interval_year_month") &&
+        operandType2.equals("interval_year_month")) {
+      return "interval_year_month";
+    } else if (operandType1.equals("interval_year_month") &&
+        operandType2.equals("date")) {
+      return "date";
+    } else if (operandType1.equals("date") &&
+        operandType2.equals("interval_year_month")) {
+      return "date";
+    } else if (operandType1.equals("interval_day_time") &&
+        operandType2.equals("interval_day_time")) {
+      return "interval_day_time";
     } else {
       return "long";
     }
+*/
+    for (String[] combination : ARITHMETIC_RETURN_TYPES) {
+      if ((combination[0].equals("*") || combination[0].equals(operandType1)) &&
+          (combination[1].equals("*") || combination[1].equals(operandType2))) {
+        return combination[2];
+      }
+    }
+    return "long";
   }
 
   private String getColumnVectorType(String primitiveType) throws Exception {
     if(primitiveType.equals("double")) {
       return "DoubleColumnVector";
-    } else if (primitiveType.equals("long")) {
+    } else if (primitiveType.equals("long") || isDateTimeIntervalType(primitiveType)) {
         return "LongColumnVector";
     } else if (primitiveType.equals("decimal")) {
         return "DecimalColumnVector";
@@ -2134,6 +2773,19 @@ public class GenVectorCode extends Task
     throw new Exception("Unimplemented primitive column vector type: " + primitiveType);
   }
 
+  private String getVectorPrimitiveType(String columnVectorType) throws Exception {
+    if (columnVectorType.equals("LongColumnVector")) {
+      return "long";
+    } else if (columnVectorType.equals("DoubleColumnVector")) {
+      return "double";
+    } else if (columnVectorType.equals("DecimalColumnVector")) {
+      return "decimal";
+    } else if (columnVectorType.equals("BytesColumnVector")) {
+      return "string";
+    }
+    throw new Exception("Could not determine primitive type for column vector type: " + columnVectorType);
+  }
+
   private String getOutputWritableType(String primitiveType) throws Exception {
     if (primitiveType.equals("long")) {
       return "LongWritable";
@@ -2141,6 +2793,14 @@ public class GenVectorCode extends Task
       return "DoubleWritable";
     } else if (primitiveType.equals("decimal")) {
       return "HiveDecimalWritable";
+    } else if (primitiveType.equals("interval_year_month")) {
+      return "HiveIntervalYearMonthWritable";
+    } else if (primitiveType.equals("interval_day_time")) {
+      return "HiveIntervalDayTimeWritable";
+    } else if (primitiveType.equals("date")) {
+      return "HiveDateWritable";
+    } else if (primitiveType.equals("timestamp")) {
+      return "HiveTimestampWritable";
     }
     throw new Exception("Unimplemented primitive output writable: " + primitiveType);
   }
@@ -2152,6 +2812,14 @@ public class GenVectorCode extends Task
       return "PrimitiveObjectInspectorFactory.writableDoubleObjectInspector";
     } else if (primitiveType.equals("decimal")) {
       return "PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector";
+    } else if (primitiveType.equals("interval_year_month")) {
+      return "PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector";
+    } else if (primitiveType.equals("interval_day_time")) {
+      return "PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector";
+    } else if (primitiveType.equals("date")) {
+      return "PrimitiveObjectInspectorFactory.writableDateObjectInspector";
+    } else if (primitiveType.equals("timestamp")) {
+      return "PrimitiveObjectInspectorFactory.writableTimestampObjectInspector";
     }
     throw new Exception("Unimplemented primitive output inspector: " + primitiveType);
   }

Modified: hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/BeeLine.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/BeeLine.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/BeeLine.java (original)
+++ hive/branches/cbo/beeline/src/java/org/apache/hive/beeline/BeeLine.java Tue Mar 31 20:14:02 2015
@@ -803,10 +803,14 @@ public class BeeLine implements Closeabl
   }
 
   private int execute(ConsoleReader reader, boolean exitOnError) {
+    String line;
     while (!exit) {
       try {
         // Execute one instruction; terminate on executing a script if there is an error
-        if (!dispatch(reader.readLine(getPrompt())) && exitOnError) {
+        // in silent mode, prevent the query and prompt being echoed back to terminal
+        line = getOpts().isSilent() ? reader.readLine(null, ConsoleReader.NULL_MASK) : reader.readLine(getPrompt());
+
+        if (!dispatch(line) && exitOnError) {
           return ERRNO_OTHER;
         }
       } catch (Throwable t) {

Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue Mar 31 20:14:02 2015
@@ -2042,7 +2042,9 @@ public class HiveConf extends Configurat
     SPARK_RPC_SASL_MECHANISM("hive.spark.client.rpc.sasl.mechanisms", "DIGEST-MD5",
       "Name of the SASL mechanism to use for authentication."),
     NWAYJOINREORDER("hive.reorder.nway.joins", true,
-      "Runs reordering of tables within single n-way join (i.e.: picks streamtable)");
+      "Runs reordering of tables within single n-way join (i.e.: picks streamtable)"),
+    HIVE_LOG_N_RECORDS("hive.log.every.n.records", 0L, new RangeValidator(0L, null),
+      "If value is greater than 0 logs in fixed intervals of size n rather than exponentially.");
 
     public final String varname;
     private final String defaultExpr;

Modified: hive/branches/cbo/common/src/java/org/apache/hive/common/util/DateUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hive/common/util/DateUtils.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hive/common/util/DateUtils.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hive/common/util/DateUtils.java Tue Mar 31 20:14:02 2015
@@ -21,6 +21,8 @@ package org.apache.hive.common.util;
 import java.math.BigDecimal;
 import java.text.SimpleDateFormat;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+
 /**
  * DateUtils. Thread-safe class
  *
@@ -54,4 +56,14 @@ public class DateUtils {
     }
     return result;
   }
-}
\ No newline at end of file
+
+  public static long getIntervalDayTimeTotalNanos(HiveIntervalDayTime intervalDayTime) {
+    return intervalDayTime.getTotalSeconds() * NANOS_PER_SEC + intervalDayTime.getNanos();
+  }
+
+  public static void setIntervalDayTimeTotalNanos(HiveIntervalDayTime intervalDayTime,
+      long totalNanos) {
+    intervalDayTime.set(totalNanos / NANOS_PER_SEC, (int) (totalNanos % NANOS_PER_SEC));
+  }
+}
+
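
The two new DateUtils helpers pack a HiveIntervalDayTime's seconds/nanos pair into a single total-nanoseconds long and back, using the class's existing NANOS_PER_SEC constant (assumed here to be 1,000,000,000). The same arithmetic in a standalone sketch with plain longs, so it runs without the Hive types:

// Sketch only: the packing/unpacking performed by
// getIntervalDayTimeTotalNanos() and setIntervalDayTimeTotalNanos().
public class IntervalNanosSketch {
  private static final long NANOS_PER_SEC = 1_000_000_000L;  // assumed value of DateUtils.NANOS_PER_SEC

  public static void main(String[] args) {
    long totalSeconds = 93784L;  // 1 day 02:03:04
    int nanos = 500;

    long totalNanos = totalSeconds * NANOS_PER_SEC + nanos;   // pack
    long unpackedSeconds = totalNanos / NANOS_PER_SEC;        // unpack
    int unpackedNanos = (int) (totalNanos % NANOS_PER_SEC);

    System.out.println(unpackedSeconds == totalSeconds && unpackedNanos == nanos);  // true
  }
}
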

Modified: hive/branches/cbo/dev-support/jenkins-common.sh
URL: http://svn.apache.org/viewvc/hive/branches/cbo/dev-support/jenkins-common.sh?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/dev-support/jenkins-common.sh (original)
+++ hive/branches/cbo/dev-support/jenkins-common.sh Tue Mar 31 20:14:02 2015
@@ -91,3 +91,9 @@ process_jira() {
   fi
   export BUILD_OPTS=$build_opts
 }
+
+# Checks whether the patch at the specified URL contains HMS upgrade changes.
+# Returns 0 if there are changes; non-zero otherwise.
+patch_contains_hms_upgrade() {
+	curl -s "$1" | grep "^diff.*metastore/scripts/upgrade/" >/dev/null
+}
\ No newline at end of file

Modified: hive/branches/cbo/dev-support/jenkins-execute-hms-test.sh
URL: http://svn.apache.org/viewvc/hive/branches/cbo/dev-support/jenkins-execute-hms-test.sh?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/dev-support/jenkins-execute-hms-test.sh (original)
+++ hive/branches/cbo/dev-support/jenkins-execute-hms-test.sh Tue Mar 31 20:14:02 2015
@@ -163,10 +163,6 @@ create_publish_file() {
 	echo $json_file
 }
 
-patch_contains_hms_upgrade() {
-	curl -s "$1" | grep "^diff.*metastore/scripts/upgrade/" >/dev/null
-}
-
 if patch_contains_hms_upgrade "$PATCH_URL"; then
 	ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i $SSH_KEY $SSH_HOST "
 		rm -rf metastore/ &&
@@ -184,6 +180,10 @@ if patch_contains_hms_upgrade "$PATCH_UR
 		do
 			if echo $line | grep 'Test failed' > /dev/null; then
 				FAILED_TESTS+=("$line")
+			elif echo $line | grep 'Executing sql test' >/dev/null; then
+				# Remove 'Executing sql test' line from MESSAGES log to avoid a verbose
+				# comment on JIRA
+				continue
 			fi
 
 			MESSAGES+=("$line")

Modified: hive/branches/cbo/dev-support/jenkins-submit-build.sh
URL: http://svn.apache.org/viewvc/hive/branches/cbo/dev-support/jenkins-submit-build.sh?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/dev-support/jenkins-submit-build.sh (original)
+++ hive/branches/cbo/dev-support/jenkins-submit-build.sh Tue Mar 31 20:14:02 2015
@@ -27,23 +27,28 @@ case "$BUILD_PROFILE" in
   trunk-mr1|trunk-mr2)
    test -n "$TRUNK_URL" || fail "TRUNK_URL must be specified"
    url="$TRUNK_URL&ISSUE_NUM=$ISSUE_NUM"
-   curl -v -i "$url"
-   exit 0
   ;;
   spark-mr2)
    test -n "$SPARK_URL" || fail "SPARK_URL must be specified"
    url="$SPARK_URL&ISSUE_NUM=$ISSUE_NUM"
-   curl -v -i "$url"
-   exit 0
   ;;
   encryption-mr2)
    test -n "$ENCRYPTION_URL" || fail "ENCRYPTION_URL must be specified"
    url="$ENCRYPTION_URL&ISSUE_NUM=$ISSUE_NUM"
-   curl -v -i "$url"
-   exit 0
   ;;
   *)
   echo "Unknown profile '$BUILD_PROFILE'"
   exit 1
   ;;
 esac
+
+# Execute jenkins job for HMS upgrade tests if needed
+if patch_contains_hms_upgrade "${JIRA_ROOT_URL}$PATCH_URL"; then
+  test -n "$HMS_UPGRADE_URL" || fail "HMS_UPGRADE_URL must be specified"
+  echo "Calling HMS upgrade testing job..."
+  curl -v -i "${HMS_UPGRADE_URL}&ISSUE_NUM=${ISSUE_NUM}&BRANCH=${BRANCH}"
+fi
+
+# Execute jenkins job for specific profile
+echo "Calling Precommit $BRANCH Build..."
+curl -v -i "$url"

Modified: hive/branches/cbo/hbase-handler/src/test/queries/positive/hbase_timestamp.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/queries/positive/hbase_timestamp.q?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/queries/positive/hbase_timestamp.q (original)
+++ hive/branches/cbo/hbase-handler/src/test/queries/positive/hbase_timestamp.q Tue Mar 31 20:14:02 2015
@@ -10,7 +10,7 @@ DROP TABLE hbase_table;
 CREATE TABLE hbase_table (key string, value string, time bigint)
   STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp");
-FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, 1329959754000 WHERE (key % 17) = 0;
+FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, 1329959754 WHERE (key % 17) = 0;
 SELECT key, value, cast(time as timestamp) FROM hbase_table;
 
 DROP TABLE hbase_table;
@@ -19,23 +19,23 @@ CREATE TABLE hbase_table (key string, va
   WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string,:timestamp");
 insert overwrite table hbase_table select key,value,ts FROM
 (
-  select key, value, 100000000000 as ts from src WHERE (key % 33) = 0
+  select key, value, 100000000 as ts from src WHERE (key % 33) = 0
   UNION ALL
-  select key, value, 200000000000 as ts from src WHERE (key % 37) = 0
+  select key, value, 200000000 as ts from src WHERE (key % 37) = 0
 ) T;
 
 explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000;
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000;
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000;
 
 explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000;
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000;
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000;
 
 explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000;
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000;
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000;
 
 explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000;
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000;
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000;
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000;
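
The constants above drop a factor of 1000 because the golden output renders these bigint values as seconds since the epoch; a small sketch checking that arithmetic against hbase_timestamp.q.out (the US/Pacific zone is inferred from that output, not stated in the test):

    import java.time.Instant;
    import java.time.ZoneId;

    public class EpochSecondsSketch {
      public static void main(String[] args) {
        ZoneId zone = ZoneId.of("US/Pacific");
        // Local date-times in this zone are 1973-03-03 01:46:40 and 1976-05-03 12:33:20,
        // matching the timestamps shown in the golden file below.
        System.out.println(Instant.ofEpochSecond(100000000L).atZone(zone));
        System.out.println(Instant.ofEpochSecond(200000000L).atZone(zone));
      }
    }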

Modified: hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_timestamp.q.out?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_timestamp.q.out (original)
+++ hive/branches/cbo/hbase-handler/src/test/results/positive/hbase_timestamp.q.out Tue Mar 31 20:14:02 2015
@@ -81,11 +81,11 @@ POSTHOOK: query: CREATE TABLE hbase_tabl
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@hbase_table
-PREHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, 1329959754000 WHERE (key % 17) = 0
+PREHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, 1329959754 WHERE (key % 17) = 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@hbase_table
-POSTHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, 1329959754000 WHERE (key % 17) = 0
+POSTHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table SELECT key, value, 1329959754 WHERE (key % 17) = 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@hbase_table
@@ -139,27 +139,27 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@hbase_table
 PREHOOK: query: insert overwrite table hbase_table select key,value,ts FROM
 (
-  select key, value, 100000000000 as ts from src WHERE (key % 33) = 0
+  select key, value, 100000000 as ts from src WHERE (key % 33) = 0
   UNION ALL
-  select key, value, 200000000000 as ts from src WHERE (key % 37) = 0
+  select key, value, 200000000 as ts from src WHERE (key % 37) = 0
 ) T
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@hbase_table
 POSTHOOK: query: insert overwrite table hbase_table select key,value,ts FROM
 (
-  select key, value, 100000000000 as ts from src WHERE (key % 33) = 0
+  select key, value, 100000000 as ts from src WHERE (key % 33) = 0
   UNION ALL
-  select key, value, 200000000000 as ts from src WHERE (key % 37) = 0
+  select key, value, 200000000 as ts from src WHERE (key % 37) = 0
 ) T
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@hbase_table
 PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -171,10 +171,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table
-            filterExpr: (((key > 100.0) and (key < 400.0)) and (time < 200000000000)) (type: boolean)
+            filterExpr: (((key > 100.0) and (key < 400.0)) and (time < 200000000)) (type: boolean)
             Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time < 200000000000))) (type: boolean)
+              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time < 200000000))) (type: boolean)
               Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
@@ -194,21 +194,21 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000000
+POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time < 200000000
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
 165	val_165	1973-03-03 01:46:40
 396	val_396	1973-03-03 01:46:40
 PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -220,10 +220,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table
-            filterExpr: (((key > 100.0) and (key < 400.0)) and (time > 100000000000)) (type: boolean)
+            filterExpr: (((key > 100.0) and (key < 400.0)) and (time > 100000000)) (type: boolean)
             Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time > 100000000000))) (type: boolean)
+              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time > 100000000))) (type: boolean)
               Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
@@ -243,11 +243,11 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000000
+POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time > 100000000
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
@@ -256,10 +256,10 @@ POSTHOOK: Input: default@hbase_table
 296	val_296	1976-05-03 12:33:20
 333	val_333	1976-05-03 12:33:20
 PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -271,10 +271,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table
-            filterExpr: (((key > 100.0) and (key < 400.0)) and (time <= 100000000000)) (type: boolean)
+            filterExpr: (((key > 100.0) and (key < 400.0)) and (time <= 100000000)) (type: boolean)
             Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time <= 100000000000))) (type: boolean)
+              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time <= 100000000))) (type: boolean)
               Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
@@ -294,21 +294,21 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000000
+POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time <= 100000000
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
 165	val_165	1973-03-03 01:46:40
 396	val_396	1973-03-03 01:46:40
 PREHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000
 PREHOOK: type: QUERY
 POSTHOOK: query: explain
-SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -320,10 +320,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table
-            filterExpr: (((key > 100.0) and (key < 400.0)) and (time >= 200000000000)) (type: boolean)
+            filterExpr: (((key > 100.0) and (key < 400.0)) and (time >= 200000000)) (type: boolean)
             Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time >= 200000000000))) (type: boolean)
+              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time >= 200000000))) (type: boolean)
               Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
@@ -343,11 +343,11 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+PREHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000
 PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_table
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000000
+POSTHOOK: query: SELECT key, value, cast(time as timestamp) FROM hbase_table WHERE key > 100 AND key < 400 AND time >= 200000000
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_table
 #### A masked pattern was here ####

Modified: hive/branches/cbo/hcatalog/hcatalog-pig-adapter/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/hcatalog-pig-adapter/pom.xml?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/hcatalog-pig-adapter/pom.xml (original)
+++ hive/branches/cbo/hcatalog/hcatalog-pig-adapter/pom.xml Tue Mar 31 20:14:02 2015
@@ -68,7 +68,6 @@
     </dependency>
   </dependencies>
 
-
   <profiles>
     <profile>
       <id>hadoop-1</id>
@@ -79,6 +78,12 @@
           <version>${hadoop-20S.version}</version>
         </dependency>
         <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>${hadoop-20S.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
           <groupId>org.apache.pig</groupId>
           <artifactId>pig</artifactId>
           <version>${pig.version}</version>
@@ -102,6 +107,11 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <version>${hadoop-23.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-core</artifactId>
           <version>${hadoop-23.version}</version>
         </dependency>
@@ -112,6 +122,12 @@
           <classifier>h2</classifier>
         </dependency>
         <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
           <!--this should be automatically brought in by Pig, it's not in Pig 0.12 due to a bug
               in Pig which requires it This is fixed in Pig's pom file in ASF trunk (pig 13)-->
           <groupId>joda-time</groupId>
@@ -121,11 +137,30 @@
         <!-- Test dependencies -->
         <dependency>
           <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-common</artifactId>
           <version>${hadoop-23.version}</version>
           <optional>true</optional>
           <scope>test</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>${hadoop-23.version}</version>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-servlet</artifactId>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>

Modified: hive/branches/cbo/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml (original)
+++ hive/branches/cbo/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml Tue Mar 31 20:14:02 2015
@@ -35,7 +35,7 @@
 
     <property>
         <name>templeton.libjars</name>
-        <value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.5.jar</value>
+        <value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0-SNAPSHOT.jar</value>
         <description>Jars to add to the classpath.</description>
     </property>
 
@@ -69,6 +69,11 @@
             shipped to the target node in the cluster to execute Pig job which uses 
             HCat, Hive query, etc.</description>
     </property>
+
+    <property>
+      <name>templeton.hive.extra.files</name>
+      <value>${env.TEZ_CLIENT_HOME}/conf/tez-site.xml,${env.TEZ_CLIENT_HOME}/,${env.TEZ_CLIENT_HOME}/lib</value>
+    </property>
     <property>
         <name>templeton.hcat.home</name>
         <value>apache-hive-${env.HIVE_VERSION}-bin.tar.gz/apache-hive-${env.HIVE_VERSION}-bin/hcatalog</value>
@@ -101,7 +106,7 @@
     </property>
 
     <property>
-        <!--\,thrift://127.0.0.1:9933-->
+        <!--\,thrift://127.0.0.1:9933,,hive.execution.engine=tez-->
         <name>templeton.hive.properties</name>
         <value>hive.metastore.uris=thrift://localhost:9933,hive.metastore.sasl.enabled=false</value>
     </property>

Modified: hive/branches/cbo/hcatalog/src/test/e2e/templeton/deployers/env.sh
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/src/test/e2e/templeton/deployers/env.sh?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/src/test/e2e/templeton/deployers/env.sh (original)
+++ hive/branches/cbo/hcatalog/src/test/e2e/templeton/deployers/env.sh Tue Mar 31 20:14:02 2015
@@ -36,6 +36,10 @@ if [ -z ${PIG_VERSION} ]; then
   export PIG_VERSION=0.12.2-SNAPSHOT
 fi
 
+if [ -z ${TEZ_VERSION} ]; then
+  export TEZ_VERSION=0.5.3
+fi
+
 #Root of project source tree
 if [ -z ${PROJ_HOME} ]; then
   export PROJ_HOME=/Users/${USER}/dev/hive
@@ -46,6 +50,7 @@ if [ -z ${HADOOP_HOME} ]; then
   export HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
 fi
 
+export TEZ_CLIENT_HOME=/Users/ekoifman/dev/apache-tez-client-${TEZ_VERSION}
 #Make sure Pig is built for the Hadoop version you are running
 export PIG_TAR_PATH=/Users/${USER}/dev/pig-${PIG_VERSION}-src/build
 #this is part of Pig distribution

Modified: hive/branches/cbo/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml (original)
+++ hive/branches/cbo/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml Tue Mar 31 20:14:02 2015
@@ -39,7 +39,7 @@
 
   <property>
     <name>templeton.libjars</name>
-    <value>${env.TEMPLETON_HOME}/share/webhcat/svr/lib/zookeeper-3.4.3.jar</value>
+    <value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0-SNAPSHOT.jar</value>
     <description>Jars to add to the classpath.</description>
   </property>
 
@@ -106,7 +106,20 @@
   <property>
     <name>templeton.hive.path</name>
     <value>hive-0.11.0.tar.gz/hive-0.11.0/bin/hive</value>
-    <description>The path to the Hive executable.</description>
+    <description>The path to the Hive executable.  Applies only if templeton.hive.archive is defined.</description>
+  </property>
+
+  <property>
+    <name>templeton.hive.extra.files</name>
+    <value>/tez-client/conf/tez-site.xml,/tez-client/,/tez-client/lib</value>
+    <description>The resources in this list will be localized to the node running LaunchMapper and added to HADOOP_CLASSPATH
+      before launching the 'hive' command.  If the path /foo/bar is a directory, the contents of the entire dir will be localized
+      and ./bar/* will be added to HADOOP_CLASSPATH.  Note that since classpath processing does not recurse into subdirectories,
+      the paths in this property may be overlapping.  In the example above, "./tez-site.xml:./tez-client/*:./lib/*" will be added to
+      HADOOP_CLASSPATH.
+      This can be used to specify config files, Tez artifacts, etc.  These paths are passed via the -files option of the hadoop jar
+      command, so each path is interpreted by the GenericOptionsParser.  Each path can be a local or HDFS path.
+    </description>
   </property>
 
   <property>
@@ -197,6 +210,32 @@
     </description>
   </property>
 
+  <!--
+  <property>
+    <name>templeton.controller.mr.am.java.opts</name>
+    <value></value>
+    <description>Java options to be set for the templeton controller job's
+        MapReduce application master. When submitting the controller job,
+        Templeton will override yarn.app.mapreduce.am.command-opts with
+        this value.  If this is not specified, Templeton will not set the
+        property and therefore the value will be picked up from
+        mapred-site.xml.
+    </description>
+  </property>
+
+  <property>
+    <name>templeton.mr.am.memory.mb</name>
+    <value></value>
+    <description>Templeton controller job's Application Master's memory
+        limit in MB. When submitting controller job, Templeton will
+        overwrite yarn.app.mapreduce.am.resource.mb with this value. If
+        empty, Templeton will not set yarn.app.mapreduce.am.resource.mb
+        when submitting the controller job, therefore the configuration
+        in mapred-site.xml will be used.
+    </description>
+  </property>
+  -->
+
   <property>
     <name>templeton.exec.envs</name>
     <value>HADOOP_PREFIX,HADOOP_HOME,JAVA_HOME,HIVE_HOME</value>

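To make the templeton.hive.extra.files description above concrete, a hypothetical sketch of the path-to-classpath mapping it describes; the file-versus-directory heuristic here is illustrative only and is not the actual WebHCat logic:

    import java.util.ArrayList;
    import java.util.List;

    public class ExtraFilesClasspathSketch {
      public static void main(String[] args) {
        String[] configured = {"/tez-client/conf/tez-site.xml", "/tez-client/", "/tez-client/lib"};
        List<String> cpEntries = new ArrayList<String>();
        for (String p : configured) {
          String trimmed = p.endsWith("/") ? p.substring(0, p.length() - 1) : p;
          String base = trimmed.substring(trimmed.lastIndexOf('/') + 1);
          // A plain file contributes ./name; a directory contributes ./name/* (non-recursive).
          cpEntries.add(base.contains(".") ? "./" + base : "./" + base + "/*");
        }
        // Prints ./tez-site.xml:./tez-client/*:./lib/*, the example given in the description.
        System.out.println(String.join(":", cpEntries));
      }
    }
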
Modified: hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java?rev=1670462&r1=1670461&r2=1670462&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java (original)
+++ hive/branches/cbo/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java Tue Mar 31 20:14:02 2015
@@ -35,7 +35,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hive.hcatalog.templeton.tool.JobState;
@@ -104,6 +103,8 @@ public class AppConfig extends Configura
   public static final String HIVE_ARCHIVE_NAME   = "templeton.hive.archive";
   public static final String HIVE_PATH_NAME      = "templeton.hive.path";
   public static final String MAPPER_MEMORY_MB    = "templeton.mapper.memory.mb";
+  public static final String MR_AM_MEMORY_MB     = "templeton.mr.am.memory.mb";
+
   /**
    * see webhcat-default.xml
    */
@@ -130,6 +131,8 @@ public class AppConfig extends Configura
   public static final String OVERRIDE_JARS_ENABLED = "templeton.override.enabled";
   public static final String TEMPLETON_CONTROLLER_MR_CHILD_OPTS 
     = "templeton.controller.mr.child.opts";
+  public static final String TEMPLETON_CONTROLLER_MR_AM_JAVA_OPTS
+    = "templeton.controller.mr.am.java.opts";
 
   public static final String KERBEROS_SECRET     = "templeton.kerberos.secret";
   public static final String KERBEROS_PRINCIPAL  = "templeton.kerberos.principal";
@@ -148,7 +151,14 @@ public class AppConfig extends Configura
     = "mapred.map.tasks.speculative.execution";
   public static final String HADOOP_CHILD_JAVA_OPTS = "mapred.child.java.opts";
   public static final String HADOOP_MAP_MEMORY_MB = "mapreduce.map.memory.mb";
+  public static final String HADOOP_MR_AM_JAVA_OPTS = "yarn.app.mapreduce.am.command-opts";
+  public static final String HADOOP_MR_AM_MEMORY_MB = "yarn.app.mapreduce.am.resource.mb";
   public static final String UNIT_TEST_MODE     = "templeton.unit.test.mode";
+  /**
+   * Comma-separated list of artifacts to add to the HADOOP_CLASSPATH env var in
+   * LaunchMapper before launching the Hive command.
+   */
+  public static final String HIVE_EXTRA_FILES = "templeton.hive.extra.files";
 
 
   private static final Log LOG = LogFactory.getLog(AppConfig.class);
@@ -313,7 +323,13 @@ public class AppConfig extends Configura
   public String controllerMRChildOpts() { 
     return get(TEMPLETON_CONTROLLER_MR_CHILD_OPTS); 
   }
+  public String controllerAMChildOpts() {
+    return get(TEMPLETON_CONTROLLER_MR_AM_JAVA_OPTS);
+  }
   public String mapperMemoryMb()   { return get(MAPPER_MEMORY_MB); }
+  public String amMemoryMb() {
+    return get(MR_AM_MEMORY_MB);
+  }
 
   /**
    * @see  #HIVE_PROPS_NAME
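
A hedged sketch of how the two new accessors might be applied when building the controller job's configuration; the overrideAmSettings method and its wiring are hypothetical, while the AppConfig constants and getters come from this diff:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hive.hcatalog.templeton.AppConfig;

    public class AmOverrideSketch {
      static void overrideAmSettings(AppConfig appConf, Configuration jobConf) {
        String amOpts = appConf.controllerAMChildOpts();
        if (amOpts != null && !amOpts.isEmpty()) {
          // yarn.app.mapreduce.am.command-opts
          jobConf.set(AppConfig.HADOOP_MR_AM_JAVA_OPTS, amOpts);
        }
        String amMemoryMb = appConf.amMemoryMb();
        if (amMemoryMb != null && !amMemoryMb.isEmpty()) {
          // yarn.app.mapreduce.am.resource.mb
          jobConf.set(AppConfig.HADOOP_MR_AM_MEMORY_MB, amMemoryMb);
        }
      }
    }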


