hive-commits mailing list archives

From prasan...@apache.org
Subject svn commit: r1664556 [2/2] - in /hive/branches/llap: ./ beeline/src/java/org/apache/hive/beeline/ bin/ common/src/java/org/apache/hive/common/util/ data/files/ itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/ itests/hive-unit/src/t...
Date Fri, 06 Mar 2015 08:12:15 GMT
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLevenshtein.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLevenshtein.java?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLevenshtein.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLevenshtein.java Fri Mar  6 08:12:14 2015
@@ -17,20 +17,16 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
 import org.apache.hadoop.io.IntWritable;
 
 /**
@@ -53,24 +49,22 @@ import org.apache.hadoop.io.IntWritable;
     + "Example:\n "
     + " > SELECT _FUNC_('kitten', 'sitting');\n 3")
 public class GenericUDFLevenshtein extends GenericUDF {
-  private transient Converter[] textConverters = new Converter[2];
+  private transient Converter[] converters = new Converter[2];
   private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2];
   private final IntWritable output = new IntWritable();
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    if (arguments.length != 2) {
-      throw new UDFArgumentLengthException(getFuncName() + " requires 2 arguments, got "
-          + arguments.length);
-    }
-    checkIfPrimitive(arguments, 0, "1st");
-    checkIfPrimitive(arguments, 1, "2nd");
+    checkArgsSize(arguments, 2, 2);
 
-    checkIfStringGroup(arguments, 0, "1st");
-    checkIfStringGroup(arguments, 1, "2nd");
+    checkArgPrimitive(arguments, 0);
+    checkArgPrimitive(arguments, 1);
 
-    getStringConverter(arguments, 0, "1st");
-    getStringConverter(arguments, 1, "2nd");
+    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP);
+    checkArgGroups(arguments, 1, inputTypes, STRING_GROUP);
+
+    obtainStringConverter(arguments, 0, inputTypes, converters);
+    obtainStringConverter(arguments, 1, inputTypes, converters);
 
     ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
     return outputOI;
@@ -78,15 +72,13 @@ public class GenericUDFLevenshtein exten
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    Object obj0;
-    Object obj1;
-    if ((obj0 = arguments[0].get()) == null || (obj1 = arguments[1].get()) == null) {
+    String str0 = getStringValue(arguments, 0, converters);
+    String str1 = getStringValue(arguments, 1, converters);
+
+    if (str0 == null || str1 == null) {
       return null;
     }
 
-    String str0 = textConverters[0].convert(obj0).toString();
-    String str1 = textConverters[1].convert(obj1).toString();
-
     int dist = StringUtils.getLevenshteinDistance(str0, str1);
     output.set(dist);
     return output;
@@ -97,31 +89,7 @@ public class GenericUDFLevenshtein exten
     return getStandardDisplayString(getFuncName(), children);
   }
 
-  protected void checkIfPrimitive(ObjectInspector[] arguments, int i, String argOrder)
-      throws UDFArgumentTypeException {
-    ObjectInspector.Category oiCat = arguments[i].getCategory();
-    if (oiCat != ObjectInspector.Category.PRIMITIVE) {
-      throw new UDFArgumentTypeException(i, getFuncName() + " only takes primitive types as "
-          + argOrder + " argument, got " + oiCat);
-    }
-  }
-
-  protected void checkIfStringGroup(ObjectInspector[] arguments, int i, String argOrder)
-      throws UDFArgumentTypeException {
-    inputTypes[i] = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
-    if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputTypes[i]) != PrimitiveGrouping.STRING_GROUP) {
-      throw new UDFArgumentTypeException(i, getFuncName() + " only takes STRING_GROUP types as "
-          + argOrder + " argument, got " + inputTypes[i]);
-    }
-  }
-
-  protected void getStringConverter(ObjectInspector[] arguments, int i, String argOrder)
-      throws UDFArgumentTypeException {
-    textConverters[i] = ObjectInspectorConverters.getConverter(
-        (PrimitiveObjectInspector) arguments[i],
-        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
-  }
-
+  @Override
   protected String getFuncName() {
     return "levenshtein";
   }
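
The hunk above replaces GenericUDFLevenshtein's private validation helpers with the shared GenericUDF base-class helpers visible in the diff (checkArgsSize, checkArgPrimitive, checkArgGroups, obtainStringConverter, getStringValue), so type checking and null handling are no longer duplicated per UDF. A minimal, self-contained sketch of the surviving evaluate() logic, using only commons-lang3 (the same call the UDF makes); the Hive ObjectInspector plumbing is elided:

    import org.apache.commons.lang3.StringUtils;

    public class LevenshteinSketch {
      // Mirrors the simplified evaluate(): null in, null out;
      // otherwise delegate the distance itself to commons-lang3.
      static Integer levenshtein(String str0, String str1) {
        if (str0 == null || str1 == null) {
          return null;
        }
        return StringUtils.getLevenshteinDistance(str0, str1);
      }

      public static void main(String[] args) {
        // Same example as the UDF's Description: distance 3.
        System.out.println(levenshtein("kitten", "sitting"));
      }
    }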

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java Fri Mar  6 08:12:14 2015
@@ -24,30 +24,22 @@ import static org.apache.hadoop.hive.ql.
 import static org.apache.hadoop.hive.ql.udf.generic.GenericUDFNextDay.DayOfWeek.THU;
 import static org.apache.hadoop.hive.ql.udf.generic.GenericUDFNextDay.DayOfWeek.TUE;
 import static org.apache.hadoop.hive.ql.udf.generic.GenericUDFNextDay.DayOfWeek.WED;
+import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.DATE_GROUP;
+import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 
-import java.sql.Timestamp;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
 import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * GenericUDFNextDay.
@@ -62,120 +54,68 @@ import org.apache.hadoop.io.Text;
         + " 'yyyy-MM-dd'. day_of_week is day of the week (e.g. Mo, tue, FRIDAY)."
         + "Example:\n " + " > SELECT _FUNC_('2015-01-14', 'TU') FROM src LIMIT 1;\n" + " '2015-01-20'")
 public class GenericUDFNextDay extends GenericUDF {
-  private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-  private transient TimestampConverter timestampConverter;
-  private transient Converter textConverter0;
-  private transient Converter textConverter1;
-  private transient Converter dateWritableConverter;
-  private transient PrimitiveCategory inputType1;
-  private transient PrimitiveCategory inputType2;
+  private transient Converter[] converters = new Converter[2];
+  private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2];
   private final Calendar calendar = Calendar.getInstance();
   private final Text output = new Text();
+  private transient int dayOfWeekIntConst;
+  private transient boolean isDayOfWeekConst;
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    if (arguments.length != 2) {
-      throw new UDFArgumentLengthException("next_day() requires 2 argument, got "
-          + arguments.length);
-    }
-    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-      throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
-          + arguments[0].getTypeName() + " is passed as first arguments");
-    }
-    if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-      throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
-          + arguments[1].getTypeName() + " is passed as second arguments");
+    checkArgsSize(arguments, 2, 2);
+
+    checkArgPrimitive(arguments, 0);
+    checkArgPrimitive(arguments, 1);
+
+    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, DATE_GROUP);
+    checkArgGroups(arguments, 1, inputTypes, STRING_GROUP);
+
+    obtainDateConverter(arguments, 0, inputTypes, converters);
+    obtainStringConverter(arguments, 1, inputTypes, converters);
+
+    if (arguments[1] instanceof ConstantObjectInspector) {
+      String dayOfWeek = getConstantStringValue(arguments, 1);
+      isDayOfWeekConst = true;
+      dayOfWeekIntConst = getIntDayOfWeek(dayOfWeek);
     }
-    inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
+
     ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    switch (inputType1) {
-    case STRING:
-    case VARCHAR:
-    case CHAR:
-      inputType1 = PrimitiveCategory.STRING;
-      textConverter0 = ObjectInspectorConverters.getConverter(
-          (PrimitiveObjectInspector) arguments[0],
-          PrimitiveObjectInspectorFactory.writableStringObjectInspector);
-      break;
-    case TIMESTAMP:
-      timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
-          PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
-      break;
-    case DATE:
-      dateWritableConverter = ObjectInspectorConverters.getConverter(
-          (PrimitiveObjectInspector) arguments[0],
-          PrimitiveObjectInspectorFactory.writableDateObjectInspector);
-      break;
-    default:
-      throw new UDFArgumentTypeException(0,
-          "next_day() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got "
-              + inputType1);
-    }
-    inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
-    if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.STRING_GROUP) {
-      throw new UDFArgumentTypeException(1,
-          "next_day() only takes STRING_GROUP types as second argument, got " + inputType2);
-    }
-    textConverter1 = ObjectInspectorConverters.getConverter(
-        (PrimitiveObjectInspector) arguments[1],
-        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
     return outputOI;
   }
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    if (arguments[0].get() == null || arguments[1].get() == null) {
-      return null;
+    int dayOfWeekInt;
+    if (isDayOfWeekConst) {
+      dayOfWeekInt = dayOfWeekIntConst;
+    } else {
+      String dayOfWeek = getStringValue(arguments, 1, converters);
+      dayOfWeekInt = getIntDayOfWeek(dayOfWeek);
     }
-    String dayOfWeek = textConverter1.convert(arguments[1].get()).toString();
-    int dayOfWeekInt = getIntDayOfWeek(dayOfWeek);
     if (dayOfWeekInt == -1) {
       return null;
     }
 
-    Date date;
-    switch (inputType1) {
-    case STRING:
-      String dateString = textConverter0.convert(arguments[0].get()).toString();
-      try {
-        date = formatter.parse(dateString);
-      } catch (ParseException e) {
-        return null;
-      }
-      break;
-    case TIMESTAMP:
-      Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get()))
-          .getTimestamp();
-      date = ts;
-      break;
-    case DATE:
-      DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
-      date = dw.get();
-      break;
-    default:
-      throw new UDFArgumentTypeException(0,
-          "next_day() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
+    Date date = getDateValue(arguments, 0, inputTypes, converters);
+    if (date == null) {
+      return null;
     }
 
     nextDay(date, dayOfWeekInt);
     Date newDate = calendar.getTime();
-    output.set(formatter.format(newDate));
+    output.set(DateUtils.getDateFormat().format(newDate));
     return output;
   }
 
   @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append("next_day(");
-    if (children.length > 0) {
-      sb.append(children[0]);
-      for (int i = 1; i < children.length; i++) {
-        sb.append(", ");
-        sb.append(children[i]);
-      }
-    }
-    sb.append(")");
-    return sb.toString();
+    return getStandardDisplayString(getFuncName(), children);
+  }
+
+  @Override
+  protected String getFuncName() {
+    return "next_day";
   }
 
   protected Calendar nextDay(Date date, int dayOfWeek) {
@@ -196,6 +136,9 @@ public class GenericUDFNextDay extends G
   }
 
   protected int getIntDayOfWeek(String dayOfWeek) throws UDFArgumentException {
+    if (dayOfWeek == null) {
+      return -1;
+    }
     if (MON.matches(dayOfWeek)) {
       return Calendar.MONDAY;
     }
@@ -221,13 +164,9 @@ public class GenericUDFNextDay extends G
   }
 
   public static enum DayOfWeek {
-    MON ("MO", "MON", "MONDAY"),
-    TUE ("TU", "TUE", "TUESDAY"),
-    WED ("WE", "WED", "WEDNESDAY"),
-    THU ("TH", "THU", "THURSDAY"),
-    FRI ("FR", "FRI", "FRIDAY"),
-    SAT ("SA", "SAT", "SATURDAY"),
-    SUN ("SU", "SUN", "SUNDAY");
+    MON("MO", "MON", "MONDAY"), TUE("TU", "TUE", "TUESDAY"), WED("WE", "WED", "WEDNESDAY"), THU(
+        "TH", "THU", "THURSDAY"), FRI("FR", "FRI", "FRIDAY"), SAT("SA", "SAT", "SATURDAY"), SUN(
+        "SU", "SUN", "SUNDAY");
 
     private String name2;
     private String name3;
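
Besides moving to the shared argument helpers, the rewrite folds a constant day_of_week once in initialize() (the ConstantObjectInspector branch) instead of re-parsing the string on every row, and getIntDayOfWeek() now tolerates null by returning -1. For reference, a self-contained sketch of the calendar arithmetic the UDF relies on: the first date strictly after the input that falls on the requested weekday. This illustrates the semantics, not the exact body of the retained nextDay() method:

    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.Date;

    public class NextDaySketch {
      // 'dayOfWeek' is a java.util.Calendar constant, e.g. Calendar.TUESDAY.
      static Date nextDay(Date date, int dayOfWeek) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        int diff = dayOfWeek - cal.get(Calendar.DAY_OF_WEEK);
        if (diff <= 0) {
          diff += 7; // same weekday or earlier in the week: go to next week
        }
        cal.add(Calendar.DAY_OF_MONTH, diff);
        return cal.getTime();
      }

      public static void main(String[] args) throws Exception {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
        // Matches the Description example: next_day('2015-01-14', 'TU') -> '2015-01-20'
        System.out.println(fmt.format(nextDay(fmt.parse("2015-01-14"), Calendar.TUESDAY)));
      }
    }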

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Fri Mar  6 08:12:14 2015
@@ -59,6 +59,7 @@ import org.apache.hadoop.hive.ql.plan.Se
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.util.Shell;
 
@@ -93,7 +94,8 @@ public class TestExecDriver extends Test
       tmppath = new Path(tmpdir);
 
       fs = FileSystem.get(conf);
-      if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDirectory()) {
+      if (fs.exists(tmppath) &&
+          !ShimLoader.getHadoopShims().isDirectory(fs.getFileStatus(tmppath))) {
         throw new RuntimeException(tmpdir + " exists but is not a directory");
       }
 
@@ -166,7 +168,7 @@ public class TestExecDriver extends Test
     if (!fs.exists(di_test)) {
       throw new RuntimeException(tmpdir + File.separator + testdir + " does not exist");
     }
-    if (!fs.getFileStatus(di_test).isDirectory()) {
+    if (!ShimLoader.getHadoopShims().isDirectory(fs.getFileStatus(di_test))) {
       throw new RuntimeException(tmpdir + File.separator + testdir + " is not a directory");
     }
     FSDataInputStream fi_test = fs.open((fs.listStatus(di_test))[0].getPath());
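
The direct FileStatus.isDirectory() calls are routed through ShimLoader because that method is not available on every Hadoop line Hive supported at the time (older versions expose only the deprecated isDir()), so the test must not bind to either spelling at compile time. A toy sketch of the shim pattern; the names here (VersionedStatus, Shim, loadShim) are hypothetical stand-ins, not Hive's real shim API, whose actual entry point is the ShimLoader.getHadoopShims().isDirectory(FileStatus) call shown in the diff:

    public class ShimSketch {
      // Stand-in for Hadoop's FileStatus and its two spellings.
      static class VersionedStatus {
        final boolean dir;
        VersionedStatus(boolean dir) { this.dir = dir; }
        boolean isDir() { return dir; }        // older-Hadoop spelling
        boolean isDirectory() { return dir; }  // newer-Hadoop spelling
      }

      interface Shim {
        boolean isDirectory(VersionedStatus status);
      }

      // One implementation per supported Hadoop line; a loader picks the
      // right one at runtime so callers never touch version-specific APIs.
      static Shim loadShim(int hadoopMajorVersion) {
        return hadoopMajorVersion >= 2
            ? status -> status.isDirectory()
            : status -> status.isDir();
      }

      public static void main(String[] args) {
        System.out.println(loadShim(2).isDirectory(new VersionedStatus(true))); // true
      }
    }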

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestHiveSchemaConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestHiveSchemaConverter.java?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestHiveSchemaConverter.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestHiveSchemaConverter.java Fri Mar  6 08:12:14 2015
@@ -108,6 +108,16 @@ public class TestHiveSchemaConverter {
   }
 
   @Test
+  public void testDateType() throws Exception {
+    testConversion(
+        "a",
+        "date",
+        "message hive_schema {\n"
+            + "  optional int32 a (DATE);\n"
+            + "}\n");
+  }
+
+  @Test
   public void testArray() throws Exception {
     testConversion("arrayCol",
             "array<int>",

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java Fri Mar  6 08:12:14 2015
@@ -17,26 +17,30 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
-import junit.framework.TestCase;
-
 public class TestGenericUDFAddMonths extends TestCase {
 
-  public void testAddMonths() throws HiveException {
+  public void testAddMonthsInt() throws HiveException {
     GenericUDFAddMonths udf = new GenericUDFAddMonths();
     ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
     ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
     ObjectInspector[] arguments = { valueOI0, valueOI1 };
 
     udf.initialize(arguments);
+
+    // date str
     runAndVerify("2014-01-14", 1, "2014-02-14", udf);
     runAndVerify("2014-01-31", 1, "2014-02-28", udf);
     runAndVerify("2014-02-28", -1, "2014-01-31", udf);
@@ -46,7 +50,64 @@ public class TestGenericUDFAddMonths ext
     runAndVerify("2016-02-29", -12, "2015-02-28", udf);
     runAndVerify("2016-01-29", 1, "2016-02-29", udf);
     runAndVerify("2016-02-29", -1, "2016-01-31", udf);
-    runAndVerify("2014-01-32", 1, "2014-03-01", udf);
+    // wrong date str
+    runAndVerify("2014-02-30", 1, "2014-04-02", udf);
+    runAndVerify("2014-02-32", 1, "2014-04-04", udf);
+    runAndVerify("2014-01", 1, null, udf);
+
+    // ts str
+    runAndVerify("2014-01-14 10:30:00", 1, "2014-02-14", udf);
+    runAndVerify("2014-01-31 10:30:00", 1, "2014-02-28", udf);
+    runAndVerify("2014-02-28 10:30:00.1", -1, "2014-01-31", udf);
+    runAndVerify("2014-02-28 10:30:00.100", 2, "2014-04-30", udf);
+    runAndVerify("2014-04-30 10:30:00.001", -2, "2014-02-28", udf);
+    runAndVerify("2015-02-28 10:30:00.000000001", 12, "2016-02-29", udf);
+    runAndVerify("2016-02-29 10:30:00", -12, "2015-02-28", udf);
+    runAndVerify("2016-01-29 10:30:00", 1, "2016-02-29", udf);
+    runAndVerify("2016-02-29 10:30:00", -1, "2016-01-31", udf);
+    // wrong ts str
+    runAndVerify("2014-02-30 10:30:00", 1, "2014-04-02", udf);
+    runAndVerify("2014-02-32 10:30:00", 1, "2014-04-04", udf);
+    runAndVerify("2014/01/31 10:30:00", 1, null, udf);
+    runAndVerify("2014-01-31T10:30:00", 1, "2014-02-28", udf);
+  }
+
+  public void testAddMonthsShort() throws HiveException {
+    GenericUDFAddMonths udf = new GenericUDFAddMonths();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableShortObjectInspector;
+    ObjectInspector[] arguments = { valueOI0, valueOI1 };
+
+    udf.initialize(arguments);
+    // short
+    runAndVerify("2014-01-14", (short) 1, "2014-02-14", udf);
+  }
+
+  public void testAddMonthsByte() throws HiveException {
+    GenericUDFAddMonths udf = new GenericUDFAddMonths();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableByteObjectInspector;
+    ObjectInspector[] arguments = { valueOI0, valueOI1 };
+
+    udf.initialize(arguments);
+    // byte
+    runAndVerify("2014-01-14", (byte) 1, "2014-02-14", udf);
+  }
+
+  public void testAddMonthsLong() throws HiveException {
+    @SuppressWarnings("resource")
+    GenericUDFAddMonths udf = new GenericUDFAddMonths();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+    ObjectInspector[] arguments = { valueOI0, valueOI1 };
+
+    try {
+      udf.initialize(arguments);
+      assertTrue("add_months exception expected", false);
+    } catch (UDFArgumentTypeException e) {
+      assertEquals("add_months test",
+          "add_months only takes INT/SHORT/BYTE types as 2nd argument, got LONG", e.getMessage());
+    }
   }
 
   private void runAndVerify(String str, int months, String expResult, GenericUDF udf)
@@ -55,6 +116,24 @@ public class TestGenericUDFAddMonths ext
     DeferredObject valueObj1 = new DeferredJavaObject(new IntWritable(months));
     DeferredObject[] args = { valueObj0, valueObj1 };
     Text output = (Text) udf.evaluate(args);
-    assertEquals("add_months() test ", expResult, output.toString());
+    assertEquals("add_months() test ", expResult, output != null ? output.toString() : null);
+  }
+
+  private void runAndVerify(String str, short months, String expResult, GenericUDF udf)
+      throws HiveException {
+    DeferredObject valueObj0 = new DeferredJavaObject(new Text(str));
+    DeferredObject valueObj1 = new DeferredJavaObject(new ShortWritable(months));
+    DeferredObject[] args = { valueObj0, valueObj1 };
+    Text output = (Text) udf.evaluate(args);
+    assertEquals("add_months() test ", expResult, output != null ? output.toString() : null);
+  }
+
+  private void runAndVerify(String str, byte months, String expResult, GenericUDF udf)
+      throws HiveException {
+    DeferredObject valueObj0 = new DeferredJavaObject(new Text(str));
+    DeferredObject valueObj1 = new DeferredJavaObject(new ByteWritable(months));
+    DeferredObject[] args = { valueObj0, valueObj1 };
+    Text output = (Text) udf.evaluate(args);
+    assertEquals("add_months() test ", expResult, output != null ? output.toString() : null);
   }
 }
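
The "wrong date str" expectations above look surprising until SimpleDateFormat's default leniency is taken into account: out-of-range fields are rolled forward before the months are added, so "2014-02-30" first becomes 2014-03-02 (then +1 month gives 2014-04-02), while trailing text such as "T10:30:00" after a complete "yyyy-MM-dd" match is simply ignored; strings that don't match the pattern at all ("2014-01", "2014/01/31 ...") fail to parse and yield null. The same leniency is consistent with the last_day and next_day expectations further down. A sketch demonstrating the rollover:

    import java.text.SimpleDateFormat;

    public class LenientParseSketch {
      public static void main(String[] args) throws Exception {
        // SimpleDateFormat is lenient by default: day 30 of February is
        // silently rolled into March instead of being rejected.
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
        System.out.println(fmt.format(fmt.parse("2014-02-30"))); // 2014-03-02
        System.out.println(fmt.format(fmt.parse("2014-02-32"))); // 2014-03-04
      }
    }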

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java Fri Mar  6 08:12:14 2015
@@ -35,6 +35,8 @@ public class TestGenericUDFLastDay exten
     ObjectInspector[] arguments = { valueOI0 };
 
     udf.initialize(arguments);
+
+    // date str
     runAndVerify("2014-01-01", "2014-01-31", udf);
     runAndVerify("2014-01-14", "2014-01-31", udf);
     runAndVerify("2014-01-31", "2014-01-31", udf);
@@ -43,17 +45,26 @@ public class TestGenericUDFLastDay exten
     runAndVerify("2016-02-03", "2016-02-29", udf);
     runAndVerify("2016-02-28", "2016-02-29", udf);
     runAndVerify("2016-02-29", "2016-02-29", udf);
+    // wrong date str
+    runAndVerify("2016-02-30", "2016-03-31", udf);
+    runAndVerify("2014-01-32", "2014-02-28", udf);
     runAndVerify("01/14/2014", null, udf);
     runAndVerify(null, null, udf);
 
+    // ts str
     runAndVerify("2014-01-01 10:30:45", "2014-01-31", udf);
     runAndVerify("2014-01-14 10:30:45", "2014-01-31", udf);
-    runAndVerify("2014-01-31 10:30:45", "2014-01-31", udf);
-    runAndVerify("2014-02-02 10:30:45", "2014-02-28", udf);
-    runAndVerify("2014-02-28 10:30:45", "2014-02-28", udf);
-    runAndVerify("2016-02-03 10:30:45", "2016-02-29", udf);
+    runAndVerify("2014-01-31 10:30:45.1", "2014-01-31", udf);
+    runAndVerify("2014-02-02 10:30:45.100", "2014-02-28", udf);
+    runAndVerify("2014-02-28 10:30:45.001", "2014-02-28", udf);
+    runAndVerify("2016-02-03 10:30:45.000000001", "2016-02-29", udf);
     runAndVerify("2016-02-28 10:30:45", "2016-02-29", udf);
     runAndVerify("2016-02-29 10:30:45", "2016-02-29", udf);
+    // wrong ts str
+    runAndVerify("2016-02-30 10:30:45", "2016-03-31", udf);
+    runAndVerify("2014-01-32 10:30:45", "2014-02-28", udf);
+    runAndVerify("01/14/2014 10:30:45", null, udf);
+    runAndVerify("2016-02-28T10:30:45", "2016-02-29", udf);
   }
 
   private void runAndVerify(String str, String expResult, GenericUDF udf)

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLevenshtein.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLevenshtein.java?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLevenshtein.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLevenshtein.java Fri Mar  6 08:12:14 2015
@@ -92,7 +92,7 @@ public class TestGenericUDFLevenshtein e
       udf.initialize(arguments);
       assertTrue("levenshtein test. UDFArgumentLengthException is expected", false);
     } catch (UDFArgumentLengthException e) {
-      assertEquals("levenshtein test", "levenshtein requires 2 arguments, got 1", e.getMessage());
+      assertEquals("levenshtein test", "levenshtein requires 2 argument(s), got 1", e.getMessage());
     }
   }
 

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNextDay.java Fri Mar  6 08:12:14 2015
@@ -57,10 +57,10 @@ public class TestGenericUDFNextDay exten
 
     // start_date is Wed, full timestamp, full day name
     runAndVerify("2015-01-14 14:04:34", "sunday", "2015-01-18", udf);
-    runAndVerify("2015-01-14 14:04:34", "Monday", "2015-01-19", udf);
-    runAndVerify("2015-01-14 14:04:34", "Tuesday", "2015-01-20", udf);
-    runAndVerify("2015-01-14 14:04:34", "wednesday", "2015-01-21", udf);
-    runAndVerify("2015-01-14 14:04:34", "thursDAY", "2015-01-15", udf);
+    runAndVerify("2015-01-14 14:04:34.1", "Monday", "2015-01-19", udf);
+    runAndVerify("2015-01-14 14:04:34.100", "Tuesday", "2015-01-20", udf);
+    runAndVerify("2015-01-14 14:04:34.001", "wednesday", "2015-01-21", udf);
+    runAndVerify("2015-01-14 14:04:34.000000001", "thursDAY", "2015-01-15", udf);
     runAndVerify("2015-01-14 14:04:34", "FRIDAY", "2015-01-16", udf);
     runAndVerify("2015-01-14 14:04:34", "SATurday", "2015-01-17", udf);
 
@@ -72,6 +72,12 @@ public class TestGenericUDFNextDay exten
     // not valid values
     runAndVerify("01/14/2015", "TU", null, udf);
     runAndVerify("2015-01-14", "VT", null, udf);
+    runAndVerify("2015-02-30", "WE", "2015-03-04", udf);
+    runAndVerify("2015-02-32", "WE", "2015-03-11", udf);
+    runAndVerify("2015-02-30 10:30:00", "WE", "2015-03-04", udf);
+    runAndVerify("2015-02-32 10:30:00", "WE", "2015-03-11", udf);
+    runAndVerify("2015/01/14 14:04:34", "SAT", null, udf);
+    runAndVerify("2015-01-14T14:04:34", "SAT", "2015-01-17", udf);
   }
 
   public void testNextDayErrorArg1() throws HiveException {
@@ -86,7 +92,7 @@ public class TestGenericUDFNextDay exten
       assertTrue("UDFArgumentException expected", false);
     } catch (UDFArgumentException e) {
       assertEquals(
-          "next_day() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got LONG",
+          "next_day only takes STRING_GROUP, DATE_GROUP types as 1st argument, got LONG",
           e.getMessage());
     }
   }
@@ -102,7 +108,7 @@ public class TestGenericUDFNextDay exten
       udf.initialize(arguments);
       assertTrue("UDFArgumentException expected", false);
     } catch (UDFArgumentException e) {
-      assertEquals("next_day() only takes STRING_GROUP types as second argument, got INT",
+      assertEquals("next_day only takes STRING_GROUP types as 2nd argument, got INT",
           e.getMessage());
     }
   }

Modified: hive/branches/llap/ql/src/test/queries/clientpositive/parquet_types.q
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/queries/clientpositive/parquet_types.q?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/queries/clientpositive/parquet_types.q (original)
+++ hive/branches/llap/ql/src/test/queries/clientpositive/parquet_types.q Fri Mar  6 08:12:14 2015
@@ -14,7 +14,8 @@ CREATE TABLE parquet_types_staging (
   cbinary string,
   m1 map<string, varchar(3)>,
   l1 array<int>,
-  st1 struct<c1:int, c2:char(1)>
+  st1 struct<c1:int, c2:char(1)>,
+  d date
 ) ROW FORMAT DELIMITED
 FIELDS TERMINATED BY '|'
 COLLECTION ITEMS TERMINATED BY ','
@@ -33,7 +34,8 @@ CREATE TABLE parquet_types (
   cbinary binary,
   m1 map<string, varchar(3)>,
   l1 array<int>,
-  st1 struct<c1:int, c2:char(1)>
+  st1 struct<c1:int, c2:char(1)>,
+  d date
 ) STORED AS PARQUET;
 
 LOAD DATA LOCAL INPATH '../../data/files/parquet_types.txt' OVERWRITE INTO TABLE parquet_types_staging;
@@ -42,10 +44,10 @@ SELECT * FROM parquet_types_staging;
 
 INSERT OVERWRITE TABLE parquet_types
 SELECT cint, ctinyint, csmallint, cfloat, cdouble, cstring1, t, cchar, cvarchar,
-unhex(cbinary), m1, l1, st1 FROM parquet_types_staging;
+unhex(cbinary), m1, l1, st1, d FROM parquet_types_staging;
 
 SELECT cint, ctinyint, csmallint, cfloat, cdouble, cstring1, t, cchar, cvarchar,
-hex(cbinary), m1, l1, st1 FROM parquet_types;
+hex(cbinary), m1, l1, st1, d FROM parquet_types;
 
 SELECT cchar, LENGTH(cchar), cvarchar, LENGTH(cvarchar) FROM parquet_types;
 

Modified: hive/branches/llap/ql/src/test/queries/clientpositive/tez_join_hash.q
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/queries/clientpositive/tez_join_hash.q?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/queries/clientpositive/tez_join_hash.q (original)
+++ hive/branches/llap/ql/src/test/queries/clientpositive/tez_join_hash.q Fri Mar  6 08:12:14 2015
@@ -10,3 +10,26 @@ explain
 SELECT count(*) FROM src, orc_src where src.key=orc_src.key;
 
 SELECT count(*) FROM src, orc_src where src.key=orc_src.key;
+
+set hive.auto.convert.join=true;
+set hive.auto.convert.join.noconditionaltask=true;
+set hive.auto.convert.join.noconditionaltask.size=3000;
+
+explain
+select count(*) from (select x.key as key, y.value as value from
+srcpart x join srcpart y on (x.key = y.key)
+union all
+select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value;
+
+select key, count(*) from (select x.key as key, y.value as value from
+srcpart x join srcpart y on (x.key = y.key)
+union all
+select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value;
+
+set hive.execution.engine=mr;
+select key, count(*) from (select x.key as key, y.value as value from
+srcpart x join srcpart y on (x.key = y.key)
+union all
+select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value;
+
+

Modified: hive/branches/llap/ql/src/test/results/clientnegative/udf_add_months_error_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientnegative/udf_add_months_error_1.q.out?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientnegative/udf_add_months_error_1.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientnegative/udf_add_months_error_1.q.out Fri Mar  6 08:12:14 2015
@@ -1 +1 @@
-FAILED: SemanticException [Error 10016]: Line 1:18 Argument type mismatch '14567893456': ADD_MONTHS() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got LONG
+FAILED: SemanticException [Error 10016]: Line 1:18 Argument type mismatch '14567893456': add_months only takes STRING_GROUP, DATE_GROUP types as 1st argument, got LONG

Modified: hive/branches/llap/ql/src/test/results/clientnegative/udf_add_months_error_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientnegative/udf_add_months_error_2.q.out?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientnegative/udf_add_months_error_2.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientnegative/udf_add_months_error_2.q.out Fri Mar  6 08:12:14 2015
@@ -1 +1 @@
-FAILED: SemanticException [Error 10016]: Line 1:32 Argument type mismatch '2.4': ADD_MONTHS() only takes INT types as second argument, got DOUBLE
+FAILED: SemanticException [Error 10016]: Line 1:32 Argument type mismatch '2.4': add_months only takes INT/SHORT/BYTE types as 2nd argument, got DOUBLE

Modified: hive/branches/llap/ql/src/test/results/clientnegative/udf_last_day_error_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientnegative/udf_last_day_error_1.q.out?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientnegative/udf_last_day_error_1.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientnegative/udf_last_day_error_1.q.out Fri Mar  6 08:12:14 2015
@@ -1 +1 @@
-FAILED: SemanticException [Error 10016]: Line 1:16 Argument type mismatch '1423199465': LAST_DAY() only takes STRING/TIMESTAMP/DATEWRITABLE types, got INT
+FAILED: SemanticException [Error 10016]: Line 1:16 Argument type mismatch '1423199465': last_day only takes STRING_GROUP, DATE_GROUP types as 1st argument, got INT

Modified: hive/branches/llap/ql/src/test/results/clientnegative/udf_last_day_error_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientnegative/udf_last_day_error_2.q.out?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientnegative/udf_last_day_error_2.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientnegative/udf_last_day_error_2.q.out Fri Mar  6 08:12:14 2015
@@ -1 +1 @@
-FAILED: SemanticException [Error 10016]: Line 1:16 Argument type mismatch ''test'': Only primitive type arguments are accepted but map<string,string> is passed
+FAILED: SemanticException [Error 10016]: Line 1:16 Argument type mismatch ''test'': last_day only takes primitive types as 1st argument, got MAP

Modified: hive/branches/llap/ql/src/test/results/clientnegative/udf_next_day_error_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientnegative/udf_next_day_error_1.q.out?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientnegative/udf_next_day_error_1.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientnegative/udf_next_day_error_1.q.out Fri Mar  6 08:12:14 2015
@@ -1 +1 @@
-FAILED: SemanticException [Error 10016]: Line 1:16 Argument type mismatch '145622345': next_day() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got INT
+FAILED: SemanticException [Error 10016]: Line 1:16 Argument type mismatch '145622345': next_day only takes STRING_GROUP, DATE_GROUP types as 1st argument, got INT

Modified: hive/branches/llap/ql/src/test/results/clientnegative/udf_next_day_error_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientnegative/udf_next_day_error_2.q.out?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientnegative/udf_next_day_error_2.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientnegative/udf_next_day_error_2.q.out Fri Mar  6 08:12:14 2015
@@ -1 +1 @@
-FAILED: SemanticException [Error 10016]: Line 1:30 Argument type mismatch '4': next_day() only takes STRING_GROUP types as second argument, got INT
+FAILED: SemanticException [Error 10016]: Line 1:30 Argument type mismatch '4': next_day only takes STRING_GROUP types as 2nd argument, got INT

Modified: hive/branches/llap/ql/src/test/results/clientpositive/parquet_types.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/parquet_types.q.out?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/parquet_types.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/parquet_types.q.out Fri Mar  6 08:12:14 2015
@@ -19,7 +19,8 @@ PREHOOK: query: CREATE TABLE parquet_typ
   cbinary string,
   m1 map<string, varchar(3)>,
   l1 array<int>,
-  st1 struct<c1:int, c2:char(1)>
+  st1 struct<c1:int, c2:char(1)>,
+  d date
 ) ROW FORMAT DELIMITED
 FIELDS TERMINATED BY '|'
 COLLECTION ITEMS TERMINATED BY ','
@@ -40,7 +41,8 @@ POSTHOOK: query: CREATE TABLE parquet_ty
   cbinary string,
   m1 map<string, varchar(3)>,
   l1 array<int>,
-  st1 struct<c1:int, c2:char(1)>
+  st1 struct<c1:int, c2:char(1)>,
+  d date
 ) ROW FORMAT DELIMITED
 FIELDS TERMINATED BY '|'
 COLLECTION ITEMS TERMINATED BY ','
@@ -61,7 +63,8 @@ PREHOOK: query: CREATE TABLE parquet_typ
   cbinary binary,
   m1 map<string, varchar(3)>,
   l1 array<int>,
-  st1 struct<c1:int, c2:char(1)>
+  st1 struct<c1:int, c2:char(1)>,
+  d date
 ) STORED AS PARQUET
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
@@ -79,7 +82,8 @@ POSTHOOK: query: CREATE TABLE parquet_ty
   cbinary binary,
   m1 map<string, varchar(3)>,
   l1 array<int>,
-  st1 struct<c1:int, c2:char(1)>
+  st1 struct<c1:int, c2:char(1)>,
+  d date
 ) STORED AS PARQUET
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -100,37 +104,37 @@ POSTHOOK: query: SELECT * FROM parquet_t
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@parquet_types_staging
 #### A masked pattern was here ####
-100	1	1	1.0	0.0	abc	2011-01-01 01:01:01.111111111	a    	a  	B4F3CAFDBEDD	{"k1":"v1"}	[101,200]	{"c1":10,"c2":"a"}
-101	2	2	1.1	0.3	def	2012-02-02 02:02:02.222222222	ab   	ab 	68692CCAC0BDE7	{"k2":"v2"}	[102,200]	{"c1":10,"c2":"d"}
-102	3	3	1.2	0.6	ghi	2013-03-03 03:03:03.333333333	abc  	abc	B4F3CAFDBEDD	{"k3":"v3"}	[103,200]	{"c1":10,"c2":"g"}
-103	1	4	1.3	0.9	jkl	2014-04-04 04:04:04.444444444	abcd 	abcd	68692CCAC0BDE7	{"k4":"v4"}	[104,200]	{"c1":10,"c2":"j"}
-104	2	5	1.4	1.2	mno	2015-05-05 05:05:05.555555555	abcde	abcde	B4F3CAFDBEDD	{"k5":"v5"}	[105,200]	{"c1":10,"c2":"m"}
-105	3	1	1.0	1.5	pqr	2016-06-06 06:06:06.666666666	abcde	abcdef	68692CCAC0BDE7	{"k6":"v6"}	[106,200]	{"c1":10,"c2":"p"}
-106	1	2	1.1	1.8	stu	2017-07-07 07:07:07.777777777	abcde	abcdefg	B4F3CAFDBEDD	{"k7":"v7"}	[107,200]	{"c1":10,"c2":"s"}
-107	2	3	1.2	2.1	vwx	2018-08-08 08:08:08.888888888	bcdef	abcdefgh	68692CCAC0BDE7	{"k8":"v8"}	[108,200]	{"c1":10,"c2":"v"}
-108	3	4	1.3	2.4	yza	2019-09-09 09:09:09.999999999	cdefg	B4F3CAFDBE	68656C6C6F	{"k9":"v9"}	[109,200]	{"c1":10,"c2":"y"}
-109	1	5	1.4	2.7	bcd	2020-10-10 10:10:10.101010101	klmno	abcdedef	68692CCAC0BDE7	{"k10":"v10"}	[110,200]	{"c1":10,"c2":"b"}
-110	2	1	1.0	3.0	efg	2021-11-11 11:11:11.111111111	pqrst	abcdede	B4F3CAFDBEDD	{"k11":"v11"}	[111,200]	{"c1":10,"c2":"e"}
-111	3	2	1.1	3.3	hij	2022-12-12 12:12:12.121212121	nopqr	abcded	68692CCAC0BDE7	{"k12":"v12"}	[112,200]	{"c1":10,"c2":"h"}
-112	1	3	1.2	3.6	klm	2023-01-02 13:13:13.131313131	opqrs	abcdd	B4F3CAFDBEDD	{"k13":"v13"}	[113,200]	{"c1":10,"c2":"k"}
-113	2	4	1.3	3.9	nop	2024-02-02 14:14:14.141414141	pqrst	abc	68692CCAC0BDE7	{"k14":"v14"}	[114,200]	{"c1":10,"c2":"n"}
-114	3	5	1.4	4.2	qrs	2025-03-03 15:15:15.151515151	qrstu	b	B4F3CAFDBEDD	{"k15":"v15"}	[115,200]	{"c1":10,"c2":"q"}
-115	1	1	1.0	4.5	qrs	2026-04-04 16:16:16.161616161	rstuv	abcded	68692CCAC0BDE7	{"k16":"v16"}	[116,200]	{"c1":10,"c2":"q"}
-116	2	2	1.1	4.8	wxy	2027-05-05 17:17:17.171717171	stuvw	abcded	B4F3CAFDBEDD	{"k17":"v17"}	[117,200]	{"c1":10,"c2":"w"}
-117	3	3	1.2	5.1	zab	2028-06-06 18:18:18.181818181	tuvwx	abcded	68692CCAC0BDE7	{"k18":"v18"}	[118,200]	{"c1":10,"c2":"z"}
-118	1	4	1.3	5.4	cde	2029-07-07 19:19:19.191919191	uvwzy	abcdede	B4F3CAFDBEDD	{"k19":"v19"}	[119,200]	{"c1":10,"c2":"c"}
-119	2	5	1.4	5.7	fgh	2030-08-08 20:20:20.202020202	vwxyz	abcdede	68692CCAC0BDE7	{"k20":"v20"}	[120,200]	{"c1":10,"c2":"f"}
-120	3	1	1.0	6.0	ijk	2031-09-09 21:21:21.212121212	wxyza	abcde	B4F3CAFDBEDD	{"k21":"v21"}	[121,200]	{"c1":10,"c2":"i"}
-121	1	2	1.1	6.3	lmn	2032-10-10 22:22:22.222222222	bcdef	abcde		{"k22":"v22"}	[122,200]	{"c1":10,"c2":"l"}
+100	1	1	1.0	0.0	abc	2011-01-01 01:01:01.111111111	a    	a  	B4F3CAFDBEDD	{"k1":"v1"}	[101,200]	{"c1":10,"c2":"a"}	2011-01-01
+101	2	2	1.1	0.3	def	2012-02-02 02:02:02.222222222	ab   	ab 	68692CCAC0BDE7	{"k2":"v2"}	[102,200]	{"c1":10,"c2":"d"}	2012-02-02
+102	3	3	1.2	0.6	ghi	2013-03-03 03:03:03.333333333	abc  	abc	B4F3CAFDBEDD	{"k3":"v3"}	[103,200]	{"c1":10,"c2":"g"}	2013-03-03
+103	1	4	1.3	0.9	jkl	2014-04-04 04:04:04.444444444	abcd 	abcd	68692CCAC0BDE7	{"k4":"v4"}	[104,200]	{"c1":10,"c2":"j"}	2014-04-04
+104	2	5	1.4	1.2	mno	2015-05-05 05:05:05.555555555	abcde	abcde	B4F3CAFDBEDD	{"k5":"v5"}	[105,200]	{"c1":10,"c2":"m"}	2015-05-05
+105	3	1	1.0	1.5	pqr	2016-06-06 06:06:06.666666666	abcde	abcdef	68692CCAC0BDE7	{"k6":"v6"}	[106,200]	{"c1":10,"c2":"p"}	2016-06-06
+106	1	2	1.1	1.8	stu	2017-07-07 07:07:07.777777777	abcde	abcdefg	B4F3CAFDBEDD	{"k7":"v7"}	[107,200]	{"c1":10,"c2":"s"}	2017-07-07
+107	2	3	1.2	2.1	vwx	2018-08-08 08:08:08.888888888	bcdef	abcdefgh	68692CCAC0BDE7	{"k8":"v8"}	[108,200]	{"c1":10,"c2":"v"}	2018-08-08
+108	3	4	1.3	2.4	yza	2019-09-09 09:09:09.999999999	cdefg	B4F3CAFDBE	68656C6C6F	{"k9":"v9"}	[109,200]	{"c1":10,"c2":"y"}	2019-09-09
+109	1	5	1.4	2.7	bcd	2020-10-10 10:10:10.101010101	klmno	abcdedef	68692CCAC0BDE7	{"k10":"v10"}	[110,200]	{"c1":10,"c2":"b"}	2020-10-10
+110	2	1	1.0	3.0	efg	2021-11-11 11:11:11.111111111	pqrst	abcdede	B4F3CAFDBEDD	{"k11":"v11"}	[111,200]	{"c1":10,"c2":"e"}	2021-11-11
+111	3	2	1.1	3.3	hij	2022-12-12 12:12:12.121212121	nopqr	abcded	68692CCAC0BDE7	{"k12":"v12"}	[112,200]	{"c1":10,"c2":"h"}	2022-12-12
+112	1	3	1.2	3.6	klm	2023-01-02 13:13:13.131313131	opqrs	abcdd	B4F3CAFDBEDD	{"k13":"v13"}	[113,200]	{"c1":10,"c2":"k"}	2023-01-02
+113	2	4	1.3	3.9	nop	2024-02-02 14:14:14.141414141	pqrst	abc	68692CCAC0BDE7	{"k14":"v14"}	[114,200]	{"c1":10,"c2":"n"}	2024-02-02
+114	3	5	1.4	4.2	qrs	2025-03-03 15:15:15.151515151	qrstu	b	B4F3CAFDBEDD	{"k15":"v15"}	[115,200]	{"c1":10,"c2":"q"}	2025-03-03
+115	1	1	1.0	4.5	qrs	2026-04-04 16:16:16.161616161	rstuv	abcded	68692CCAC0BDE7	{"k16":"v16"}	[116,200]	{"c1":10,"c2":"q"}	2026-04-04
+116	2	2	1.1	4.8	wxy	2027-05-05 17:17:17.171717171	stuvw	abcded	B4F3CAFDBEDD	{"k17":"v17"}	[117,200]	{"c1":10,"c2":"w"}	2027-05-05
+117	3	3	1.2	5.1	zab	2028-06-06 18:18:18.181818181	tuvwx	abcded	68692CCAC0BDE7	{"k18":"v18"}	[118,200]	{"c1":10,"c2":"z"}	2028-06-06
+118	1	4	1.3	5.4	cde	2029-07-07 19:19:19.191919191	uvwzy	abcdede	B4F3CAFDBEDD	{"k19":"v19"}	[119,200]	{"c1":10,"c2":"c"}	2029-07-07
+119	2	5	1.4	5.7	fgh	2030-08-08 20:20:20.202020202	vwxyz	abcdede	68692CCAC0BDE7	{"k20":"v20"}	[120,200]	{"c1":10,"c2":"f"}	2030-08-08
+120	3	1	1.0	6.0	ijk	2031-09-09 21:21:21.212121212	wxyza	abcde	B4F3CAFDBEDD	{"k21":"v21"}	[121,200]	{"c1":10,"c2":"i"}	2031-09-09
+121	1	2	1.1	6.3	lmn	2032-10-10 22:22:22.222222222	bcdef	abcde		{"k22":"v22"}	[122,200]	{"c1":10,"c2":"l"}	2032-10-10
 PREHOOK: query: INSERT OVERWRITE TABLE parquet_types
 SELECT cint, ctinyint, csmallint, cfloat, cdouble, cstring1, t, cchar, cvarchar,
-unhex(cbinary), m1, l1, st1 FROM parquet_types_staging
+unhex(cbinary), m1, l1, st1, d FROM parquet_types_staging
 PREHOOK: type: QUERY
 PREHOOK: Input: default@parquet_types_staging
 PREHOOK: Output: default@parquet_types
 POSTHOOK: query: INSERT OVERWRITE TABLE parquet_types
 SELECT cint, ctinyint, csmallint, cfloat, cdouble, cstring1, t, cchar, cvarchar,
-unhex(cbinary), m1, l1, st1 FROM parquet_types_staging
+unhex(cbinary), m1, l1, st1, d FROM parquet_types_staging
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@parquet_types_staging
 POSTHOOK: Output: default@parquet_types
@@ -143,42 +147,43 @@ POSTHOOK: Lineage: parquet_types.csmalli
 POSTHOOK: Lineage: parquet_types.cstring1 SIMPLE [(parquet_types_staging)parquet_types_staging.FieldSchema(name:cstring1, type:string, comment:null), ]
 POSTHOOK: Lineage: parquet_types.ctinyint SIMPLE [(parquet_types_staging)parquet_types_staging.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
 POSTHOOK: Lineage: parquet_types.cvarchar SIMPLE [(parquet_types_staging)parquet_types_staging.FieldSchema(name:cvarchar, type:varchar(10), comment:null), ]
+POSTHOOK: Lineage: parquet_types.d SIMPLE [(parquet_types_staging)parquet_types_staging.FieldSchema(name:d, type:date, comment:null), ]
 POSTHOOK: Lineage: parquet_types.l1 SIMPLE [(parquet_types_staging)parquet_types_staging.FieldSchema(name:l1, type:array<int>, comment:null), ]
 POSTHOOK: Lineage: parquet_types.m1 SIMPLE [(parquet_types_staging)parquet_types_staging.FieldSchema(name:m1, type:map<string,varchar(3)>, comment:null), ]
 POSTHOOK: Lineage: parquet_types.st1 SIMPLE [(parquet_types_staging)parquet_types_staging.FieldSchema(name:st1, type:struct<c1:int,c2:char(1)>, comment:null), ]
 POSTHOOK: Lineage: parquet_types.t SIMPLE [(parquet_types_staging)parquet_types_staging.FieldSchema(name:t, type:timestamp, comment:null), ]
 PREHOOK: query: SELECT cint, ctinyint, csmallint, cfloat, cdouble, cstring1, t, cchar, cvarchar,
-hex(cbinary), m1, l1, st1 FROM parquet_types
+hex(cbinary), m1, l1, st1, d FROM parquet_types
 PREHOOK: type: QUERY
 PREHOOK: Input: default@parquet_types
 #### A masked pattern was here ####
 POSTHOOK: query: SELECT cint, ctinyint, csmallint, cfloat, cdouble, cstring1, t, cchar, cvarchar,
-hex(cbinary), m1, l1, st1 FROM parquet_types
+hex(cbinary), m1, l1, st1, d FROM parquet_types
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@parquet_types
 #### A masked pattern was here ####
-100	1	1	1.0	0.0	abc	2011-01-01 01:01:01.111111111	a    	a  	B4F3CAFDBEDD	{"k1":"v1"}	[101,200]	{"c1":10,"c2":"a"}
-101	2	2	1.1	0.3	def	2012-02-02 02:02:02.222222222	ab   	ab 	68692CCAC0BDE7	{"k2":"v2"}	[102,200]	{"c1":10,"c2":"d"}
-102	3	3	1.2	0.6	ghi	2013-03-03 03:03:03.333333333	abc  	abc	B4F3CAFDBEDD	{"k3":"v3"}	[103,200]	{"c1":10,"c2":"g"}
-103	1	4	1.3	0.9	jkl	2014-04-04 04:04:04.444444444	abcd 	abcd	68692CCAC0BDE7	{"k4":"v4"}	[104,200]	{"c1":10,"c2":"j"}
-104	2	5	1.4	1.2	mno	2015-05-05 05:05:05.555555555	abcde	abcde	B4F3CAFDBEDD	{"k5":"v5"}	[105,200]	{"c1":10,"c2":"m"}
-105	3	1	1.0	1.5	pqr	2016-06-06 06:06:06.666666666	abcde	abcdef	68692CCAC0BDE7	{"k6":"v6"}	[106,200]	{"c1":10,"c2":"p"}
-106	1	2	1.1	1.8	stu	2017-07-07 07:07:07.777777777	abcde	abcdefg	B4F3CAFDBEDD	{"k7":"v7"}	[107,200]	{"c1":10,"c2":"s"}
-107	2	3	1.2	2.1	vwx	2018-08-08 08:08:08.888888888	bcdef	abcdefgh	68692CCAC0BDE7	{"k8":"v8"}	[108,200]	{"c1":10,"c2":"v"}
-108	3	4	1.3	2.4	yza	2019-09-09 09:09:09.999999999	cdefg	B4F3CAFDBE	68656C6C6F	{"k9":"v9"}	[109,200]	{"c1":10,"c2":"y"}
-109	1	5	1.4	2.7	bcd	2020-10-10 10:10:10.101010101	klmno	abcdedef	68692CCAC0BDE7	{"k10":"v10"}	[110,200]	{"c1":10,"c2":"b"}
-110	2	1	1.0	3.0	efg	2021-11-11 11:11:11.111111111	pqrst	abcdede	B4F3CAFDBEDD	{"k11":"v11"}	[111,200]	{"c1":10,"c2":"e"}
-111	3	2	1.1	3.3	hij	2022-12-12 12:12:12.121212121	nopqr	abcded	68692CCAC0BDE7	{"k12":"v12"}	[112,200]	{"c1":10,"c2":"h"}
-112	1	3	1.2	3.6	klm	2023-01-02 13:13:13.131313131	opqrs	abcdd	B4F3CAFDBEDD	{"k13":"v13"}	[113,200]	{"c1":10,"c2":"k"}
-113	2	4	1.3	3.9	nop	2024-02-02 14:14:14.141414141	pqrst	abc	68692CCAC0BDE7	{"k14":"v14"}	[114,200]	{"c1":10,"c2":"n"}
-114	3	5	1.4	4.2	qrs	2025-03-03 15:15:15.151515151	qrstu	b	B4F3CAFDBEDD	{"k15":"v15"}	[115,200]	{"c1":10,"c2":"q"}
-115	1	1	1.0	4.5	qrs	2026-04-04 16:16:16.161616161	rstuv	abcded	68692CCAC0BDE7	{"k16":"v16"}	[116,200]	{"c1":10,"c2":"q"}
-116	2	2	1.1	4.8	wxy	2027-05-05 17:17:17.171717171	stuvw	abcded	B4F3CAFDBEDD	{"k17":"v17"}	[117,200]	{"c1":10,"c2":"w"}
-117	3	3	1.2	5.1	zab	2028-06-06 18:18:18.181818181	tuvwx	abcded	68692CCAC0BDE7	{"k18":"v18"}	[118,200]	{"c1":10,"c2":"z"}
-118	1	4	1.3	5.4	cde	2029-07-07 19:19:19.191919191	uvwzy	abcdede	B4F3CAFDBEDD	{"k19":"v19"}	[119,200]	{"c1":10,"c2":"c"}
-119	2	5	1.4	5.7	fgh	2030-08-08 20:20:20.202020202	vwxyz	abcdede	68692CCAC0BDE7	{"k20":"v20"}	[120,200]	{"c1":10,"c2":"f"}
-120	3	1	1.0	6.0	ijk	2031-09-09 21:21:21.212121212	wxyza	abcde	B4F3CAFDBEDD	{"k21":"v21"}	[121,200]	{"c1":10,"c2":"i"}
-121	1	2	1.1	6.3	lmn	2032-10-10 22:22:22.222222222	bcdef	abcde		{"k22":"v22"}	[122,200]	{"c1":10,"c2":"l"}
+100	1	1	1.0	0.0	abc	2011-01-01 01:01:01.111111111	a    	a  	B4F3CAFDBEDD	{"k1":"v1"}	[101,200]	{"c1":10,"c2":"a"}	2011-01-01
+101	2	2	1.1	0.3	def	2012-02-02 02:02:02.222222222	ab   	ab 	68692CCAC0BDE7	{"k2":"v2"}	[102,200]	{"c1":10,"c2":"d"}	2012-02-02
+102	3	3	1.2	0.6	ghi	2013-03-03 03:03:03.333333333	abc  	abc	B4F3CAFDBEDD	{"k3":"v3"}	[103,200]	{"c1":10,"c2":"g"}	2013-03-03
+103	1	4	1.3	0.9	jkl	2014-04-04 04:04:04.444444444	abcd 	abcd	68692CCAC0BDE7	{"k4":"v4"}	[104,200]	{"c1":10,"c2":"j"}	2014-04-04
+104	2	5	1.4	1.2	mno	2015-05-05 05:05:05.555555555	abcde	abcde	B4F3CAFDBEDD	{"k5":"v5"}	[105,200]	{"c1":10,"c2":"m"}	2015-05-05
+105	3	1	1.0	1.5	pqr	2016-06-06 06:06:06.666666666	abcde	abcdef	68692CCAC0BDE7	{"k6":"v6"}	[106,200]	{"c1":10,"c2":"p"}	2016-06-06
+106	1	2	1.1	1.8	stu	2017-07-07 07:07:07.777777777	abcde	abcdefg	B4F3CAFDBEDD	{"k7":"v7"}	[107,200]	{"c1":10,"c2":"s"}	2017-07-07
+107	2	3	1.2	2.1	vwx	2018-08-08 08:08:08.888888888	bcdef	abcdefgh	68692CCAC0BDE7	{"k8":"v8"}	[108,200]	{"c1":10,"c2":"v"}	2018-08-08
+108	3	4	1.3	2.4	yza	2019-09-09 09:09:09.999999999	cdefg	B4F3CAFDBE	68656C6C6F	{"k9":"v9"}	[109,200]	{"c1":10,"c2":"y"}	2019-09-09
+109	1	5	1.4	2.7	bcd	2020-10-10 10:10:10.101010101	klmno	abcdedef	68692CCAC0BDE7	{"k10":"v10"}	[110,200]	{"c1":10,"c2":"b"}	2020-10-10
+110	2	1	1.0	3.0	efg	2021-11-11 11:11:11.111111111	pqrst	abcdede	B4F3CAFDBEDD	{"k11":"v11"}	[111,200]	{"c1":10,"c2":"e"}	2021-11-11
+111	3	2	1.1	3.3	hij	2022-12-12 12:12:12.121212121	nopqr	abcded	68692CCAC0BDE7	{"k12":"v12"}	[112,200]	{"c1":10,"c2":"h"}	2022-12-12
+112	1	3	1.2	3.6	klm	2023-01-02 13:13:13.131313131	opqrs	abcdd	B4F3CAFDBEDD	{"k13":"v13"}	[113,200]	{"c1":10,"c2":"k"}	2023-01-02
+113	2	4	1.3	3.9	nop	2024-02-02 14:14:14.141414141	pqrst	abc	68692CCAC0BDE7	{"k14":"v14"}	[114,200]	{"c1":10,"c2":"n"}	2024-02-02
+114	3	5	1.4	4.2	qrs	2025-03-03 15:15:15.151515151	qrstu	b	B4F3CAFDBEDD	{"k15":"v15"}	[115,200]	{"c1":10,"c2":"q"}	2025-03-03
+115	1	1	1.0	4.5	qrs	2026-04-04 16:16:16.161616161	rstuv	abcded	68692CCAC0BDE7	{"k16":"v16"}	[116,200]	{"c1":10,"c2":"q"}	2026-04-04
+116	2	2	1.1	4.8	wxy	2027-05-05 17:17:17.171717171	stuvw	abcded	B4F3CAFDBEDD	{"k17":"v17"}	[117,200]	{"c1":10,"c2":"w"}	2027-05-05
+117	3	3	1.2	5.1	zab	2028-06-06 18:18:18.181818181	tuvwx	abcded	68692CCAC0BDE7	{"k18":"v18"}	[118,200]	{"c1":10,"c2":"z"}	2028-06-06
+118	1	4	1.3	5.4	cde	2029-07-07 19:19:19.191919191	uvwzy	abcdede	B4F3CAFDBEDD	{"k19":"v19"}	[119,200]	{"c1":10,"c2":"c"}	2029-07-07
+119	2	5	1.4	5.7	fgh	2030-08-08 20:20:20.202020202	vwxyz	abcdede	68692CCAC0BDE7	{"k20":"v20"}	[120,200]	{"c1":10,"c2":"f"}	2030-08-08
+120	3	1	1.0	6.0	ijk	2031-09-09 21:21:21.212121212	wxyza	abcde	B4F3CAFDBEDD	{"k21":"v21"}	[121,200]	{"c1":10,"c2":"i"}	2031-09-09
+121	1	2	1.1	6.3	lmn	2032-10-10 22:22:22.222222222	bcdef	abcde		{"k22":"v22"}	[122,200]	{"c1":10,"c2":"l"}	2032-10-10
 PREHOOK: query: SELECT cchar, LENGTH(cchar), cvarchar, LENGTH(cvarchar) FROM parquet_types
 PREHOOK: type: QUERY
 PREHOOK: Input: default@parquet_types

Modified: hive/branches/llap/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out?rev=1664556&r1=1664555&r2=1664556&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/tez/tez_join_hash.q.out Fri Mar  6 08:12:14 2015
@@ -118,3 +118,839 @@ POSTHOOK: Input: default@orc_src
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 1028
+PREHOOK: query: explain
+select count(*) from (select x.key as key, y.value as value from
+srcpart x join srcpart y on (x.key = y.key)
+union all
+select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select count(*) from (select x.key as key, y.value as value from
+srcpart x join srcpart y on (x.key = y.key)
+union all
+select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Map 6 <- Map 7 (BROADCAST_EDGE), Union 3 (CONTAINS)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE), Map 7 (BROADCAST_EDGE), Union 3 (CONTAINS)
+        Reducer 4 <- Union 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: x
+                  Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key is not null and value is not null) (type: boolean)
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: string)
+        Map 5 
+            Map Operator Tree:
+                TableScan
+                  alias: x
+                  Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+        Map 6 
+            Map Operator Tree:
+                TableScan
+                  alias: x
+                  Filter Operator
+                    predicate: value is not null (type: boolean)
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Map Join Operator
+                        condition map:
+                             Inner Join 0 to 1
+                        keys:
+                          0 _col1 (type: string)
+                          1 _col0 (type: string)
+                        outputColumnNames: _col0, _col1
+                        input vertices:
+                          1 Map 7
+                        Group By Operator
+                          aggregations: count()
+                          keys: _col0 (type: string), _col1 (type: string)
+                          mode: hash
+                          outputColumnNames: _col0, _col1, _col2
+                          Reduce Output Operator
+                            key expressions: _col0 (type: string), _col1 (type: string)
+                            sort order: ++
+                            Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                            value expressions: _col2 (type: bigint)
+        Map 7 
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: value is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: value (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 _col0 (type: string)
+                  1 _col0 (type: string)
+                outputColumnNames: _col1, _col2
+                Select Operator
+                  expressions: _col2 (type: string), _col1 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Map Join Operator
+                    condition map:
+                         Inner Join 0 to 1
+                    keys:
+                      0 _col1 (type: string)
+                      1 _col0 (type: string)
+                    outputColumnNames: _col0, _col1
+                    input vertices:
+                      1 Map 7
+                    Group By Operator
+                      aggregations: count()
+                      keys: _col0 (type: string), _col1 (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string), _col1 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                        value expressions: _col2 (type: bigint)
+        Reducer 4 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: string), KEY._col1 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1155 Data size: 12270 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col2 (type: bigint)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1155 Data size: 12270 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1155 Data size: 12270 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+        Union 3 
+            Vertex: Union 3
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, count(*) from (select x.key as key, y.value as value from
+srcpart x join srcpart y on (x.key = y.key)
+union all
+select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from (select x.key as key, y.value as value from
+srcpart x join srcpart y on (x.key = y.key)
+union all
+select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+0	468
+10	20
+103	144
+105	20
+116	20
+120	144
+125	144
+129	144
+131	20
+134	144
+136	20
+143	20
+145	20
+149	144
+150	20
+155	20
+157	20
+158	20
+160	20
+163	20
+164	144
+166	20
+17	20
+170	20
+172	144
+180	20
+183	20
+189	20
+19	20
+190	20
+191	144
+193	468
+195	144
+196	20
+20	20
+205	144
+207	144
+209	144
+213	144
+216	144
+217	144
+223	144
+224	144
+228	20
+229	144
+233	144
+235	20
+238	144
+239	144
+24	144
+241	20
+244	20
+247	20
+248	20
+255	144
+258	20
+26	144
+260	20
+263	20
+265	144
+266	20
+272	144
+273	468
+274	20
+28	20
+281	144
+286	20
+291	20
+296	20
+298	468
+30	20
+302	20
+305	20
+306	20
+307	144
+308	20
+309	144
+315	20
+316	468
+317	144
+318	468
+321	144
+325	144
+33	20
+331	144
+332	20
+335	20
+339	20
+342	144
+345	20
+353	144
+356	20
+360	20
+366	20
+367	144
+368	20
+369	468
+37	144
+373	20
+375	20
+377	20
+378	20
+379	20
+382	144
+384	468
+386	20
+389	20
+394	20
+395	144
+396	468
+399	144
+400	20
+401	2100
+402	20
+406	1088
+407	20
+41	20
+413	144
+414	144
+42	144
+430	468
+431	468
+436	20
+44	20
+446	20
+448	20
+449	20
+452	20
+459	144
+462	144
+466	468
+467	20
+468	1088
+47	20
+472	20
+480	468
+482	20
+484	20
+485	20
+487	20
+490	20
+493	20
+494	20
+495	20
+496	20
+497	20
+498	468
+5	468
+51	144
+54	20
+57	20
+58	144
+65	20
+66	20
+69	20
+70	468
+74	20
+77	20
+78	20
+8	20
+80	20
+84	144
+85	20
+87	20
+9	20
+92	20
+95	144
+96	20
+100	144
+104	144
+11	20
+111	20
+113	144
+114	20
+118	144
+119	468
+12	144
+126	20
+128	468
+133	20
+137	144
+138	1088
+146	144
+15	144
+152	144
+153	20
+156	20
+162	20
+165	144
+167	468
+168	20
+169	1088
+174	144
+175	144
+176	144
+177	20
+178	20
+179	144
+18	144
+181	20
+186	20
+187	468
+192	20
+194	20
+197	144
+199	468
+2	20
+200	144
+201	20
+202	20
+203	144
+208	468
+214	20
+218	20
+219	144
+221	144
+222	20
+226	20
+230	2100
+237	144
+242	144
+249	20
+252	20
+256	144
+257	20
+262	20
+27	20
+275	20
+277	1088
+278	144
+280	144
+282	144
+283	20
+284	20
+285	20
+287	20
+288	144
+289	20
+292	20
+310	20
+311	468
+322	144
+323	20
+327	468
+333	144
+336	20
+338	20
+34	20
+341	20
+344	144
+348	2100
+35	468
+351	20
+362	20
+364	20
+365	20
+374	20
+392	20
+393	20
+397	144
+4	20
+403	468
+404	144
+409	468
+411	20
+417	468
+418	20
+419	20
+421	20
+424	144
+427	20
+429	144
+43	20
+432	20
+435	20
+437	20
+438	468
+439	144
+443	20
+444	20
+453	20
+454	468
+455	20
+457	20
+458	144
+460	20
+463	144
+469	2100
+470	20
+475	20
+477	20
+478	144
+479	20
+481	20
+483	20
+489	1088
+491	20
+492	144
+53	20
+64	20
+67	144
+72	144
+76	144
+82	20
+83	144
+86	20
+90	468
+97	144
+98	144
+PREHOOK: query: select key, count(*) from (select x.key as key, y.value as value from
+srcpart x join srcpart y on (x.key = y.key)
+union all
+select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select key, count(*) from (select x.key as key, y.value as value from
+srcpart x join srcpart y on (x.key = y.key)
+union all
+select key, value from srcpart z) a join src b on (a.value = b.value) group by a.key, a.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+0	468
+10	20
+100	144
+103	144
+104	144
+105	20
+11	20
+111	20
+113	144
+114	20
+116	20
+118	144
+119	468
+12	144
+120	144
+125	144
+126	20
+128	468
+129	144
+131	20
+133	20
+134	144
+136	20
+137	144
+138	1088
+143	20
+145	20
+146	144
+149	144
+15	144
+150	20
+152	144
+153	20
+155	20
+156	20
+157	20
+158	20
+160	20
+162	20
+163	20
+164	144
+165	144
+166	20
+167	468
+168	20
+169	1088
+17	20
+170	20
+172	144
+174	144
+175	144
+176	144
+177	20
+178	20
+179	144
+18	144
+180	20
+181	20
+183	20
+186	20
+187	468
+189	20
+19	20
+190	20
+191	144
+192	20
+193	468
+194	20
+195	144
+196	20
+197	144
+199	468
+2	20
+20	20
+200	144
+201	20
+202	20
+203	144
+205	144
+207	144
+208	468
+209	144
+213	144
+214	20
+216	144
+217	144
+218	20
+219	144
+221	144
+222	20
+223	144
+224	144
+226	20
+228	20
+229	144
+230	2100
+233	144
+235	20
+237	144
+238	144
+239	144
+24	144
+241	20
+242	144
+244	20
+247	20
+248	20
+249	20
+252	20
+255	144
+256	144
+257	20
+258	20
+26	144
+260	20
+262	20
+263	20
+265	144
+266	20
+27	20
+272	144
+273	468
+274	20
+275	20
+277	1088
+278	144
+28	20
+280	144
+281	144
+282	144
+283	20
+284	20
+285	20
+286	20
+287	20
+288	144
+289	20
+291	20
+292	20
+296	20
+298	468
+30	20
+302	20
+305	20
+306	20
+307	144
+308	20
+309	144
+310	20
+311	468
+315	20
+316	468
+317	144
+318	468
+321	144
+322	144
+323	20
+325	144
+327	468
+33	20
+331	144
+332	20
+333	144
+335	20
+336	20
+338	20
+339	20
+34	20
+341	20
+342	144
+344	144
+345	20
+348	2100
+35	468
+351	20
+353	144
+356	20
+360	20
+362	20
+364	20
+365	20
+366	20
+367	144
+368	20
+369	468
+37	144
+373	20
+374	20
+375	20
+377	20
+378	20
+379	20
+382	144
+384	468
+386	20
+389	20
+392	20
+393	20
+394	20
+395	144
+396	468
+397	144
+399	144
+4	20
+400	20
+401	2100
+402	20
+403	468
+404	144
+406	1088
+407	20
+409	468
+41	20
+411	20
+413	144
+414	144
+417	468
+418	20
+419	20
+42	144
+421	20
+424	144
+427	20
+429	144
+43	20
+430	468
+431	468
+432	20
+435	20
+436	20
+437	20
+438	468
+439	144
+44	20
+443	20
+444	20
+446	20
+448	20
+449	20
+452	20
+453	20
+454	468
+455	20
+457	20
+458	144
+459	144
+460	20
+462	144
+463	144
+466	468
+467	20
+468	1088
+469	2100
+47	20
+470	20
+472	20
+475	20
+477	20
+478	144
+479	20
+480	468
+481	20
+482	20
+483	20
+484	20
+485	20
+487	20
+489	1088
+490	20
+491	20
+492	144
+493	20
+494	20
+495	20
+496	20
+497	20
+498	468
+5	468
+51	144
+53	20
+54	20
+57	20
+58	144
+64	20
+65	20
+66	20
+67	144
+69	20
+70	468
+72	144
+74	20
+76	144
+77	20
+78	20
+8	20
+80	20
+82	20
+83	144
+84	144
+85	20
+86	20
+87	20
+9	20
+90	468
+92	20
+95	144
+96	20
+97	144
+98	144
