hive-commits mailing list archives

From: ser...@apache.org
Subject: svn commit: r1673969 [10/19] - in /hive/branches/llap: ./ beeline/src/java/org/apache/hive/beeline/ bin/ cli/src/java/org/apache/hadoop/hive/cli/ cli/src/test/org/apache/hadoop/hive/cli/ common/ common/src/java/org/apache/hadoop/hive/common/jsonexplain...
Date: Wed, 15 Apr 2015 22:04:07 GMT
Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java Wed Apr 15 22:04:00 2015
@@ -173,7 +173,7 @@ public class TestVectorGroupByOperator {
     List<String> mapColumnNames = new ArrayList<String>();
     mapColumnNames.add("Key");
     mapColumnNames.add("Value");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildKeyGroupByDesc (ctx, "max",
         "Value", TypeInfoFactory.longTypeInfo,
@@ -1710,7 +1710,7 @@ public class TestVectorGroupByOperator {
 
     mapColumnNames.put("value", i);
     outputColumnNames.add("value");
-    VectorizationContext ctx = new VectorizationContext(outputColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", outputColumnNames);
 
     ArrayList<AggregationDesc> aggs = new ArrayList(1);
     aggs.add(
@@ -1821,7 +1821,7 @@ public class TestVectorGroupByOperator {
     List<String> mapColumnNames = new ArrayList<String>();
     mapColumnNames.add("Key");
     mapColumnNames.add("Value");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
     Set<Object> keys = new HashSet<Object>();
 
     AggregationDesc agg = buildAggregationDesc(ctx, aggregateName,
@@ -2235,7 +2235,7 @@ public class TestVectorGroupByOperator {
       Object expected) throws HiveException {
     List<String> mapColumnNames = new ArrayList<String>();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescCountStar (ctx);
 
@@ -2264,7 +2264,7 @@ public class TestVectorGroupByOperator {
       Object expected) throws HiveException {
     List<String> mapColumnNames = new ArrayList<String>();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescType(ctx, "count", "A", TypeInfoFactory.longTypeInfo);
     VectorGroupByDesc vectorDesc = desc.getVectorDesc();
@@ -2296,7 +2296,7 @@ public class TestVectorGroupByOperator {
       Object expected) throws HiveException {
     List<String> mapColumnNames = new ArrayList<String>();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
         TypeInfoFactory.stringTypeInfo);
@@ -2322,11 +2322,12 @@ public class TestVectorGroupByOperator {
   }
 
   public void testAggregateDecimalIterable (
-String aggregateName, Iterable<VectorizedRowBatch> data,
-      Object expected) throws HiveException {
-    List<String> mapColumnNames = new ArrayList<String>();
-    mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+          String aggregateName,
+          Iterable<VectorizedRowBatch> data,
+          Object expected) throws HiveException {
+          List<String> mapColumnNames = new ArrayList<String>();
+          mapColumnNames.add("A");
+          VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc =
         buildGroupByDescType(ctx, aggregateName, "A", TypeInfoFactory.getDecimalTypeInfo(30, 4));
@@ -2358,7 +2359,7 @@ String aggregateName, Iterable<Vectorize
       Object expected) throws HiveException {
     List<String> mapColumnNames = new ArrayList<String>();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescType (ctx, aggregateName, "A",
         TypeInfoFactory.doubleTypeInfo);
@@ -2389,7 +2390,7 @@ String aggregateName, Iterable<Vectorize
       Object expected) throws HiveException {
     List<String> mapColumnNames = new ArrayList<String>();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A", TypeInfoFactory.longTypeInfo);
 
@@ -2420,7 +2421,7 @@ String aggregateName, Iterable<Vectorize
     List<String> mapColumnNames = new ArrayList<String>();
     mapColumnNames.add("Key");
     mapColumnNames.add("Value");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     Set<Object> keys = new HashSet<Object>();
 
@@ -2487,7 +2488,7 @@ String aggregateName, Iterable<Vectorize
     List<String> mapColumnNames = new ArrayList<String>();
     mapColumnNames.add("Key");
     mapColumnNames.add("Value");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
     Set<Object> keys = new HashSet<Object>();
 
     GroupByDesc desc = buildKeyGroupByDesc (ctx, aggregateName, "Value",

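A minimal standalone sketch of the updated constructor, assuming only what the hunks above show: VectorizationContext now takes a context label as its first argument (these tests pass the literal "name"), with the initial column list unchanged. The class and main method below are illustrative, not part of the commit. Note also that several tests further down re-create the context to reset the intermediate (scratch) column allocation.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;

    public class VectorizationContextExample {
      public static void main(String[] args) {
        // New two-argument form: a context name plus the initial columns.
        List<String> columns = new ArrayList<String>();
        columns.add("Key");
        columns.add("Value");
        VectorizationContext ctx = new VectorizationContext("name", columns);

        // The name-only form also exists; columns can be added afterwards.
        VectorizationContext ctx2 = new VectorizationContext("name");
        ctx2.addInitialColumn("Key");
      }
    }
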
Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java Wed Apr 15 22:04:00 2015
@@ -88,7 +88,7 @@ public class TestVectorSelectOperator {
     columns.add("a");
     columns.add("b");
     columns.add("c");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     SelectDesc selDesc = new SelectDesc(false);
     List<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Wed Apr 15 22:04:00 2015
@@ -247,7 +247,7 @@ public class TestVectorizationContext {
     children5.add(col6Expr);
     modExpr.setChildren(children5);
 
-    VectorizationContext vc = new VectorizationContext();
+    VectorizationContext vc = new VectorizationContext("name");
     vc.addInitialColumn("col1");
     vc.addInitialColumn("col2");
     vc.addInitialColumn("col3");
@@ -297,7 +297,7 @@ public class TestVectorizationContext {
     columns.add("col0");
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -322,7 +322,7 @@ public class TestVectorizationContext {
     columns.add("col0");
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -341,7 +341,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columns);
+    vc = new VectorizationContext("name", columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -360,7 +360,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columns);
+    vc = new VectorizationContext("name", columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -378,7 +378,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columns);
+    vc = new VectorizationContext("name", columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -395,7 +395,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columns);
+    vc = new VectorizationContext("name", columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -412,7 +412,7 @@ public class TestVectorizationContext {
     children1.add(col2Expr);
     exprDesc.setChildren(children1);
 
-    vc = new VectorizationContext(columns);
+    vc = new VectorizationContext("name", columns);
 
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -434,7 +434,7 @@ public class TestVectorizationContext {
 
     List<String> columns = new ArrayList<String>();
     columns.add("col1");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
 
@@ -480,7 +480,7 @@ public class TestVectorizationContext {
     columns.add("col0");
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -530,7 +530,7 @@ public class TestVectorizationContext {
     List<String> columns = new ArrayList<String>();
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     VectorExpression veAnd = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(veAnd.getClass(), FilterExprAndExpr.class);
     assertEquals(veAnd.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
@@ -555,7 +555,7 @@ public class TestVectorizationContext {
     orExprDesc.setChildren(children4);
 
     //Allocate new Vectorization context to reset the intermediate columns.
-    vc = new VectorizationContext(columns);
+    vc = new VectorizationContext("name", columns);
     VectorExpression veOr = vc.getVectorExpression(orExprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(veOr.getClass(), FilterExprOrExpr.class);
     assertEquals(veOr.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
@@ -596,7 +596,7 @@ public class TestVectorizationContext {
     columns.add("col0");
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(notExpr, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -633,7 +633,7 @@ public class TestVectorizationContext {
     List<String> columns = new ArrayList<String>();
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(isNullExpr, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -674,7 +674,7 @@ public class TestVectorizationContext {
     List<String> columns = new ArrayList<String>();
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(isNotNullExpr, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -703,7 +703,7 @@ public class TestVectorizationContext {
 
     List<String> columns = new ArrayList<String>();
     columns.add("a");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     VectorExpression ve = vc.getVectorExpression(scalarMinusConstant, VectorExpressionDescriptor.Mode.PROJECTION);
 
     assertEquals(ve.getClass(), LongScalarSubtractLongColumn.class);
@@ -726,7 +726,7 @@ public class TestVectorizationContext {
     columns.add("col0");
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -744,7 +744,7 @@ public class TestVectorizationContext {
     List<String> columns = new ArrayList<String>();
     columns.add("col0");
     columns.add("col1");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
 
@@ -762,7 +762,7 @@ public class TestVectorizationContext {
     List<String> columns = new ArrayList<String>();
     columns.add("col0");
     columns.add("col1");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
 
@@ -787,7 +787,7 @@ public class TestVectorizationContext {
 
     List<String> columns = new ArrayList<String>();
     columns.add("a");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     VectorExpression ve = vc.getVectorExpression(scalarGreaterColExpr, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(FilterLongScalarGreaterLongColumn.class, ve.getClass());
   }
@@ -810,7 +810,7 @@ public class TestVectorizationContext {
 
     List<String> columns = new ArrayList<String>();
     columns.add("a");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     VectorExpression ve = vc.getVectorExpression(colEqualScalar, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(FilterLongColEqualLongScalar.class, ve.getClass());
   }
@@ -833,7 +833,7 @@ public class TestVectorizationContext {
 
     List<String> columns = new ArrayList<String>();
     columns.add("a");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     VectorExpression ve = vc.getVectorExpression(colEqualScalar, VectorExpressionDescriptor.Mode.PROJECTION);
     assertEquals(LongColEqualLongScalar.class, ve.getClass());
   }
@@ -850,7 +850,7 @@ public class TestVectorizationContext {
     List<String> columns = new ArrayList<String>();
     columns.add("b");
     columns.add("a");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     GenericUDF stringLower = new GenericUDFLower();
     stringUnary.setGenericUDF(stringLower);
 
@@ -860,7 +860,7 @@ public class TestVectorizationContext {
     assertEquals(1, ((StringLower) ve).getColNum());
     assertEquals(2, ((StringLower) ve).getOutputColumn());
 
-    vc = new VectorizationContext(columns);
+    vc = new VectorizationContext("name", columns);
 
     ExprNodeGenericFuncDesc anotherUnary = new ExprNodeGenericFuncDesc();
     anotherUnary.setTypeInfo(TypeInfoFactory.stringTypeInfo);
@@ -895,7 +895,7 @@ public class TestVectorizationContext {
     List<String> columns = new ArrayList<String>();
     columns.add("b");
     columns.add("a");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     // Sin(double)
     GenericUDFBridge gudfBridge = new GenericUDFBridge("sin", false, UDFSin.class.getName());
@@ -986,7 +986,7 @@ public class TestVectorizationContext {
     List<String> columns = new ArrayList<String>();
     columns.add("b");
     columns.add("a");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     //UDFYear
     GenericUDFBridge gudfBridge = new GenericUDFBridge("year", false, UDFYear.class.getName());
@@ -1024,7 +1024,7 @@ public class TestVectorizationContext {
     columns.add("col0");
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterStringColumnBetween);
@@ -1050,7 +1050,7 @@ public class TestVectorizationContext {
     exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
         children1);
 
-    vc = new VectorizationContext(columns);
+    vc = new VectorizationContext("name", columns);
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterCharColumnBetween);
 
@@ -1075,7 +1075,7 @@ public class TestVectorizationContext {
     exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
         children1);
 
-    vc = new VectorizationContext(columns);
+    vc = new VectorizationContext("name", columns);
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterVarCharColumnBetween);
 
@@ -1144,7 +1144,7 @@ public class TestVectorizationContext {
     columns.add("col0");
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterStringColumnInList);
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
@@ -1199,7 +1199,7 @@ public class TestVectorizationContext {
     columns.add("col1");
     columns.add("col2");
     columns.add("col3");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     VectorExpression ve = vc.getVectorExpression(exprDesc);
     assertTrue(ve instanceof IfExprLongColumnLongColumn);
 

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java Wed Apr 15 22:04:00 2015
@@ -1289,9 +1289,6 @@ public class TestInputOutputFormat {
     }
     mapWork.setPathToAliases(aliasMap);
     mapWork.setPathToPartitionInfo(partMap);
-    mapWork.setAllColumnVectorMaps(new HashMap<String, Map<String, Integer>>());
-    mapWork.setAllScratchColumnVectorTypeMaps(new HashMap<String,
-        Map<Integer, String>>());
 
     // write the plan out
     FileSystem localFs = FileSystem.getLocal(conf).getRaw();

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Wed Apr 15 22:04:00 2015
@@ -187,10 +187,11 @@ public class TestOrcFile {
   }
 
   private static ByteBuffer byteBuf(int... items) {
-     ByteBuffer result = ByteBuffer.allocate(items.length);
+    ByteBuffer result = ByteBuffer.allocate(items.length);
     for(int item: items) {
       result.put((byte) item);
     }
+    result.flip();
     return result;
   }
 
@@ -703,12 +704,12 @@ public class TestOrcFile {
     assertEquals(0, items.get(0).getPositions(0));
     assertEquals(0, items.get(0).getPositions(1));
     assertEquals(0, items.get(0).getPositions(2));
-    assertEquals(1, 
+    assertEquals(1,
                  items.get(0).getStatistics().getIntStatistics().getMinimum());
     index = recordReader.readRowIndex(1, null, null).getRowGroupIndex();
     assertEquals(3, index.length);
     items = index[1].getEntryList();
-    assertEquals(2, 
+    assertEquals(2,
                  items.get(0).getStatistics().getIntStatistics().getMaximum());
   }
 

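The byteBuf change above adds the missing flip() after the relative put() calls. A minimal illustration of why, assuming nothing beyond java.nio; the class name is illustrative:

    import java.nio.ByteBuffer;

    public class ByteBufFlipDemo {
      static ByteBuffer byteBuf(int... items) {
        ByteBuffer result = ByteBuffer.allocate(items.length);
        for (int item : items) {
          result.put((byte) item);    // each put() advances the position
        }
        result.flip();                // limit = position, position = 0
        return result;
      }

      public static void main(String[] args) {
        // With flip() the written bytes are readable; without it,
        // remaining() would be 0 and a reader would see no data.
        System.out.println(byteBuf(1, 2, 3).remaining()); // prints 3
      }
    }
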
Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Wed Apr 15 22:04:00 2015
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.metada
 
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
 
+import java.io.StringWriter;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -28,7 +29,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
 
-import com.google.common.collect.ImmutableMap;
 import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.FileStatus;
@@ -56,7 +56,14 @@ import org.apache.hadoop.mapred.Sequence
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
+import org.apache.log4j.WriterAppender;
 import org.apache.thrift.protocol.TBinaryProtocol;
+import org.junit.Assert;
+
+import com.google.common.collect.ImmutableMap;
 
 /**
  * TestHive.
@@ -234,6 +241,46 @@ public class TestHive extends TestCase {
     }
   }
 
+
+  /**
+   * Test logging of timing for metastore api calls
+   *
+   * @throws Throwable
+   */
+  public void testMetaStoreApiTiming() throws Throwable {
+    // set log level to DEBUG, as this is logged at debug level
+    Logger logger = Logger.getLogger("hive.ql.metadata.Hive");
+    Level origLevel = logger.getLevel();
+    logger.setLevel(Level.DEBUG);
+
+    // create an appender to capture the logs in a string
+    StringWriter writer = new StringWriter();
+    WriterAppender appender = new WriterAppender(new PatternLayout(), writer);
+
+    try {
+      logger.addAppender(appender);
+
+      hm.clearMetaCallTiming();
+      hm.getAllDatabases();
+      hm.dumpAndClearMetaCallTiming("test");
+      String logStr = writer.toString();
+      String expectedString = "getAllDatabases_()=";
+      Assert.assertTrue(logStr + " should contain <" + expectedString,
+          logStr.contains(expectedString));
+
+      // reset the log buffer, verify new dump without any api call does not contain func
+      writer.getBuffer().setLength(0);
+      hm.dumpAndClearMetaCallTiming("test");
+      logStr = writer.toString();
+      Assert.assertFalse(logStr + " should not contain <" + expectedString,
+          logStr.contains(expectedString));
+
+    } finally {
+      logger.setLevel(origLevel);
+      logger.removeAppender(appender);
+    }
+  }
+
   /**
    * Gets a table from the metastore and compares it to the original Table
    *

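The new testMetaStoreApiTiming captures logger output in memory and asserts on it. The same pattern, distilled to a self-contained log4j 1.x sketch (the logger name and message here are placeholders, not Hive APIs):

    import java.io.StringWriter;

    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;
    import org.apache.log4j.PatternLayout;
    import org.apache.log4j.WriterAppender;

    public class LogCaptureSketch {
      public static void main(String[] args) {
        Logger logger = Logger.getLogger("some.component");
        Level origLevel = logger.getLevel();
        logger.setLevel(Level.DEBUG);   // timing is logged at DEBUG level

        StringWriter writer = new StringWriter();
        WriterAppender appender = new WriterAppender(new PatternLayout(), writer);
        try {
          logger.addAppender(appender);
          logger.debug("something to capture");
          // Assert on writer.toString(); reset via writer.getBuffer().setLength(0).
          System.out.println(writer.toString().contains("something to capture"));
        } finally {
          logger.setLevel(origLevel);   // always restore the shared logger state
          logger.removeAppender(appender);
        }
      }
    }
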
Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java Wed Apr 15 22:04:00 2015
@@ -52,7 +52,7 @@ public class TestVectorizer {
     columns.add("col3");
 
     //Generate vectorized expression
-    vContext = new VectorizationContext(columns);
+    vContext = new VectorizationContext("name", columns);
   }
 
   @Description(name = "fake", value = "FAKE")

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java Wed Apr 15 22:04:00 2015
@@ -17,15 +17,12 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
-import static org.junit.Assert.*;
-
 import java.io.File;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -44,7 +41,6 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 
 public class TestUpdateDeleteSemanticAnalyzer {
@@ -135,7 +131,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateAllNonPartitioned() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update T set a = 5", "testUpdateAllNonPartitioned");
+      ReturnInfo rc = parseAndAnalyze("update T set b = 5", "testUpdateAllNonPartitioned");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
       cleanupTables();
@@ -145,7 +141,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateAllNonPartitionedWhere() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update T set a = 5 where b > 5",
+      ReturnInfo rc = parseAndAnalyze("update T set b = 5 where b > 5",
           "testUpdateAllNonPartitionedWhere");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
@@ -156,7 +152,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateAllPartitioned() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update U set a = 5", "testUpdateAllPartitioned");
+      ReturnInfo rc = parseAndAnalyze("update U set b = 5", "testUpdateAllPartitioned");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
       cleanupTables();
@@ -166,7 +162,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateAllPartitionedWhere() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update U set a = 5 where b > 5",
+      ReturnInfo rc = parseAndAnalyze("update U set b = 5 where b > 5",
           "testUpdateAllPartitionedWhere");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
@@ -177,7 +173,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateOnePartition() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update U set a = 5 where ds = 'today'",
+      ReturnInfo rc = parseAndAnalyze("update U set b = 5 where ds = 'today'",
           "testUpdateOnePartition");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
@@ -188,7 +184,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateOnePartitionWhere() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update U set a = 5 where ds = 'today' and b > 5",
+      ReturnInfo rc = parseAndAnalyze("update U set b = 5 where ds = 'today' and b > 5",
           "testUpdateOnePartitionWhere");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
@@ -266,7 +262,7 @@ public class TestUpdateDeleteSemanticAna
     db = sem.getDb();
 
     // I have to create the tables here (rather than in setup()) because I need the Hive
-    // connection, which is conviently created by the semantic analyzer.
+    // connection, which is conveniently created by the semantic analyzer.
     Map<String, String> params = new HashMap<String, String>(1);
     params.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
     db.createTable("T", Arrays.asList("a", "b"), null, OrcInputFormat.class,
@@ -297,7 +293,7 @@ public class TestUpdateDeleteSemanticAna
     fs.create(tmp);
     fs.deleteOnExit(tmp);
     ExplainWork work = new ExplainWork(tmp, sem.getParseContext(), sem.getRootTasks(),
-        sem.getFetchTask(), astStringTree, sem, true, false, false, false, false);
+        sem.getFetchTask(), astStringTree, sem, true, false, false, false, false, false, null);
     ExplainTask task = new ExplainTask();
     task.setWork(work);
     task.initialize(conf, plan, null);

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java Wed Apr 15 22:04:00 2015
@@ -17,16 +17,18 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import java.sql.Timestamp;
+
+import junit.framework.TestCase;
+
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
-import junit.framework.TestCase;
-
 public class TestGenericUDFLastDay extends TestCase {
 
   public void testLastDay() throws HiveException {
@@ -65,6 +67,31 @@ public class TestGenericUDFLastDay exten
     runAndVerify("2014-01-32 10:30:45", "2014-02-28", udf);
     runAndVerify("01/14/2014 10:30:45", null, udf);
     runAndVerify("2016-02-28T10:30:45", "2016-02-29", udf);
+    // negative Unix time
+    runAndVerifyTs("1966-01-31 00:00:01", "1966-01-31", udf);
+    runAndVerifyTs("1966-01-31 10:00:01", "1966-01-31", udf);
+    runAndVerifyTs("1966-01-31 23:59:59", "1966-01-31", udf);
+  }
+
+  public void testLastDayTs() throws HiveException {
+    GenericUDFLastDay udf = new GenericUDFLastDay();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
+    ObjectInspector[] arguments = { valueOI0 };
+
+    udf.initialize(arguments);
+    // positive Unix time
+    runAndVerifyTs("2014-01-01 10:30:45", "2014-01-31", udf);
+    runAndVerifyTs("2014-01-14 10:30:45", "2014-01-31", udf);
+    runAndVerifyTs("2014-01-31 10:30:45.1", "2014-01-31", udf);
+    runAndVerifyTs("2014-02-02 10:30:45.100", "2014-02-28", udf);
+    runAndVerifyTs("2014-02-28 10:30:45.001", "2014-02-28", udf);
+    runAndVerifyTs("2016-02-03 10:30:45.000000001", "2016-02-29", udf);
+    runAndVerifyTs("2016-02-28 10:30:45", "2016-02-29", udf);
+    runAndVerifyTs("2016-02-29 10:30:45", "2016-02-29", udf);
+    // negative Unix time
+    runAndVerifyTs("1966-01-31 00:00:01", "1966-01-31", udf);
+    runAndVerifyTs("1966-01-31 10:00:01", "1966-01-31", udf);
+    runAndVerifyTs("1966-01-31 23:59:59", "1966-01-31", udf);
   }
 
   private void runAndVerify(String str, String expResult, GenericUDF udf)
@@ -73,5 +100,13 @@ public class TestGenericUDFLastDay exten
     DeferredObject[] args = { valueObj0 };
     Text output = (Text) udf.evaluate(args);
     assertEquals("last_day() test ", expResult, output != null ? output.toString() : null);
+  }
+
+  private void runAndVerifyTs(String str, String expResult, GenericUDF udf) throws HiveException {
+    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new TimestampWritable(
+        Timestamp.valueOf(str)) : null);
+    DeferredObject[] args = { valueObj0 };
+    Text output = (Text) udf.evaluate(args);
+    assertEquals("last_day() test ", expResult, output != null ? output.toString() : null);
   }
 }

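The "negative Unix time" cases added above exercise timestamps before the 1970-01-01 epoch, whose millisecond values are negative. A one-line check using only java.sql.Timestamp (class name illustrative):

    import java.sql.Timestamp;

    public class NegativeEpochDemo {
      public static void main(String[] args) {
        // 1966 precedes the epoch in every time zone, so getTime() < 0.
        Timestamp ts = Timestamp.valueOf("1966-01-31 00:00:01");
        System.out.println(ts.getTime());   // a negative number
      }
    }
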
Modified: hive/branches/llap/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q (original)
+++ hive/branches/llap/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q Wed Apr 15 22:04:00 2015
@@ -9,7 +9,7 @@ set hive.enforce.bucketing=true;
 
 
 -- check update without update priv
-create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+create table auth_noupd(i int, j int) clustered by (j) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
 
 set user.name=user1;
 update auth_noupd set i = 0 where i > 0;

Modified: hive/branches/llap/ql/src/test/queries/clientpositive/authorization_update.q
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/queries/clientpositive/authorization_update.q?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/queries/clientpositive/authorization_update.q (original)
+++ hive/branches/llap/ql/src/test/queries/clientpositive/authorization_update.q Wed Apr 15 22:04:00 2015
@@ -9,7 +9,7 @@ set hive.enforce.bucketing=true;
 set user.name=user1;
 -- current user has been set (comment line before the set cmd is resulting in parse error!!)
 
-CREATE TABLE t_auth_up(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+CREATE TABLE t_auth_up(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
 
 CREATE TABLE t_select(i int);
 GRANT ALL ON TABLE t_select TO ROLE public;
@@ -24,4 +24,4 @@ SHOW GRANT ON TABLE t_auth_up;
 
 
 set user.name=userWIns;
-update t_auth_up set i = 0 where i > 0;
+update t_auth_up set j = 0 where i > 0;

Modified: hive/branches/llap/ql/src/test/queries/clientpositive/authorization_update_own_table.q
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/queries/clientpositive/authorization_update_own_table.q?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/queries/clientpositive/authorization_update_own_table.q (original)
+++ hive/branches/llap/ql/src/test/queries/clientpositive/authorization_update_own_table.q Wed Apr 15 22:04:00 2015
@@ -9,8 +9,8 @@ set hive.enforce.bucketing=true;
 
 
 set user.name=user1;
-create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
-update auth_noupd set i = 0 where i > 0;
+create table auth_noupd(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+update auth_noupd set j = 0 where i > 0;
 
 set user.name=hive_admin_user;
 set role admin;

Modified: hive/branches/llap/ql/src/test/queries/clientpositive/dynpart_sort_optimization_acid.q
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/queries/clientpositive/dynpart_sort_optimization_acid.q?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/queries/clientpositive/dynpart_sort_optimization_acid.q (original)
+++ hive/branches/llap/ql/src/test/queries/clientpositive/dynpart_sort_optimization_acid.q Wed Apr 15 22:04:00 2015
@@ -14,12 +14,12 @@ select count(*) from acid where ds='2008
 insert into table acid partition(ds='2008-04-08') values("foo", "bar");
 select count(*) from acid where ds='2008-04-08';
 
-explain update acid set key = 'foo' where value = 'bar' and ds='2008-04-08';
-update acid set key = 'foo' where value = 'bar' and ds='2008-04-08';
+explain update acid set value = 'bar' where key = 'foo' and ds='2008-04-08';
+update acid set value = 'bar' where key = 'foo' and ds='2008-04-08';
 select count(*) from acid where ds='2008-04-08';
 
-explain update acid set key = 'foo' where value = 'bar' and ds in ('2008-04-08');
-update acid set key = 'foo' where value = 'bar' and ds in ('2008-04-08');
+explain update acid set value = 'bar' where key = 'foo' and ds in ('2008-04-08');
+update acid set value = 'bar' where key = 'foo' and ds in ('2008-04-08');
 select count(*) from acid where ds in ('2008-04-08');
 
 delete from acid where key = 'foo' and ds='2008-04-08';
@@ -36,12 +36,12 @@ select count(*) from acid where ds='2008
 insert into table acid partition(ds='2008-04-08') values("foo", "bar");
 select count(*) from acid where ds='2008-04-08';
 
-explain update acid set key = 'foo' where value = 'bar' and ds='2008-04-08';
-update acid set key = 'foo' where value = 'bar' and ds='2008-04-08';
+explain update acid set value = 'bar' where key = 'foo' and ds='2008-04-08';
+update acid set value = 'bar' where key = 'foo' and ds='2008-04-08';
 select count(*) from acid where ds='2008-04-08';
 
-explain update acid set key = 'foo' where value = 'bar' and ds in ('2008-04-08');
-update acid set key = 'foo' where value = 'bar' and ds in ('2008-04-08');
+explain update acid set value = 'bar' where key = 'foo' and ds in ('2008-04-08');
+update acid set value = 'bar' where key = 'foo' and ds in ('2008-04-08');
 select count(*) from acid where ds in ('2008-04-08');
 
 delete from acid where key = 'foo' and ds='2008-04-08';
@@ -58,12 +58,12 @@ select count(*) from acid where ds='2008
 insert into table acid partition(ds='2008-04-08',hr=11) values("foo", "bar");
 select count(*) from acid where ds='2008-04-08' and hr=11;
 
-explain update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr=11;
-update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr=11;
+explain update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr=11;
+update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr=11;
 select count(*) from acid where ds='2008-04-08' and hr=11;
 
-explain update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr>=11;
-update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr>=11;
+explain update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr>=11;
+update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr>=11;
 select count(*) from acid where ds='2008-04-08' and hr>=11;
 
 delete from acid where key = 'foo' and ds='2008-04-08' and hr=11;
@@ -80,12 +80,12 @@ select count(*) from acid where ds='2008
 insert into table acid partition(ds='2008-04-08',hr=11) values("foo", "bar");
 select count(*) from acid where ds='2008-04-08' and hr=11;
 
-explain update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr=11;
-update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr=11;
+explain update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr=11;
+update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr=11;
 select count(*) from acid where ds='2008-04-08' and hr=11;
 
-explain update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr>=11;
-update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr>=11;
+explain update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr>=11;
+update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr>=11;
 select count(*) from acid where ds='2008-04-08' and hr>=11;
 
 delete from acid where key = 'foo' and ds='2008-04-08' and hr=11;
@@ -103,12 +103,12 @@ select count(*) from acid where ds='2008
 insert into table acid partition(ds='2008-04-08',hr=11) values("foo", "bar");
 select count(*) from acid where ds='2008-04-08' and hr=11;
 
-explain update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr=11;
-update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr=11;
+explain update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr=11;
+update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr=11;
 select count(*) from acid where ds='2008-04-08' and hr=11;
 
-explain update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr>=11;
-update acid set key = 'foo' where value = 'bar' and ds='2008-04-08' and hr>=11;
+explain update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr>=11;
+update acid set value = 'bar' where key = 'foo' and ds='2008-04-08' and hr>=11;
 select count(*) from acid where ds='2008-04-08' and hr>=11;
 
 delete from acid where key = 'foo' and ds='2008-04-08' and hr=11;

Modified: hive/branches/llap/ql/src/test/queries/clientpositive/tez_union.q
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/queries/clientpositive/tez_union.q?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/queries/clientpositive/tez_union.q (original)
+++ hive/branches/llap/ql/src/test/queries/clientpositive/tez_union.q Wed Apr 15 22:04:00 2015
@@ -92,3 +92,21 @@ right outer join src s on u.key = s.key;
 
 select * from ut order by ukey, skey limit 20;
 drop table ut;
+
+set hive.vectorized.execution.enabled=true;
+
+create table TABLE1(EMP_NAME STRING, EMP_ID INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';
+
+create table table2 (EMP_NAME STRING) PARTITIONED BY (EMP_ID INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';
+
+CREATE OR REPLACE VIEW TABLE3 as select EMP_NAME, EMP_ID from TABLE1;
+
+explain formatted select count(*) from TABLE3;
+
+drop table table2;
+
+create table table2 (EMP_NAME STRING) PARTITIONED BY (EMP_ID INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';
+
+CREATE OR REPLACE VIEW TABLE3 as select EMP_NAME, EMP_ID from TABLE1 UNION ALL select EMP_NAME,EMP_ID from TABLE2;
+
+explain formatted select count(*) from TABLE3;

Modified: hive/branches/llap/ql/src/test/queries/clientpositive/update_all_types.q
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/queries/clientpositive/update_all_types.q?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/queries/clientpositive/update_all_types.q (original)
+++ hive/branches/llap/ql/src/test/queries/clientpositive/update_all_types.q Wed Apr 15 22:04:00 2015
@@ -5,6 +5,7 @@ set hive.enforce.bucketing=true;
 create table acid_uat(ti tinyint,
                  si smallint,
                  i int,
+                 j int,
                  bi bigint,
                  f float,
                  d double,
@@ -20,6 +21,7 @@ insert into table acid_uat
     select ctinyint,
            csmallint,
            cint,
+           cint j,
            cbigint,
            cfloat,
            cdouble,
@@ -37,7 +39,7 @@ select * from acid_uat order by i;
 update acid_uat set
     ti = 1,
     si = 2,
-    i = 3,
+    j = 3,
     bi = 4,
     f = 3.14,
     d = 6.28,

Modified: hive/branches/llap/ql/src/test/queries/clientpositive/update_tmp_table.q
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/queries/clientpositive/update_tmp_table.q?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/queries/clientpositive/update_tmp_table.q (original)
+++ hive/branches/llap/ql/src/test/queries/clientpositive/update_tmp_table.q Wed Apr 15 22:04:00 2015
@@ -8,7 +8,7 @@ insert into table acid_utt select cint,
 
 select a,b from acid_utt order by a;
 
-update acid_utt set b = 'fred' where b = '0ruyd6Y50JpdGRf6HqD';
+update acid_utt set a = 'fred' where b = '0ruyd6Y50JpdGRf6HqD';
 
 select * from acid_utt order by a;
 

Modified: hive/branches/llap/ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out Wed Apr 15 22:04:00 2015
@@ -18,6 +18,4 @@ PREHOOK: query: alter table aa set serde
 PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
 PREHOOK: Input: default@aa
 PREHOOK: Output: default@aa
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unclosed character class near index 7
-[^\](.*)
-       ^
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. at least one column must be specified for the table

Modified: hive/branches/llap/ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out Wed Apr 15 22:04:00 2015
@@ -1,10 +1,10 @@
 PREHOOK: query: -- check update without update priv
-create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+create table auth_noupd(i int, j int) clustered by (j) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@auth_noupd
 POSTHOOK: query: -- check update without update priv
-create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+create table auth_noupd(i int, j int) clustered by (j) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@auth_noupd

Modified: hive/branches/llap/ql/src/test/results/clientnegative/join_nonexistent_part.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientnegative/join_nonexistent_part.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientnegative/join_nonexistent_part.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientnegative/join_nonexistent_part.q.out Wed Apr 15 22:04:00 2015
@@ -1,2 +1,2 @@
-Warning: Shuffle Join JOIN[8][tables = [$hdt$_0]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
 Authorization failed:No privilege 'Select' found for inputs { database:default, table:srcpart, columnName:key}. Use SHOW GRANT to get more details.

Modified: hive/branches/llap/ql/src/test/results/clientpositive/annotate_stats_join.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/annotate_stats_join.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/annotate_stats_join.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/annotate_stats_join.q.out Wed Apr 15 22:04:00 2015
@@ -456,22 +456,6 @@ STAGE PLANS:
                   Statistics: Num rows: 48 Data size: 4752 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: string), _col2 (type: int)
           TableScan
-            alias: e
-            Statistics: Num rows: 48 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE
-            Filter Operator
-              predicate: deptid is not null (type: boolean)
-              Statistics: Num rows: 48 Data size: 4752 Basic stats: COMPLETE Column stats: COMPLETE
-              Select Operator
-                expressions: lastname (type: string), deptid (type: int), locid (type: int)
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 48 Data size: 4752 Basic stats: COMPLETE Column stats: COMPLETE
-                Reduce Output Operator
-                  key expressions: _col1 (type: int)
-                  sort order: +
-                  Map-reduce partition columns: _col1 (type: int)
-                  Statistics: Num rows: 48 Data size: 4752 Basic stats: COMPLETE Column stats: COMPLETE
-                  value expressions: _col0 (type: string), _col2 (type: int)
-          TableScan
             alias: d
             Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
@@ -487,6 +471,22 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: int)
                   Statistics: Num rows: 6 Data size: 570 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col1 (type: string)
+          TableScan
+            alias: e
+            Statistics: Num rows: 48 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE
+            Filter Operator
+              predicate: deptid is not null (type: boolean)
+              Statistics: Num rows: 48 Data size: 4752 Basic stats: COMPLETE Column stats: COMPLETE
+              Select Operator
+                expressions: lastname (type: string), deptid (type: int), locid (type: int)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 48 Data size: 4752 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col1 (type: int)
+                  sort order: +
+                  Map-reduce partition columns: _col1 (type: int)
+                  Statistics: Num rows: 48 Data size: 4752 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: string), _col2 (type: int)
       Reduce Operator Tree:
         Join Operator
           condition map:

Modified: hive/branches/llap/ql/src/test/results/clientpositive/annotate_stats_part.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/annotate_stats_part.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/annotate_stats_part.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/annotate_stats_part.q.out Wed Apr 15 22:04:00 2015
@@ -481,106 +481,70 @@ POSTHOOK: query: -- This is to test filt
 explain select locid from loc_orc where locid>0 and year='2001'
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: loc_orc
-            Statistics: Num rows: 7 Data size: 678 Basic stats: COMPLETE Column stats: COMPLETE
-            Filter Operator
-              predicate: (locid > 0) (type: boolean)
-              Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-              Select Operator
-                expressions: locid (type: int)
-                outputColumnNames: _col0
-                Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
       Processor Tree:
-        ListSink
+        TableScan
+          alias: loc_orc
+          Statistics: Num rows: 7 Data size: 678 Basic stats: COMPLETE Column stats: COMPLETE
+          Filter Operator
+            predicate: (locid > 0) (type: boolean)
+            Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: locid (type: int)
+              outputColumnNames: _col0
+              Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+              ListSink
 
 PREHOOK: query: explain select locid,year from loc_orc where locid>0 and year='2001'
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select locid,year from loc_orc where locid>0 and year='2001'
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: loc_orc
-            Statistics: Num rows: 7 Data size: 678 Basic stats: COMPLETE Column stats: COMPLETE
-            Filter Operator
-              predicate: (locid > 0) (type: boolean)
-              Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-              Select Operator
-                expressions: locid (type: int), '2001' (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 2 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
       Processor Tree:
-        ListSink
+        TableScan
+          alias: loc_orc
+          Statistics: Num rows: 7 Data size: 678 Basic stats: COMPLETE Column stats: COMPLETE
+          Filter Operator
+            predicate: (locid > 0) (type: boolean)
+            Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: locid (type: int), '2001' (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 2 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
+              ListSink
 
 PREHOOK: query: explain select * from (select locid,year from loc_orc) test where locid>0 and year='2001'
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select * from (select locid,year from loc_orc) test where locid>0 and year='2001'
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: loc_orc
-            Statistics: Num rows: 7 Data size: 678 Basic stats: COMPLETE Column stats: COMPLETE
-            Filter Operator
-              predicate: (locid > 0) (type: boolean)
-              Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-              Select Operator
-                expressions: locid (type: int), '2001' (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 2 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
       Processor Tree:
-        ListSink
+        TableScan
+          alias: loc_orc
+          Statistics: Num rows: 7 Data size: 678 Basic stats: COMPLETE Column stats: COMPLETE
+          Filter Operator
+            predicate: (locid > 0) (type: boolean)
+            Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: locid (type: int), '2001' (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 2 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
+              ListSink
 

Modified: hive/branches/llap/ql/src/test/results/clientpositive/authorization_update.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/authorization_update.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/authorization_update.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/authorization_update.q.out Wed Apr 15 22:04:00 2015
@@ -1,12 +1,12 @@
 PREHOOK: query: -- current user has been set (comment line before the set cmd is resulting in parse error!!)
 
-CREATE TABLE t_auth_up(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+CREATE TABLE t_auth_up(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@t_auth_up
 POSTHOOK: query: -- current user has been set (comment line before the set cmd is resulting in parse error!!)
 
-CREATE TABLE t_auth_up(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+CREATE TABLE t_auth_up(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t_auth_up
@@ -52,11 +52,11 @@ default	t_auth_up			user1	USER	SELECT	tr
 default	t_auth_up			user1	USER	UPDATE	true	-1	user1
 default	t_auth_up			userWIns	USER	SELECT	false	-1	user1
 default	t_auth_up			userWIns	USER	UPDATE	false	-1	user1
-PREHOOK: query: update t_auth_up set i = 0 where i > 0
+PREHOOK: query: update t_auth_up set j = 0 where i > 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t_auth_up
 PREHOOK: Output: default@t_auth_up
-POSTHOOK: query: update t_auth_up set i = 0 where i > 0
+POSTHOOK: query: update t_auth_up set j = 0 where i > 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t_auth_up
 POSTHOOK: Output: default@t_auth_up

Modified: hive/branches/llap/ql/src/test/results/clientpositive/authorization_update_own_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/authorization_update_own_table.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/authorization_update_own_table.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/authorization_update_own_table.q.out Wed Apr 15 22:04:00 2015
@@ -1,16 +1,16 @@
-PREHOOK: query: create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: query: create table auth_noupd(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@auth_noupd
-POSTHOOK: query: create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: query: create table auth_noupd(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@auth_noupd
-PREHOOK: query: update auth_noupd set i = 0 where i > 0
+PREHOOK: query: update auth_noupd set j = 0 where i > 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@auth_noupd
 PREHOOK: Output: default@auth_noupd
-POSTHOOK: query: update auth_noupd set i = 0 where i > 0
+POSTHOOK: query: update auth_noupd set j = 0 where i > 0
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@auth_noupd
 POSTHOOK: Output: default@auth_noupd

Modified: hive/branches/llap/ql/src/test/results/clientpositive/auto_join32.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/auto_join32.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/auto_join32.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/auto_join32.q.out Wed Apr 15 22:04:00 2015
@@ -391,6 +391,36 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: v
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Filter Operator
+              predicate: ((p = 'bar') and name is not null) (type: boolean)
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Select Operator
+                expressions: name (type: string), registration (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Sorted Merge Bucket Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col0 (type: string)
+                    1 _col0 (type: string)
+                  outputColumnNames: _col1, _col3
+                  Select Operator
+                    expressions: _col3 (type: string), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Group By Operator
+                      aggregations: count(DISTINCT _col1)
+                      keys: _col0 (type: string), _col1 (type: string)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string), _col1 (type: string)
+                        sort order: ++
+                        Map-reduce partition columns: _col0 (type: string)
       Reduce Operator Tree:
         Group By Operator
           aggregations: count(DISTINCT KEY._col1:0._col0)

Modified: hive/branches/llap/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/bucketmapjoin1.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/bucketmapjoin1.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/bucketmapjoin1.q.out Wed Apr 15 22:04:00 2015
@@ -125,6 +125,50 @@ STAGE PLANS:
 
   Stage: Stage-1
     Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 key (type: int)
+                  1 key (type: int)
+                outputColumnNames: _col0, _col1, _col7
+                Position of Big Table: 0
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                BucketMapJoin: true
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: string), _col7 (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          columns _col0,_col1,_col2
+                          columns.types int:string:string
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
       Local Work:
         Map Reduce Local Work
 
@@ -249,6 +293,50 @@ STAGE PLANS:
 
   Stage: Stage-1
     Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: (key is not null and (ds = '2008-04-08')) (type: boolean)
+              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Map Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 key (type: int)
+                  1 key (type: int)
+                outputColumnNames: _col0, _col1, _col7
+                Position of Big Table: 1
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                BucketMapJoin: true
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: string), _col7 (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        properties:
+                          columns _col0,_col1,_col2
+                          columns.types int:string:string
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
       Local Work:
         Map Reduce Local Work
 

Modified: hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer3.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer3.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer3.q.out Wed Apr 15 22:04:00 2015
@@ -300,21 +300,6 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           TableScan
-            alias: y
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-          TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
@@ -331,6 +316,21 @@ STAGE PLANS:
                   Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
           TableScan
+            alias: y
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+          TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
@@ -1004,21 +1004,6 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
           TableScan
-            alias: y
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-          TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
@@ -1035,6 +1020,21 @@ STAGE PLANS:
                   Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
           TableScan
+            alias: y
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+          TableScan
             alias: x
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
             Filter Operator

Modified: hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer6.q.out
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer6.q.out?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer6.q.out (original)
+++ hive/branches/llap/ql/src/test/results/clientpositive/correlationoptimizer6.q.out Wed Apr 15 22:04:00 2015
@@ -3032,22 +3032,6 @@ STAGE PLANS:
                     Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col1 (type: bigint)
           TableScan
-            alias: x
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: string)
-          TableScan
             alias: y
             Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
@@ -3069,6 +3053,22 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string)
                     Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col1 (type: bigint)
+          TableScan
+            alias: x
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: key is not null (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: string)
       Reduce Operator Tree:
         Demux Operator
           Statistics: Num rows: 513 Data size: 5411 Basic stats: COMPLETE Column stats: NONE


