hive-commits mailing list archives

From ser...@apache.org
Subject [1/4] hive git commit: HIVE-12758 : Parallel compilation: Operator::resetId() is not thread-safe (Sergey Shelukhin, reviewed by Gopal V)
Date Tue, 19 Jan 2016 02:47:35 GMT
Repository: hive
Updated Branches:
  refs/heads/master 4eab2dfe8 -> 88fceacaa
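
The hunks below all follow one pattern: operator ids used to come from a JVM-wide static counter that Operator::resetId() zeroed between compilations, which races once queries compile in parallel. The patch threads a per-compilation CompilationOpContext through the operator constructors and OperatorFactory so that each compilation numbers its operators independently. A minimal sketch of that idea, with illustrative names rather than Hive's exact internals:

import java.util.concurrent.atomic.AtomicInteger;

// Illustrative names only; not necessarily Hive's exact internals.
class CompilationOpContextSketch {
  // One counter per compilation, so parallel compilations cannot
  // interfere with each other's operator numbering.
  private final AtomicInteger opSeqId = new AtomicInteger(0);

  int nextOperatorId() {
    return opSeqId.getAndIncrement();
  }
}

abstract class OperatorSketch {
  protected final int id;

  /** Kryo ctor: the id is restored by deserialization, not allocated. */
  protected OperatorSketch() {
    this.id = -1;
  }

  protected OperatorSketch(CompilationOpContextSketch ctx) {
    // Replaces the static counter plus static resetId() that raced
    // under parallel compilation.
    this.id = ctx.nextOperatorId();
  }
}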


http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index 7ab94a2..eaeb66b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
@@ -77,6 +78,7 @@ public class TestExecDriver extends TestCase {
   private static final Path tmppath;
   private static Hive db;
   private static FileSystem fs;
+  private static CompilationOpContext ctx = null;
 
   static {
     try {
@@ -153,6 +155,7 @@ public class TestExecDriver extends TestCase {
   @Override
   protected void setUp() {
     mr = PlanUtils.getMapRedWork();
+    ctx = new CompilationOpContext();
   }
 
   public static void addMapWork(MapredWork mr, Table tbl, String alias, Operator<?> work) {
@@ -210,10 +213,9 @@ public class TestExecDriver extends TestCase {
   @SuppressWarnings("unchecked")
   private void populateMapPlan1(Table src) throws Exception {
 
-    Operator<FileSinkDesc> op2 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+    Operator<FileSinkDesc> op2 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapplan1.out"), Utilities.defaultTd, true));
-    Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
-        op2);
+    Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"), op2);
 
     addMapWork(mr, src, "a", op1);
   }
@@ -221,7 +223,7 @@ public class TestExecDriver extends TestCase {
   @SuppressWarnings("unchecked")
   private void populateMapPlan2(Table src) throws Exception {
 
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapplan2.out"), Utilities.defaultTd, false));
 
     Operator<ScriptDesc> op2 = OperatorFactory.get(new ScriptDesc("cat",
@@ -244,7 +246,7 @@ public class TestExecDriver extends TestCase {
       outputColumns.add("_col" + i);
     }
     // map-side work
-    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils
         .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
         Utilities.makeList(getStringColumn("value")), outputColumns, true,
         -1, 1, -1, AcidUtils.Operation.NOT_ACID));
@@ -257,7 +259,7 @@ public class TestExecDriver extends TestCase {
     mr.setReduceWork(rWork);
 
     // reduce side work
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapredplan1.out"), Utilities.defaultTd, false));
 
     List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
@@ -276,7 +278,7 @@ public class TestExecDriver extends TestCase {
       outputColumns.add("_col" + i);
     }
     // map-side work
-    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils
         .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
         Utilities
         .makeList(getStringColumn("key"), getStringColumn("value")),
@@ -290,7 +292,7 @@ public class TestExecDriver extends TestCase {
     mr.setReduceWork(rWork);
 
     // reduce side work
-    Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+    Operator<FileSinkDesc> op4 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapredplan2.out"), Utilities.defaultTd, false));
 
     Operator<FilterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
@@ -313,14 +315,14 @@ public class TestExecDriver extends TestCase {
       outputColumns.add("_col" + i);
     }
     // map-side work
-    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils
         .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
         Utilities.makeList(getStringColumn("value")), outputColumns, true,
         Byte.valueOf((byte) 0), 1, -1, AcidUtils.Operation.NOT_ACID));
 
     addMapWork(mr, src, "a", op1);
 
-    Operator<ReduceSinkDesc> op2 = OperatorFactory.get(PlanUtils
+    Operator<ReduceSinkDesc> op2 = OperatorFactory.get(ctx, PlanUtils
         .getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
         Utilities.makeList(getStringColumn("key")), outputColumns, true,
         Byte.valueOf((byte) 1), Integer.MAX_VALUE, -1, AcidUtils.Operation.NOT_ACID));
@@ -336,7 +338,7 @@ public class TestExecDriver extends TestCase {
     rWork.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+    Operator<FileSinkDesc> op4 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapredplan3.out"), Utilities.defaultTd, false));
 
     Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(Utilities
@@ -356,7 +358,7 @@ public class TestExecDriver extends TestCase {
     for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
     }
-    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils
         .getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")),
         Utilities.makeList(getStringColumn("tkey"),
         getStringColumn("tvalue")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID));
@@ -379,7 +381,7 @@ public class TestExecDriver extends TestCase {
     mr.setReduceWork(rWork);
 
     // reduce side work
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapredplan4.out"), Utilities.defaultTd, false));
     List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
     cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0));
@@ -401,7 +403,7 @@ public class TestExecDriver extends TestCase {
     for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
     }
-    Operator<ReduceSinkDesc> op0 = OperatorFactory.get(PlanUtils
+    Operator<ReduceSinkDesc> op0 = OperatorFactory.get(ctx, PlanUtils
         .getReduceSinkDesc(Utilities.makeList(getStringColumn("0")), Utilities
         .makeList(getStringColumn("0"), getStringColumn("1")),
         outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID));
@@ -418,7 +420,7 @@ public class TestExecDriver extends TestCase {
     rWork.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapredplan5.out"), Utilities.defaultTd, false));
 
     List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
@@ -436,7 +438,7 @@ public class TestExecDriver extends TestCase {
     for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
     }
-    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+    Operator<ReduceSinkDesc> op1 = OperatorFactory.get(ctx, PlanUtils
         .getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")),
         Utilities.makeList(getStringColumn("tkey"),
         getStringColumn("tvalue")), outputColumns, false, -1, 1, -1, AcidUtils.Operation.NOT_ACID));
@@ -460,7 +462,7 @@ public class TestExecDriver extends TestCase {
     rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(ctx, new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapredplan6.out"), Utilities.defaultTd, false));
 
     Operator<FilterDesc> op2 = OperatorFactory.get(getTestFilterDesc("0"), op3);
@@ -478,7 +480,7 @@ public class TestExecDriver extends TestCase {
     MapRedTask mrtask = new MapRedTask();
     DriverContext dctx = new DriverContext ();
     mrtask.setWork(mr);
-    mrtask.initialize(conf, null, dctx);
+    mrtask.initialize(conf, null, dctx, null);
     int exitVal =  mrtask.execute(dctx);
 
     if (exitVal != 0) {
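
Note how, in the test above, only root and standalone operators (the FileSinkDesc and ReduceSinkDesc call sites) gained the ctx argument, while child-linking calls such as OperatorFactory.get(getTestFilterDesc("key"), op2) stayed unchanged; presumably the child-linking overload inherits the context from the operator it is wired to. The extra null passed to mrtask.initialize(...) appears to be the same context threaded through task initialization, unused here. A runnable sketch of that overload pair, under assumed signatures rather than Hive's verbatim API:

final class FactoryOverloadSketch {
  static final class Ctx {
    private int next = 0;
    synchronized int nextOperatorId() { return next++; }
  }

  static final class Op {
    final Ctx ctx;
    final int id;
    Op child;
    Op(Ctx ctx) { this.ctx = ctx; this.id = ctx.nextOperatorId(); }
  }

  // Root operators: the caller supplies the compilation context explicitly.
  static Op get(Ctx ctx) { return new Op(ctx); }

  // Child-linking overload: reuses the child's context, which is why those
  // call sites did not need to change in this patch.
  static Op get(Op child) {
    Op parent = new Op(child.ctx);
    parent.child = child;
    return parent;
  }

  public static void main(String[] args) {
    Ctx ctx = new Ctx();
    Op sink = get(ctx);     // like op2 in populateMapPlan1
    Op filter = get(sink);  // like op1: context inherited, no ctx argument
    System.out.println(sink.id + " " + filter.id);  // prints: 0 1
  }
}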

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java
index 68c598a..a8d7c9c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.io.AcidInputFormat;
 import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
@@ -293,7 +294,8 @@ public class TestFileSinkOperator {
     if (txnId > 0) desc.setTransactionId(txnId);
     if (writeType != AcidUtils.Operation.NOT_ACID) desc.setTransactionId(1L);
 
-    FileSinkOperator op = (FileSinkOperator)OperatorFactory.get(FileSinkDesc.class);
+    FileSinkOperator op = (FileSinkOperator)OperatorFactory.get(
+        new CompilationOpContext(), FileSinkDesc.class);
     op.setConf(desc);
     op.initialize(jc, new ObjectInspector[]{inspector});
     return op;

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
index 000b46b..2ccb05a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
@@ -31,6 +31,7 @@ import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.io.IOContextMap;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
@@ -155,7 +156,7 @@ public class TestOperators extends TestCase {
    */
   public void testScriptOperatorEnvVarsProcessing() throws Throwable {
     try {
-      ScriptOperator scriptOperator = new ScriptOperator();
+      ScriptOperator scriptOperator = new ScriptOperator(new CompilationOpContext());
 
       //Environment Variables name
       assertEquals("a_b_c", scriptOperator.safeEnvVarName("a.b.c"));
@@ -194,7 +195,7 @@ public class TestOperators extends TestCase {
   }
 
   public void testScriptOperatorBlacklistedEnvVarsProcessing() {
-    ScriptOperator scriptOperator = new ScriptOperator();
+    ScriptOperator scriptOperator = new ScriptOperator(new CompilationOpContext());
 
     Configuration hconf = new JobConf(ScriptOperator.class);
 
@@ -229,7 +230,7 @@ public class TestOperators extends TestCase {
         outputCols.add("_col" + i);
       }
       SelectDesc selectCtx = new SelectDesc(earr, outputCols);
-      Operator<SelectDesc> op = OperatorFactory.get(SelectDesc.class);
+      Operator<SelectDesc> op = OperatorFactory.get(new CompilationOpContext(), SelectDesc.class);
       op.setConf(selectCtx);
 
       // scriptOperator to echo the output of the select
@@ -245,8 +246,7 @@ public class TestOperators extends TestCase {
 
       // Collect operator to observe the output of the script
       CollectDesc cd = new CollectDesc(Integer.valueOf(10));
-      CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(
-          cd, sop);
+      CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, sop);
 
       op.initialize(new JobConf(TestOperators.class),
           new ObjectInspector[]{r[0].oi});
@@ -308,12 +308,13 @@ public class TestOperators extends TestCase {
       pathToPartitionInfo.put("hdfs:///testDir", pd);
 
       // initialize aliasToWork
+      CompilationOpContext ctx = new CompilationOpContext();
       CollectDesc cd = new CollectDesc(Integer.valueOf(1));
       CollectOperator cdop1 = (CollectOperator) OperatorFactory
-          .get(CollectDesc.class);
+          .get(ctx, CollectDesc.class);
       cdop1.setConf(cd);
       CollectOperator cdop2 = (CollectOperator) OperatorFactory
-          .get(CollectDesc.class);
+          .get(ctx, CollectDesc.class);
       cdop2.setConf(cd);
       LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork =
         new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
@@ -327,7 +328,7 @@ public class TestOperators extends TestCase {
       mrwork.getMapWork().setAliasToWork(aliasToWork);
 
       // get map operator and initialize it
-      MapOperator mo = new MapOperator();
+      MapOperator mo = new MapOperator(new CompilationOpContext());
       mo.initializeAsRoot(hconf, mrwork.getMapWork());
 
       Text tw = new Text();

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
index 64db486..06e5e07 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.LinkedHashMap;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -58,7 +59,7 @@ public class TestPlan extends TestCase {
           .getFuncExprNodeDesc("==", expr1, expr2);
 
       FilterDesc filterCtx = new FilterDesc(filterExpr, false);
-      Operator<FilterDesc> op = OperatorFactory.get(FilterDesc.class);
+      Operator<FilterDesc> op = OperatorFactory.get(new CompilationOpContext(), FilterDesc.class);
       op.setConf(filterCtx);
 
       ArrayList<String> aliasList = new ArrayList<String>();

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
index 3968c50..d3bb84d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
@@ -24,6 +24,7 @@ import java.util.List;
 import junit.framework.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterExprAndExpr;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualDoubleScalar;
@@ -91,7 +92,7 @@ public class TestVectorFilterOperator {
     VectorizationContext vc = new VectorizationContext("name", columns);
     FilterDesc fdesc = new FilterDesc();
     fdesc.setPredicate(col1Expr);
-    return new VectorFilterOperator(vc, fdesc);
+    return new VectorFilterOperator(new CompilationOpContext(), vc, fdesc);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
index fdcf103..451947b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
@@ -38,6 +38,7 @@ import java.util.Set;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromConcat;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromLongIterables;
@@ -186,9 +187,10 @@ public class TestVectorGroupByOperator {
     float treshold = 100.0f*1024.0f/maxMemory;
     desc.setMemoryThreshold(treshold);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
 
     this.outputRowCount = 0;
@@ -1729,9 +1731,10 @@ public class TestVectorGroupByOperator {
     desc.setAggregators(aggs);
     desc.setKeys(keysDesc);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
 
@@ -1843,9 +1846,10 @@ public class TestVectorGroupByOperator {
     keysDesc.add(keyExp);
     desc.setKeys(keysDesc);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
 
@@ -2239,9 +2243,10 @@ public class TestVectorGroupByOperator {
 
     GroupByDesc desc = buildGroupByDescCountStar (ctx);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
 
     for (VectorizedRowBatch unit: data) {
@@ -2269,10 +2274,10 @@ public class TestVectorGroupByOperator {
     GroupByDesc desc = buildGroupByDescType(ctx, "count", "A", TypeInfoFactory.longTypeInfo);
     VectorGroupByDesc vectorDesc = desc.getVectorDesc();
     vectorDesc.setIsReduceMergePartial(true);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
-
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
 
     for (VectorizedRowBatch unit: data) {
@@ -2301,9 +2306,10 @@ public class TestVectorGroupByOperator {
     GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
         TypeInfoFactory.stringTypeInfo);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
 
     for (VectorizedRowBatch unit: data) {
@@ -2332,9 +2338,10 @@ public class TestVectorGroupByOperator {
     GroupByDesc desc =
         buildGroupByDescType(ctx, aggregateName, "A", TypeInfoFactory.getDecimalTypeInfo(30, 4));
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
 
     for (VectorizedRowBatch unit : data) {
@@ -2364,9 +2371,10 @@ public class TestVectorGroupByOperator {
     GroupByDesc desc = buildGroupByDescType (ctx, aggregateName, "A",
         TypeInfoFactory.doubleTypeInfo);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
 
     for (VectorizedRowBatch unit: data) {
@@ -2394,9 +2402,10 @@ public class TestVectorGroupByOperator {
 
     GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A", TypeInfoFactory.longTypeInfo);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(null, null);
 
     for (VectorizedRowBatch unit: data) {
@@ -2428,13 +2437,13 @@ public class TestVectorGroupByOperator {
     GroupByDesc desc = buildKeyGroupByDesc (ctx, aggregateName, "Value",
         TypeInfoFactory.longTypeInfo, "Key", TypeInfoFactory.longTypeInfo);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
 
-      private int rowIndex;
       private String aggregateName;
       private HashMap<Object,Object> expected;
       private Set<Object> keys;
@@ -2494,9 +2503,10 @@ public class TestVectorGroupByOperator {
     GroupByDesc desc = buildKeyGroupByDesc (ctx, aggregateName, "Value",
        dataTypeInfo, "Key", TypeInfoFactory.stringTypeInfo);
 
-    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    CompilationOpContext cCtx = new CompilationOpContext();
+    VectorGroupByOperator vgo = new VectorGroupByOperator(cCtx, ctx, desc);
 
-    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
     vgo.initialize(hconf, null);
     out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
 

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java
index aa1d89d..428f456 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorLimitOperator.java
@@ -23,6 +23,7 @@ import java.util.Arrays;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromObjectIterables;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.LimitDesc;
@@ -63,7 +64,7 @@ public class TestVectorLimitOperator {
 
     // Create limit desc with limit value
     LimitDesc ld = new LimitDesc(limit);
-    VectorLimitOperator lo = new VectorLimitOperator(null, ld);
+    VectorLimitOperator lo = new VectorLimitOperator(new CompilationOpContext(), null, ld);
     lo.initialize(new Configuration(), null);
 
     // Process the batch

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
index 59961c5..779177a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
@@ -25,6 +25,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.vector.util.VectorizedRowGroupGenUtil;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -47,9 +48,9 @@ public class TestVectorSelectOperator {
 
     private static final long serialVersionUID = 1L;
 
-    public ValidatorVectorSelectOperator(VectorizationContext ctxt, OperatorDesc conf)
-        throws HiveException {
-      super(ctxt, conf);
+    public ValidatorVectorSelectOperator(CompilationOpContext ctx,
+        VectorizationContext ctxt, OperatorDesc conf) throws HiveException {
+      super(ctx, ctxt, conf);
       initializeOp(null);
     }
 
@@ -114,7 +115,8 @@ public class TestVectorSelectOperator {
     outputColNames.add("_col1");
     selDesc.setOutputColumnNames(outputColNames);
 
-    ValidatorVectorSelectOperator vso = new ValidatorVectorSelectOperator(vc, selDesc);
+    ValidatorVectorSelectOperator vso = new ValidatorVectorSelectOperator(
+        new CompilationOpContext(), vc, selDesc);
 
     VectorizedRowBatch vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
         VectorizedRowBatch.DEFAULT_SIZE, 4, 17);

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java
index 7454b01..74e077b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.concurrent.Future;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -54,9 +55,9 @@ public class FakeCaptureOutputOperator extends Operator<FakeCaptureOutputDesc>
 
   private transient List<Object> rows;
 
-  public static FakeCaptureOutputOperator addCaptureOutputChild(
+  public static FakeCaptureOutputOperator addCaptureOutputChild(CompilationOpContext ctx,
       Operator<? extends OperatorDesc> op) {
-    FakeCaptureOutputOperator out = new FakeCaptureOutputOperator();
+    FakeCaptureOutputOperator out = new FakeCaptureOutputOperator(ctx);
     List<Operator<? extends OperatorDesc>> listParents =
         new ArrayList<Operator<? extends OperatorDesc>>(1);
     listParents.add(op);
@@ -73,6 +74,15 @@ public class FakeCaptureOutputOperator extends Operator<FakeCaptureOutputDesc>
     return rows;
   }
 
+  /** Kryo ctor. */
+  protected FakeCaptureOutputOperator() {
+    super();
+  }
+
+  public FakeCaptureOutputOperator(CompilationOpContext ctx) {
+    super(ctx);
+  }
+
   @Override
   public void initializeOp(Configuration conf) throws HiveException {
     super.initializeOp(conf);
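
The two fake-operator files establish the constructor convention this patch expects of Operator subclasses: a protected no-argument constructor reserved for Kryo deserialization, plus a public constructor taking the CompilationOpContext. A sketch of the convention (the base class here stands in for org.apache.hadoop.hive.ql.exec.Operator, and Object stands in for CompilationOpContext, to keep it self-contained):

class OperatorBaseSketch {
  /** Deserialization path: state is filled in afterwards. */
  protected OperatorBaseSketch() { }

  /** Normal path: an operator id would be drawn from the context here. */
  protected OperatorBaseSketch(Object compilationCtx) { }
}

class CustomTestOperator extends OperatorBaseSketch {
  /** Kryo ctor: Kryo instantiates the object reflectively, then restores fields. */
  protected CustomTestOperator() {
    super();
  }

  public CustomTestOperator(Object compilationCtx) {
    super(compilationCtx);
  }
}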

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java
index 3bea307..d06d214 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.concurrent.Future;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -44,7 +45,8 @@ public class FakeVectorDataSourceOperator extends Operator<FakeVectorDataSourceO
   public static FakeVectorDataSourceOperator addFakeVectorDataSourceParent(
       Iterable<VectorizedRowBatch> source,
       Operator<? extends OperatorDesc> op) {
-    FakeVectorDataSourceOperator parent = new FakeVectorDataSourceOperator(source);
+    FakeVectorDataSourceOperator parent = new FakeVectorDataSourceOperator(
+        new CompilationOpContext(), source);
     List<Operator<? extends OperatorDesc>> listParents =
         new ArrayList<Operator<? extends OperatorDesc>>(1);
     listParents.add(parent);
@@ -56,11 +58,21 @@ public class FakeVectorDataSourceOperator extends Operator<FakeVectorDataSourceO
     return parent;
   }
 
-  public FakeVectorDataSourceOperator(
+  public FakeVectorDataSourceOperator(CompilationOpContext ctx,
     Iterable<VectorizedRowBatch> source) {
+    super(ctx);
     this.source = source;
   }
 
+  /** Kryo ctor. */
+  protected FakeVectorDataSourceOperator() {
+    super();
+  }
+
+  public FakeVectorDataSourceOperator(CompilationOpContext ctx) {
+    super(ctx);
+  }
+
   @Override
   public void initializeOp(Configuration conf) throws HiveException {
     super.initializeOp(conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
index f9a0e79..5628959 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
@@ -25,6 +25,7 @@ import java.util.Map;
 
 import junit.framework.Assert;
 
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorGroupByOperator;
@@ -104,7 +105,7 @@ public class TestVectorizer {
     grpByKeys.add(colExprB);
     desc.setKeys(grpByKeys);
 
-    GroupByOperator gbyOp = new GroupByOperator();
+    GroupByOperator gbyOp = new GroupByOperator(new CompilationOpContext());
     gbyOp.setConf(desc);
 
     Vectorizer v = new Vectorizer();
@@ -181,7 +182,7 @@ public class TestVectorizer {
   */
   @Test
   public void testValidateMapJoinOperator() {
-    MapJoinOperator map = new MapJoinOperator();
+    MapJoinOperator map = new MapJoinOperator(new CompilationOpContext());
     MapJoinDesc mjdesc = new MapJoinDesc();
     
     prepareAbstractMapJoin(map, mjdesc);
@@ -197,7 +198,7 @@ public class TestVectorizer {
   */
   @Test
   public void testValidateSMBJoinOperator() {
-      SMBMapJoinOperator map = new SMBMapJoinOperator();
+      SMBMapJoinOperator map = new SMBMapJoinOperator(new CompilationOpContext());
       SMBJoinDesc mjdesc = new SMBJoinDesc();
       
       prepareAbstractMapJoin(map, mjdesc);

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
index 9e5db23..be51edc 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
@@ -28,6 +28,7 @@ import java.util.Collections;
 import java.util.LinkedHashMap;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -85,11 +86,12 @@ public class TestGenTezWork {
       }
     });
 
-    fs = new FileSinkOperator();
+    CompilationOpContext cCtx = new CompilationOpContext();
+    fs = new FileSinkOperator(cCtx);
     fs.setConf(new FileSinkDesc());
-    rs = new ReduceSinkOperator();
+    rs = new ReduceSinkOperator(cCtx);
     rs.setConf(new ReduceSinkDesc());
-    ts = new TableScanOperator();
+    ts = new TableScanOperator(cCtx);
     ts.setConf(new TableScanDesc(null));
     ts.getChildOperators().add(rs);
     rs.getParentOperators().add(ts);

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index 2b33da1..d795324 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -297,7 +297,7 @@ public class TestUpdateDeleteSemanticAnalyzer {
         sem.getFetchTask(), astTree, sem, true, false, false, false, false, false, null);
     ExplainTask task = new ExplainTask();
     task.setWork(work);
-    task.initialize(conf, plan, null);
+    task.initialize(conf, plan, null, null);
     task.execute(null);
     FSDataInputStream in = fs.open(tmp);
     StringBuilder builder = new StringBuilder();

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java b/ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java
index f7a45f7..5e29d13 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/testutil/BaseScalarUdfTest.java
@@ -22,6 +22,7 @@ import java.util.List;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.CollectOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
@@ -83,7 +84,7 @@ public abstract class BaseScalarUdfTest extends TestCase {
     List<ExprNodeDesc> expressionList = getExpressionList();
     SelectDesc selectCtx = new SelectDesc(expressionList,
         OperatorTestUtils.createOutputColumnNames(expressionList));
-    Operator<SelectDesc> op = OperatorFactory.get(SelectDesc.class);
+    Operator<SelectDesc> op = OperatorFactory.get(new CompilationOpContext(), SelectDesc.class);
     op.setConf(selectCtx);
     CollectDesc cd = new CollectDesc(Integer.valueOf(10));
     CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, op);
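
The golden-file churn below is presumably a side effect of the same change: operator ids are now allocated per compilation starting from zero, rather than drawn from a counter shared across the whole test JVM, so plan annotations such as MAPJOIN[45] renumber to smaller, run-stable ids like MAPJOIN[33].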

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/results/clientpositive/auto_join0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_join0.q.out b/ql/src/test/results/clientpositive/auto_join0.q.out
index 10859b8..957a458 100644
--- a/ql/src/test/results/clientpositive/auto_join0.q.out
+++ b/ql/src/test/results/clientpositive/auto_join0.q.out
@@ -1,5 +1,5 @@
-Warning: Map Join MAPJOIN[45][bigTable=?] in task 'Stage-7:MAPRED' is a cross product
-Warning: Map Join MAPJOIN[38][bigTable=?] in task 'Stage-6:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[33][bigTable=?] in task 'Stage-7:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[32][bigTable=?] in task 'Stage-6:MAPRED' is a cross product
 Warning: Shuffle Join JOIN[12][tables = [src1, src2]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: -- SORT_QUERY_RESULTS
 
@@ -254,8 +254,8 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Map Join MAPJOIN[45][bigTable=?] in task 'Stage-7:MAPRED' is a cross product
-Warning: Map Join MAPJOIN[38][bigTable=?] in task 'Stage-6:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[33][bigTable=?] in task 'Stage-7:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[32][bigTable=?] in task 'Stage-6:MAPRED' is a cross product
 Warning: Shuffle Join JOIN[12][tables = [src1, src2]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: select sum(hash(a.k1,a.v1,a.k2, a.v2))
 from (

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out b/ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out
index ff3b9f6..f1707eb 100644
--- a/ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out
+++ b/ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out
@@ -454,8 +454,8 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Map Join MAPJOIN[43][bigTable=?] in task 'Stage-7:MAPRED' is a cross product
-Warning: Map Join MAPJOIN[36][bigTable=?] in task 'Stage-6:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[31][bigTable=?] in task 'Stage-7:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[30][bigTable=?] in task 'Stage-6:MAPRED' is a cross product
 Warning: Shuffle Join JOIN[20][tables = [, ]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: explain select * from 
 (select A.key from A group by key) ss join 

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/results/clientpositive/cross_product_check_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cross_product_check_2.q.out b/ql/src/test/results/clientpositive/cross_product_check_2.q.out
index 625a465..a36560f 100644
--- a/ql/src/test/results/clientpositive/cross_product_check_2.q.out
+++ b/ql/src/test/results/clientpositive/cross_product_check_2.q.out
@@ -446,8 +446,8 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Map Join MAPJOIN[47][bigTable=?] in task 'Stage-7:MAPRED' is a cross product
-Warning: Map Join MAPJOIN[40][bigTable=?] in task 'Stage-6:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[35][bigTable=?] in task 'Stage-7:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[34][bigTable=?] in task 'Stage-6:MAPRED' is a cross product
 Warning: Shuffle Join JOIN[23][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: explain select * from 
 (select A.key from A group by key) ss join 

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.7.out b/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.7.out
index 9cba332..279843b 100644
--- a/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.7.out
+++ b/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.7.out
@@ -490,7 +490,7 @@ POSTHOOK: Input: default@src_5
 199	val_199
 199	val_199
 2	val_2
-Warning: Map Join MAPJOIN[62][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
 Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: explain
 from src b 
@@ -801,7 +801,7 @@ STAGE PLANS:
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
 
-Warning: Map Join MAPJOIN[62][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
 Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: from src b 
 INSERT OVERWRITE TABLE src_4 

http://git-wip-us.apache.org/repos/asf/hive/blob/88fceaca/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.8.out b/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.8.out
index 4c1d06c..899723f 100644
--- a/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.8.out
+++ b/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.8.out
@@ -103,16 +103,14 @@ STAGE PLANS:
           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
             predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: 0 (type: bigint)
-              outputColumnNames: _col0
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                keys: _col0 (type: bigint)
+                keys: 0 (type: bigint)
                 mode: hash
                 outputColumnNames: _col0
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
                   table:
@@ -139,7 +137,7 @@ STAGE PLANS:
           TableScan
             Reduce Output Operator
               sort order: 
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -254,21 +252,21 @@ STAGE PLANS:
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((key > '9') and value is not null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   keys: _col0 (type: string), _col1 (type: string)
                   mode: hash
                   outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col0 (type: string), _col1 (type: string)
                     sort order: ++
                     Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -333,10 +331,10 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@src_4
 POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
 RUN: Stage-10:MAPRED
 RUN: Stage-2:MAPRED
 RUN: Stage-3:MAPRED
@@ -492,7 +490,7 @@ POSTHOOK: Input: default@src_5
 199	val_199
 199	val_199
 2	val_2
-Warning: Map Join MAPJOIN[107][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
 Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: explain
 from src b 
@@ -566,16 +564,14 @@ STAGE PLANS:
           Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
             predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
             Select Operator
-              expressions: 0 (type: bigint)
-              outputColumnNames: _col0
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
-                keys: _col0 (type: bigint)
+                keys: 0 (type: bigint)
                 mode: hash
                 outputColumnNames: _col0
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
                   table:
@@ -605,6 +601,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: b
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Map Join Operator
               condition map:
                    Left Semi Join 0 to 1
@@ -612,6 +609,7 @@ STAGE PLANS:
                 0 
                 1 
               outputColumnNames: _col0, _col1
+              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
                 table:
@@ -716,16 +714,16 @@ STAGE PLANS:
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: ((key > '9') and value is not null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   keys: _col0 (type: string), _col1 (type: string)
                   mode: hash
                   outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                   HashTable Sink Operator
                     keys:
                       0 key (type: string), value (type: string)
@@ -786,7 +784,7 @@ STAGE PLANS:
           TableScan
             Reduce Output Operator
               sort order: 
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -803,7 +801,7 @@ STAGE PLANS:
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
 
-Warning: Map Join MAPJOIN[107][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
+Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
 Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: from src b 
 INSERT OVERWRITE TABLE src_4 
@@ -837,10 +835,10 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@src_4
 POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
 RUN: Stage-10:MAPRED
 RUN: Stage-14:CONDITIONAL
 RUN: Stage-17:MAPREDLOCAL

