hive-commits mailing list archives

From: ser...@apache.org
Subject: svn commit: r1673969 [7/19] - in /hive/branches/llap: ./ beeline/src/java/org/apache/hive/beeline/ bin/ cli/src/java/org/apache/hadoop/hive/cli/ cli/src/test/org/apache/hadoop/hive/cli/ common/ common/src/java/org/apache/hadoop/hive/common/jsonexplain/...
Date: Wed, 15 Apr 2015 22:04:07 GMT
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java Wed Apr 15 22:04:00 2015
@@ -136,7 +136,8 @@ public class Vectorizer implements Physi
 
   Set<String> supportedAggregationUdfs = new HashSet<String>();
 
-  private PhysicalContext physicalContext = null;;
+  private PhysicalContext physicalContext = null;
+  private HiveConf hiveConf;
 
   public Vectorizer() {
 
@@ -286,13 +287,13 @@ public class Vectorizer implements Physi
 
   class VectorizationDispatcher implements Dispatcher {
 
-    private final PhysicalContext pctx;
+    private final PhysicalContext physicalContext;
 
     private List<String> reduceColumnNames;
     private List<TypeInfo> reduceTypeInfos;
 
-    public VectorizationDispatcher(PhysicalContext pctx) {
-      this.pctx = pctx;
+    public VectorizationDispatcher(PhysicalContext physicalContext) {
+      this.physicalContext = physicalContext;
       reduceColumnNames = null;
       reduceTypeInfos = null;
     }
@@ -310,7 +311,7 @@ public class Vectorizer implements Physi
             convertMapWork((MapWork) w, true);
           } else if (w instanceof ReduceWork) {
             // We are only vectorizing Reduce under Tez.
-            if (HiveConf.getBoolVar(pctx.getConf(),
+            if (HiveConf.getBoolVar(hiveConf,
                         HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_ENABLED)) {
               convertReduceWork((ReduceWork) w);
             }
@@ -322,7 +323,7 @@ public class Vectorizer implements Physi
           if (baseWork instanceof MapWork) {
             convertMapWork((MapWork) baseWork, false);
           } else if (baseWork instanceof ReduceWork
-              && HiveConf.getBoolVar(pctx.getConf(),
+              && HiveConf.getBoolVar(hiveConf,
                   HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_ENABLED)) {
             convertReduceWork((ReduceWork) baseWork);
           }
@@ -364,6 +365,17 @@ public class Vectorizer implements Physi
       addMapWorkRules(opRules, vnp);
       Dispatcher disp = new DefaultRuleDispatcher(vnp, opRules, null);
       GraphWalker ogw = new DefaultGraphWalker(disp);
+      if ((mapWork.getAliasToWork() == null) || (mapWork.getAliasToWork().size() == 0)) {
+        return false;
+      } else {
+        for (Operator<?> op : mapWork.getAliasToWork().values()) {
+          if (op == null) {
+            LOG.warn("Map work has invalid aliases to work with. Fail validation!");
+            return false;
+          }
+        }
+      }
+
       // iterator the mapper operator tree
       ArrayList<Node> topNodes = new ArrayList<Node>();
       topNodes.addAll(mapWork.getAliasToWork().values());
@@ -393,13 +405,12 @@ public class Vectorizer implements Physi
       HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
       ogw.startWalking(topNodes, nodeOutput);
 
-      Map<String, Map<Integer, String>> allScratchColumnVectorTypeMaps = vnp.getAllScratchColumnVectorTypeMaps();
-      mapWork.setAllScratchColumnVectorTypeMaps(allScratchColumnVectorTypeMaps);
-      Map<String, Map<String, Integer>> allColumnVectorMaps = vnp.getAllColumnVectorMaps();
-      mapWork.setAllColumnVectorMaps(allColumnVectorMaps);
+      mapWork.setVectorColumnNameMap(vnp.getVectorColumnNameMap());
+      mapWork.setVectorColumnTypeMap(vnp.getVectorColumnTypeMap());
+      mapWork.setVectorScratchColumnTypeMap(vnp.getVectorScratchColumnTypeMap());
 
       if (LOG.isDebugEnabled()) {
-        debugDisplayAllMaps(allColumnVectorMaps, allScratchColumnVectorTypeMaps);
+        debugDisplayAllMaps(mapWork);
       }
 
       return;
@@ -495,7 +506,7 @@ public class Vectorizer implements Physi
       // VectorizationContext...  Do we use PreOrderWalker instead of DefaultGraphWalker.
       Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
       ReduceWorkVectorizationNodeProcessor vnp =
-              new ReduceWorkVectorizationNodeProcessor(reduceColumnNames);
+              new ReduceWorkVectorizationNodeProcessor(reduceColumnNames, reduceTypeInfos);
       addReduceWorkRules(opRules, vnp);
       Dispatcher disp = new DefaultRuleDispatcher(vnp, opRules, null);
       GraphWalker ogw = new PreOrderWalker(disp);
@@ -510,14 +521,12 @@ public class Vectorizer implements Physi
       // Necessary since we are vectorizing the root operator in reduce.
       reduceWork.setReducer(vnp.getRootVectorOp());
 
-      Map<String, Map<Integer, String>> allScratchColumnVectorTypeMaps = vnp.getAllScratchColumnVectorTypeMaps();
-      reduceWork.setAllScratchColumnVectorTypeMaps(allScratchColumnVectorTypeMaps);
-      Map<String, Map<String, Integer>> allColumnVectorMaps = vnp.getAllColumnVectorMaps();
-      reduceWork.setAllColumnVectorMaps(allColumnVectorMaps);
-
+      reduceWork.setVectorColumnNameMap(vnp.getVectorColumnNameMap());
+      reduceWork.setVectorColumnTypeMap(vnp.getVectorColumnTypeMap());
+      reduceWork.setVectorScratchColumnTypeMap(vnp.getVectorScratchColumnTypeMap());
 
       if (LOG.isDebugEnabled()) {
-        debugDisplayAllMaps(allColumnVectorMaps, allScratchColumnVectorTypeMaps);
+        debugDisplayAllMaps(reduceWork);
       }
     }
   }
@@ -574,37 +583,33 @@ public class Vectorizer implements Physi
   // ReduceWorkVectorizationNodeProcessor.
   class VectorizationNodeProcessor implements NodeProcessor {
 
-    // This is used to extract scratch column types for each file key
-    protected final Map<String, VectorizationContext> scratchColumnContext =
-        new HashMap<String, VectorizationContext>();
+    // The vectorization context for the Map or Reduce task.
+    protected VectorizationContext taskVectorizationContext;
 
-    protected final Map<Operator<? extends OperatorDesc>, VectorizationContext> vContextsByOp =
-        new HashMap<Operator<? extends OperatorDesc>, VectorizationContext>();
+    // The input projection column type name map for the Map or Reduce task.
+    protected Map<Integer, String> taskColumnTypeNameMap;
+
+    VectorizationNodeProcessor() {
+      taskColumnTypeNameMap = new HashMap<Integer, String>();
+    }
+
+    public Map<String, Integer> getVectorColumnNameMap() {
+      return taskVectorizationContext.getProjectionColumnMap();
+    }
+
+    public Map<Integer, String> getVectorColumnTypeMap() {
+      return taskColumnTypeNameMap;
+    }
+
+    public Map<Integer, String> getVectorScratchColumnTypeMap() {
+      return taskVectorizationContext.getScratchColumnTypeMap();
+    }
 
     protected final Set<Operator<? extends OperatorDesc>> opsDone =
         new HashSet<Operator<? extends OperatorDesc>>();
 
-    public Map<String, Map<Integer, String>> getAllScratchColumnVectorTypeMaps() {
-      Map<String, Map<Integer, String>> allScratchColumnVectorTypeMaps =
-          new HashMap<String, Map<Integer, String>>();
-      for (String onefile : scratchColumnContext.keySet()) {
-        VectorizationContext vc = scratchColumnContext.get(onefile);
-        Map<Integer, String> cmap = vc.getScratchColumnTypeMap();
-        allScratchColumnVectorTypeMaps.put(onefile, cmap);
-      }
-      return allScratchColumnVectorTypeMaps;
-    }
-
-    public Map<String, Map<String, Integer>> getAllColumnVectorMaps() {
-      Map<String, Map<String, Integer>> allColumnVectorMaps =
-          new HashMap<String, Map<String, Integer>>();
-      for(String oneFile: scratchColumnContext.keySet()) {
-        VectorizationContext vc = scratchColumnContext.get(oneFile);
-        Map<String, Integer> cmap = vc.getProjectionColumnMap();
-        allColumnVectorMaps.put(oneFile, cmap);
-      }
-      return allColumnVectorMaps;
-    }
+    protected final Map<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>> opToVectorOpMap =
+        new HashMap<Operator<? extends OperatorDesc>, Operator<? extends OperatorDesc>>();
 
     public VectorizationContext walkStackToFindVectorizationContext(Stack<Node> stack,
             Operator<? extends OperatorDesc> op) throws SemanticException {
@@ -622,7 +627,18 @@ public class Vectorizer implements Physi
           return null;
         }
         Operator<? extends OperatorDesc> opParent = (Operator<? extends OperatorDesc>) stack.get(i);
-        vContext = vContextsByOp.get(opParent);
+        Operator<? extends OperatorDesc> vectorOpParent = opToVectorOpMap.get(opParent);
+        if (vectorOpParent != null) {
+          if (vectorOpParent instanceof VectorizationContextRegion) {
+            VectorizationContextRegion vcRegion = (VectorizationContextRegion) vectorOpParent;
+            vContext = vcRegion.getOuputVectorizationContext();
+            LOG.info("walkStackToFindVectorizationContext " + vectorOpParent.getName() + " has new vectorization context " + vContext.toString());
+          } else {
+            LOG.info("walkStackToFindVectorizationContext " + vectorOpParent.getName() + " does not have new vectorization context");
+          }
+        } else {
+          LOG.info("walkStackToFindVectorizationContext " + opParent.getName() + " is not vectorized");
+        }
         --i;
       }
       return vContext;
@@ -636,14 +652,9 @@ public class Vectorizer implements Physi
           vectorOp = vectorizeOperator(op, vContext);
           opsDone.add(op);
           if (vectorOp != op) {
+            opToVectorOpMap.put(op, vectorOp);
             opsDone.add(vectorOp);
           }
-          if (vectorOp instanceof VectorizationContextRegion) {
-            VectorizationContextRegion vcRegion = (VectorizationContextRegion) vectorOp;
-            VectorizationContext vOutContext = vcRegion.getOuputVectorizationContext();
-            vContextsByOp.put(op, vOutContext);
-            scratchColumnContext.put(vOutContext.getFileKey(), vOutContext);
-          }
         }
       } catch (HiveException e) {
         throw new SemanticException(e);
@@ -663,6 +674,7 @@ public class Vectorizer implements Physi
     private final MapWork mWork;
 
     public MapWorkVectorizationNodeProcessor(MapWork mWork) {
+      super();
       this.mWork = mWork;
     }
 
@@ -671,41 +683,26 @@ public class Vectorizer implements Physi
         Object... nodeOutputs) throws SemanticException {
 
       Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>) nd;
-      LOG.info("MapWorkVectorizationNodeProcessor processing Operator: " + op.getName() + "...");
 
       VectorizationContext vContext = null;
 
       if (op instanceof TableScanOperator) {
-        vContext = getVectorizationContext(op, physicalContext);
-        for (String onefile : mWork.getPathToAliases().keySet()) {
-          List<String> aliases = mWork.getPathToAliases().get(onefile);
-          for (String alias : aliases) {
-            Operator<? extends OperatorDesc> opRoot = mWork.getAliasToWork().get(alias);
-            if (op == opRoot) {
-              // The same vectorization context is copied multiple times into
-              // the MapWork scratch columnMap
-              // Each partition gets a copy
-              //
-              vContext.setFileKey(onefile);
-              scratchColumnContext.put(onefile, vContext);
-              if (LOG.isDebugEnabled()) {
-                LOG.debug("Vectorized MapWork operator " + op.getName() + " vectorization context " + vContext.toString());
-              }
-              break;
-            }
-          }
+        if (taskVectorizationContext == null) {
+          taskVectorizationContext = getVectorizationContext(op.getSchema(), op.getName(),
+                  taskColumnTypeNameMap);
         }
-        vContextsByOp.put(op, vContext);
+        vContext = taskVectorizationContext;
       } else {
+        LOG.info("MapWorkVectorizationNodeProcessor process going to walk the operator stack to get vectorization context for " + op.getName());
         vContext = walkStackToFindVectorizationContext(stack, op);
         if (vContext == null) {
-          throw new SemanticException(
-              String.format("Did not find vectorization context for operator %s in operator stack",
-                      op.getName()));
+          // No operator has "pushed" a new context -- so use the task vectorization context.
+          vContext = taskVectorizationContext;
         }
       }
 
       assert vContext != null;
+      LOG.info("MapWorkVectorizationNodeProcessor process operator " + op.getName() + " using vectorization context" + vContext.toString());
 
       // When Vectorized GROUPBY outputs rows instead of vectorized row batchs, we don't
       // vectorize the operators below it.
@@ -720,9 +717,10 @@ public class Vectorizer implements Physi
       Operator<? extends OperatorDesc> vectorOp = doVectorize(op, vContext);
 
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Vectorized MapWork operator " + vectorOp.getName() + " vectorization context " + vContext.toString());
         if (vectorOp instanceof VectorizationContextRegion) {
-          LOG.debug("Vectorized MapWork operator " + vectorOp.getName() + " added vectorization context " + vContext.toString());
+          VectorizationContextRegion vcRegion = (VectorizationContextRegion) vectorOp;
+          VectorizationContext vNewContext = vcRegion.getOuputVectorizationContext();
+          LOG.debug("Vectorized MapWork operator " + vectorOp.getName() + " added vectorization context " + vNewContext.toString());
         }
       }
 
@@ -733,8 +731,7 @@ public class Vectorizer implements Physi
   class ReduceWorkVectorizationNodeProcessor extends VectorizationNodeProcessor {
 
     private final List<String> reduceColumnNames;
-
-    private VectorizationContext reduceShuffleVectorizationContext;
+    private final List<TypeInfo> reduceTypeInfos;
 
     private Operator<? extends OperatorDesc> rootVectorOp;
 
@@ -742,10 +739,12 @@ public class Vectorizer implements Physi
       return rootVectorOp;
     }
 
-    public ReduceWorkVectorizationNodeProcessor(List<String> reduceColumnNames) {
+    public ReduceWorkVectorizationNodeProcessor(List<String> reduceColumnNames,
+            List<TypeInfo> reduceTypeInfos) {
+      super();
       this.reduceColumnNames =  reduceColumnNames;
+      this.reduceTypeInfos = reduceTypeInfos;
       rootVectorOp = null;
-      reduceShuffleVectorizationContext = null;
     }
 
     @Override
@@ -753,8 +752,6 @@ public class Vectorizer implements Physi
         Object... nodeOutputs) throws SemanticException {
 
       Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>) nd;
-      LOG.info("ReduceWorkVectorizationNodeProcessor processing Operator: " +
-              op.getName() + "...");
 
       VectorizationContext vContext = null;
 
@@ -763,25 +760,30 @@ public class Vectorizer implements Physi
       if (op.getParentOperators().size() == 0) {
         LOG.info("ReduceWorkVectorizationNodeProcessor process reduceColumnNames " + reduceColumnNames.toString());
 
-        vContext = new VectorizationContext(reduceColumnNames);
-        vContext.setFileKey("_REDUCE_SHUFFLE_");
-        scratchColumnContext.put("_REDUCE_SHUFFLE_", vContext);
-        reduceShuffleVectorizationContext = vContext;
+        vContext = new VectorizationContext("__Reduce_Shuffle__", reduceColumnNames);
+        taskVectorizationContext = vContext;
+        int i = 0;
+        for (TypeInfo typeInfo : reduceTypeInfos) {
+          taskColumnTypeNameMap.put(i, typeInfo.getTypeName());
+          i++;
+        }
         saveRootVectorOp = true;
 
         if (LOG.isDebugEnabled()) {
           LOG.debug("Vectorized ReduceWork reduce shuffle vectorization context " + vContext.toString());
         }
       } else {
+        LOG.info("ReduceWorkVectorizationNodeProcessor process going to walk the operator stack to get vectorization context for " + op.getName());
         vContext = walkStackToFindVectorizationContext(stack, op);
         if (vContext == null) {
           // If we didn't find a context among the operators, assume the top -- reduce shuffle's
           // vectorization context.
-          vContext = reduceShuffleVectorizationContext;
+          vContext = taskVectorizationContext;
         }
       }
 
       assert vContext != null;
+      LOG.info("ReduceWorkVectorizationNodeProcessor process operator " + op.getName() + " using vectorization context" + vContext.toString());
 
       // When Vectorized GROUPBY outputs rows instead of vectorized row batchs, we don't
       // vectorize the operators below it.
@@ -796,9 +798,10 @@ public class Vectorizer implements Physi
       Operator<? extends OperatorDesc> vectorOp = doVectorize(op, vContext);
 
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Vectorized ReduceWork operator " + vectorOp.getName() + " vectorization context " + vContext.toString());
         if (vectorOp instanceof VectorizationContextRegion) {
-          LOG.debug("Vectorized ReduceWork operator " + vectorOp.getName() + " added vectorization context " + vContext.toString());
+          VectorizationContextRegion vcRegion = (VectorizationContextRegion) vectorOp;
+          VectorizationContext vNewContext = vcRegion.getOuputVectorizationContext();
+          LOG.debug("Vectorized ReduceWork operator " + vectorOp.getName() + " added vectorization context " + vNewContext.toString());
         }
       }
       if (vectorOp instanceof VectorGroupByOperator) {
@@ -816,7 +819,7 @@ public class Vectorizer implements Physi
 
   private static class ValidatorVectorizationContext extends VectorizationContext {
     private ValidatorVectorizationContext() {
-      super();
+      super("No Name");
     }
 
     @Override
@@ -831,25 +834,27 @@ public class Vectorizer implements Physi
   }
 
   @Override
-  public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
-    this.physicalContext  = pctx;
-    boolean vectorPath = HiveConf.getBoolVar(pctx.getConf(),
+  public PhysicalContext resolve(PhysicalContext physicalContext) throws SemanticException {
+    this.physicalContext  = physicalContext;
+    hiveConf = physicalContext.getConf();
+
+    boolean vectorPath = HiveConf.getBoolVar(hiveConf,
         HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED);
     if (!vectorPath) {
       LOG.info("Vectorization is disabled");
-      return pctx;
+      return physicalContext;
     }
     // create dispatcher and graph walker
-    Dispatcher disp = new VectorizationDispatcher(pctx);
+    Dispatcher disp = new VectorizationDispatcher(physicalContext);
     TaskGraphWalker ogw = new TaskGraphWalker(disp);
 
     // get all the tasks nodes from root task
     ArrayList<Node> topNodes = new ArrayList<Node>();
-    topNodes.addAll(pctx.getRootTasks());
+    topNodes.addAll(physicalContext.getRootTasks());
 
     // begin to walk through the task tree.
     ogw.startWalking(topNodes, null);
-    return pctx;
+    return physicalContext;
   }
 
   boolean validateMapWorkOperator(Operator<? extends OperatorDesc> op, MapWork mWork, boolean isTez) {
@@ -901,7 +906,7 @@ public class Vectorizer implements Physi
         }
         break;
       case GROUPBY:
-        if (HiveConf.getBoolVar(physicalContext.getConf(),
+        if (HiveConf.getBoolVar(hiveConf,
                     HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_GROUPBY_ENABLED)) {
           ret = validateGroupByOperator((GroupByOperator) op, true, true);
         } else {
@@ -1263,20 +1268,24 @@ public class Vectorizer implements Physi
     return supportedDataTypesPattern.matcher(type.toLowerCase()).matches();
   }
 
-  private VectorizationContext getVectorizationContext(Operator op,
-      PhysicalContext pctx) {
-    RowSchema rs = op.getSchema();
+  private VectorizationContext getVectorizationContext(RowSchema rowSchema, String contextName,
+    Map<Integer, String> typeNameMap) {
+
+    VectorizationContext vContext = new VectorizationContext(contextName);
 
     // Add all non-virtual columns to make a vectorization context for
     // the TableScan operator.
-    VectorizationContext vContext = new VectorizationContext();
-    for (ColumnInfo c : rs.getSignature()) {
+    int i = 0;
+    for (ColumnInfo c : rowSchema.getSignature()) {
       // Earlier, validation code should have eliminated virtual columns usage (HIVE-5560).
       if (!isVirtualColumn(c)) {
         vContext.addInitialColumn(c.getInternalName());
+        typeNameMap.put(i, c.getTypeName());
+        i++;
       }
     }
     vContext.finishedAddingInitialColumns();
+
     return vContext;
   }
 
@@ -1334,40 +1343,14 @@ public class Vectorizer implements Physi
     return false;
   }
 
-  public void debugDisplayAllMaps(Map<String, Map<String, Integer>> allColumnVectorMaps,
-          Map<String, Map<Integer, String>> allScratchColumnVectorTypeMaps) {
+  public void debugDisplayAllMaps(BaseWork work) {
 
-    // Context keys grow in length since they are a path...
-    Comparator<String> comparerShorterString = new Comparator<String>() {
-      @Override
-      public int compare(String o1, String o2) {
-        Integer length1 = o1.length();
-        Integer length2 = o2.length();
-        return length1.compareTo(length2);
-      }};
-
-    Comparator<Integer> comparerInteger = new Comparator<Integer>() {
-      @Override
-      public int compare(Integer o1, Integer o2) {
-        return o1.compareTo(o2);
-      }};
-
-    Map<String, Map<Integer, String>> sortedAllColumnVectorMaps = new TreeMap<String, Map<Integer, String>>(comparerShorterString);
-    for (Map.Entry<String, Map<String, Integer>> entry : allColumnVectorMaps.entrySet()) {
-      Map<Integer, String> sortedColumnMap = new TreeMap<Integer, String>(comparerInteger);
-      for (Map.Entry<String, Integer> innerEntry : entry.getValue().entrySet()) {
-        sortedColumnMap.put(innerEntry.getValue(), innerEntry.getKey());
-      }
-      sortedAllColumnVectorMaps.put(entry.getKey(), sortedColumnMap);
-    }
-    LOG.debug("sortedAllColumnVectorMaps " + sortedAllColumnVectorMaps);
-
-    Map<String, Map<Integer, String>> sortedAllScratchColumnVectorTypeMap = new TreeMap<String, Map<Integer, String>>(comparerShorterString);
-    for (Map.Entry<String, Map<Integer, String>> entry : allScratchColumnVectorTypeMaps.entrySet()) {
-      Map<Integer, String> sortedScratchColumnTypeMap = new TreeMap<Integer, String>(comparerInteger);
-      sortedScratchColumnTypeMap.putAll(entry.getValue());
-      sortedAllScratchColumnVectorTypeMap.put(entry.getKey(), sortedScratchColumnTypeMap);
-    }
-    LOG.debug("sortedAllScratchColumnVectorTypeMap " + sortedAllScratchColumnVectorTypeMap);
+    Map<String, Integer> columnNameMap = work.getVectorColumnNameMap();
+    Map<Integer, String> columnTypeMap = work.getVectorColumnTypeMap();
+    Map<Integer, String> scratchColumnTypeMap = work.getVectorScratchColumnTypeMap();
+
+    LOG.debug("debugDisplayAllMaps columnNameMap " + columnNameMap.toString());
+    LOG.debug("debugDisplayAllMaps columnTypeMap " + columnTypeMap.toString());
+    LOG.debug("debugDisplayAllMaps scratchColumnTypeMap " + scratchColumnTypeMap.toString());
   }
 }

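For orientation on the Vectorizer hunks above: the per-file-key scratch and column maps are replaced with a single task-level vectorization context whose projection, type, and scratch-column maps are copied onto the MapWork or ReduceWork via setVectorColumnNameMap, setVectorColumnTypeMap, and setVectorScratchColumnTypeMap. The sketch below only illustrates that bookkeeping; the class and its methods are hypothetical, not Hive's VectorizationContext.

    // Hypothetical sketch of per-task column bookkeeping; not Hive code.
    import java.util.HashMap;
    import java.util.Map;

    public class TaskVectorizationSketch {
      // name -> projection column index, as exported via setVectorColumnNameMap()
      private final Map<String, Integer> columnNameMap = new HashMap<String, Integer>();
      // column index -> type name, as exported via setVectorColumnTypeMap()
      private final Map<Integer, String> columnTypeMap = new HashMap<Integer, String>();
      // scratch column index -> type name, as exported via setVectorScratchColumnTypeMap()
      private final Map<Integer, String> scratchColumnTypeMap = new HashMap<Integer, String>();
      private int nextColumn = 0;

      // Input projection columns come from the task's row schema (non-virtual columns only).
      public void addInitialColumn(String name, String typeName) {
        columnNameMap.put(name, nextColumn);
        columnTypeMap.put(nextColumn, typeName);
        nextColumn++;
      }

      // Expression outputs get fresh scratch columns beyond the projection.
      public int allocateScratchColumn(String typeName) {
        int column = nextColumn++;
        scratchColumnTypeMap.put(column, typeName);
        return column;
      }

      public static void main(String[] args) {
        TaskVectorizationSketch ctx = new TaskVectorizationSketch();
        ctx.addInitialColumn("key", "string");
        ctx.addInitialColumn("value", "bigint");
        int scratch = ctx.allocateScratchColumn("double");
        System.out.println("names: " + ctx.columnNameMap);
        System.out.println("types: " + ctx.columnTypeMap);
        System.out.println("scratch column " + scratch + ": " + ctx.scratchColumnTypeMap);
      }
    }
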
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Wed Apr 15 22:04:00 2015
@@ -196,18 +196,13 @@ public class PartitionPruner implements
     // Remove all parts that are not partition columns. See javadoc for details.
     ExprNodeDesc compactExpr = compactExpr(prunerExpr.clone());
     String oldFilter = prunerExpr.getExprString();
-    if (isBooleanExpr(compactExpr)) {
-    	// For null and true values, return every partition
-    	if (!isFalseExpr(compactExpr)) {
-    		// Non-strict mode, and all the predicates are on non-partition columns - get everything.
-        if (LOG.isDebugEnabled()) {
-    		  LOG.debug("Filter " + oldFilter + " was null after compacting");
-        }
-    		return getAllPartsFromCacheOrServer(tab, key, true, prunedPartitionsMap);
-    	} else {
-    		return new PrunedPartitionList(tab, new LinkedHashSet<Partition>(new ArrayList<Partition>()),
-    				new ArrayList<String>(), false);
-    	}
+    if (compactExpr == null || isBooleanExpr(compactExpr)) {
+      if (isFalseExpr(compactExpr)) {
+        return new PrunedPartitionList(
+            tab, new LinkedHashSet<Partition>(0), new ArrayList<String>(0), false);
+      }
+      // For null and true values, return every partition
+      return getAllPartsFromCacheOrServer(tab, key, true, prunedPartitionsMap);
     }
     if (LOG.isDebugEnabled()) {
       LOG.debug("Filter w/ compacting: " + compactExpr.getExprString()
@@ -241,22 +236,22 @@ public class PartitionPruner implements
     partsCache.put(key, ppList);
     return ppList;
   }
-  
+
   static private boolean isBooleanExpr(ExprNodeDesc expr) {
-	  return  expr != null && expr instanceof ExprNodeConstantDesc && 
+    return  expr != null && expr instanceof ExprNodeConstantDesc &&
               ((ExprNodeConstantDesc)expr).getTypeInfo() instanceof PrimitiveTypeInfo &&
               ((PrimitiveTypeInfo)(((ExprNodeConstantDesc)expr).getTypeInfo())).
-              getTypeName().equals(serdeConstants.BOOLEAN_TYPE_NAME);	  
+              getTypeName().equals(serdeConstants.BOOLEAN_TYPE_NAME);
   }
   static private boolean isTrueExpr(ExprNodeDesc expr) {
-      return  isBooleanExpr(expr) &&  
-              ((ExprNodeConstantDesc)expr).getValue() != null &&
-              ((ExprNodeConstantDesc)expr).getValue().equals(Boolean.TRUE);
+      return  isBooleanExpr(expr) &&
+          ((ExprNodeConstantDesc)expr).getValue() != null &&
+          ((ExprNodeConstantDesc)expr).getValue().equals(Boolean.TRUE);
   }
   static private boolean isFalseExpr(ExprNodeDesc expr) {
-      return  isBooleanExpr(expr) && 
+      return  isBooleanExpr(expr) &&
               ((ExprNodeConstantDesc)expr).getValue() != null &&
-              ((ExprNodeConstantDesc)expr).getValue().equals(Boolean.FALSE);	  
+              ((ExprNodeConstantDesc)expr).getValue().equals(Boolean.FALSE);
   }
 
   /**
@@ -268,42 +263,48 @@ public class PartitionPruner implements
    */
   static private ExprNodeDesc compactExpr(ExprNodeDesc expr) {
     // If this is a constant boolean expression, return the value.
-	if (expr == null) {
-		return null;
-	}
-	if (expr instanceof ExprNodeConstantDesc) {
-      if (isBooleanExpr(expr)) {
-        return expr;
-      } else {
-        throw new IllegalStateException("Unexpected non-null ExprNodeConstantDesc: "
-          + expr.getExprString());
+    if (expr == null) {
+      return null;
+    }
+    if (expr instanceof ExprNodeConstantDesc) {
+      if (((ExprNodeConstantDesc)expr).getValue() == null) return null;
+      if (!isBooleanExpr(expr)) {
+        throw new IllegalStateException("Unexpected non-boolean ExprNodeConstantDesc: "
+            + expr.getExprString());
       }
+      return expr;
     } else if (expr instanceof ExprNodeGenericFuncDesc) {
       GenericUDF udf = ((ExprNodeGenericFuncDesc)expr).getGenericUDF();
       boolean isAnd = udf instanceof GenericUDFOPAnd;
       boolean isOr = udf instanceof GenericUDFOPOr;
-      
+
       if (isAnd || isOr) {
         List<ExprNodeDesc> children = expr.getChildren();
-        ExprNodeDesc left = children.get(0);
-        children.set(0, compactExpr(left));
-        ExprNodeDesc right = children.get(1);
-        children.set(1, compactExpr(right));
-
-        if (isTrueExpr(children.get(0)) && isTrueExpr(children.get(1))) {
-        	return new ExprNodeConstantDesc(Boolean.TRUE);
-        } else if (isTrueExpr(children.get(0)))  {
-        	return isAnd ? children.get(1) :  new ExprNodeConstantDesc(Boolean.TRUE);
-        } else if (isTrueExpr(children.get(1))) {
-        	return isAnd ? children.get(0) : new ExprNodeConstantDesc(Boolean.TRUE);
-        } else if (isFalseExpr(children.get(0)) && isFalseExpr(children.get(1))) {
-        	return new ExprNodeConstantDesc(Boolean.FALSE);
-        } else if (isFalseExpr(children.get(0)))  {
-            return isAnd ? new ExprNodeConstantDesc(Boolean.FALSE) : children.get(1);
-        } else if (isFalseExpr(children.get(1))) {
-            return isAnd ? new ExprNodeConstantDesc(Boolean.FALSE) : children.get(0);
-        } 
-        
+        ExprNodeDesc left = compactExpr(children.get(0));
+        ExprNodeDesc right = compactExpr(children.get(1));
+        // Non-partition expressions are converted to nulls.
+        if (left == null && right == null) {
+          return null;
+        } else if (left == null) {
+          return isAnd ? right : null;
+        } else if (right == null) {
+          return isAnd ? left : null;
+        }
+        // Handle boolean expressions
+        boolean isLeftFalse = isFalseExpr(left), isRightFalse = isFalseExpr(right),
+            isLeftTrue = isTrueExpr(left), isRightTrue = isTrueExpr(right);
+        if ((isRightTrue && isLeftTrue) || (isOr && (isLeftTrue || isRightTrue))) {
+          return new ExprNodeConstantDesc(Boolean.TRUE);
+        } else if ((isRightFalse && isLeftFalse) || (isAnd && (isLeftFalse || isRightFalse))) {
+          return new ExprNodeConstantDesc(Boolean.FALSE);
+        } else if ((isAnd && isLeftTrue) || (isOr && isLeftFalse)) {
+          return right;
+        } else if ((isAnd && isRightTrue) || (isOr && isRightFalse)) {
+          return left;
+        }
+        // Nothing to compact, update expr with compacted children.
+        children.set(0, left);
+        children.set(1, right);
       }
       return expr;
     } else {
@@ -328,9 +329,9 @@ public class PartitionPruner implements
       if (!partCols.contains(column)) {
         // Column doesn't appear to be a partition column for the table.
         return new ExprNodeConstantDesc(expr.getTypeInfo(), null);
-      } 
+      }
       referred.add(column);
-    }	        
+    }
     if (expr instanceof ExprNodeGenericFuncDesc) {
       List<ExprNodeDesc> children = expr.getChildren();
       for (int i = 0; i < children.size(); ++i) {

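The rewritten compactExpr above folds AND/OR over constant TRUE/FALSE children and over children that compacted away to null (non-partition predicates). The standalone sketch below reproduces only that truth table with plain Booleans, where null stands for a dropped non-partition child; it is illustrative, not Hive code.

    // Illustrative folding rules for constant children; null = non-partition predicate
    // that was compacted away. Mirrors the branches added to compactExpr above.
    public class CompactExprFoldingSketch {
      static Boolean foldAnd(Boolean left, Boolean right) {
        if (left == null && right == null) return null;
        if (left == null) return right;   // AND keeps the remaining partition predicate
        if (right == null) return left;
        if (!left || !right) return Boolean.FALSE;
        return Boolean.TRUE;
      }

      static Boolean foldOr(Boolean left, Boolean right) {
        if (left == null || right == null) return null;  // OR with a dropped child cannot prune
        if (left || right) return Boolean.TRUE;
        return Boolean.FALSE;
      }

      public static void main(String[] args) {
        System.out.println(foldAnd(null, Boolean.FALSE));  // false -> empty partition list
        System.out.println(foldOr(null, Boolean.FALSE));   // null  -> fetch all partitions
      }
    }

In the pruner itself (first PartitionPruner hunk above), a null or TRUE result fetches every partition, while FALSE yields an empty PrunedPartitionList.
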
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java Wed Apr 15 22:04:00 2015
@@ -26,8 +26,10 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-@Explain(displayName = "Alter Table Partition Merge Files")
+
+@Explain(displayName = "Alter Table Partition Merge Files", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class AlterTablePartMergeFilesDesc {
 
   private String tableName;

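The same @Explain change recurs throughout this commit: descriptor classes and getters list the explain levels they appear in, with Level.USER opting a field into the new condensed user-level explain. The class below is a hypothetical descriptor used only to show the annotation pattern.

    import org.apache.hadoop.hive.ql.plan.Explain;
    import org.apache.hadoop.hive.ql.plan.Explain.Level;

    // Hypothetical descriptor illustrating the explainLevels pattern; not part of Hive.
    @Explain(displayName = "Example Operation", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
    public class ExampleOperationDesc {

      private String tableName;

      // Shown in every explain mode, including the user-level plan.
      @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
      public String getTableName() {
        return tableName;
      }

      // Leaving out Level.USER keeps a verbose detail out of the user-level plan.
      @Explain(displayName = "internal detail", explainLevels = { Level.DEFAULT, Level.EXTENDED })
      public String getInternalDetail() {
        return "detail";
      }
    }
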
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java Wed Apr 15 22:04:00 2015
@@ -121,6 +121,7 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.optimizer.calcite.HiveTypeSystemImpl;
 import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
 import org.apache.hadoop.hive.ql.optimizer.calcite.TraitsUtil;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
 import org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveVolcanoPlanner;
 import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate;
 import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter;
@@ -249,13 +250,29 @@ public class CalcitePlanner extends Sema
           disableJoinMerge = false;
           sinkOp = genPlan(getQB());
           LOG.info("CBO Succeeded; optimized logical plan.");
+          this.ctx.setCboInfo("Plan optimized by CBO.");
           LOG.debug(newAST.dump());
         } catch (Exception e) {
           boolean isMissingStats = noColsMissingStats.get() > 0;
           if (isMissingStats) {
             LOG.error("CBO failed due to missing column stats (see previous errors), skipping CBO");
+            this.ctx
+                .setCboInfo("Plan not optimized by CBO due to missing statistics. Please check log for more details.");
           } else {
             LOG.error("CBO failed, skipping CBO. ", e);
+            if (e instanceof CalciteSemanticException) {
+              CalciteSemanticException calciteSemanticException = (CalciteSemanticException) e;
+              UnsupportedFeature unsupportedFeature = calciteSemanticException
+                  .getUnsupportedFeature();
+              if (unsupportedFeature != null) {
+                this.ctx.setCboInfo("Plan not optimized by CBO due to missing feature ["
+                    + unsupportedFeature + "].");
+              } else {
+                this.ctx.setCboInfo("Plan not optimized by CBO.");
+              }
+            } else {
+              this.ctx.setCboInfo("Plan not optimized by CBO.");
+            }
           }
           if (!conf.getBoolVar(ConfVars.HIVE_IN_TEST) || isMissingStats
               || e instanceof CalciteSemanticException) {
@@ -280,6 +297,7 @@ public class CalcitePlanner extends Sema
           }
         }
       } else {
+        this.ctx.setCboInfo("Plan not optimized by CBO.");
         skipCalcitePlan = true;
       }
     }
@@ -1081,7 +1099,7 @@ public class CalcitePlanner extends Sema
         String msg = String.format("UNIQUE JOIN is currently not supported in CBO,"
             + " turn off cbo to use UNIQUE JOIN.");
         LOG.debug(msg);
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Unique_join);
       }
 
       // 1. Determine Join Type
@@ -1164,7 +1182,7 @@ public class CalcitePlanner extends Sema
               + " Currently we don't support Table Sample clauses in CBO,"
               + " turn off cbo for queries on tableSamples.", tableAlias);
           LOG.debug(msg);
-          throw new CalciteSemanticException(msg);
+          throw new CalciteSemanticException(msg, UnsupportedFeature.Table_sample_clauses);
         }
 
         // 2. Get Table Metadata
@@ -1261,7 +1279,8 @@ public class CalcitePlanner extends Sema
         // fail on compile time
         // for such queries, its an arcane corner case, not worth of adding that
         // complexity.
-        throw new CalciteSemanticException("Filter expression with non-boolean return type.");
+        throw new CalciteSemanticException("Filter expression with non-boolean return type.",
+            UnsupportedFeature.Filter_expression_with_non_boolean_return_type);
       }
       ImmutableMap<String, Integer> hiveColNameCalcitePosMap = this.relToHiveColNameCalcitePosMap
           .get(srcRel);
@@ -1772,7 +1791,8 @@ public class CalcitePlanner extends Sema
                 grpbyExpr, new TypeCheckCtx(groupByInputRowResolver));
             ExprNodeDesc grpbyExprNDesc = astToExprNDescMap.get(grpbyExpr);
             if (grpbyExprNDesc == null)
-              throw new CalciteSemanticException("Invalid Column Reference: " + grpbyExpr.dump());
+              throw new CalciteSemanticException("Invalid Column Reference: " + grpbyExpr.dump(),
+                  UnsupportedFeature.Invalid_column_reference);
 
             addToGBExpr(groupByOutputRowResolver, groupByInputRowResolver, grpbyExpr,
                 grpbyExprNDesc, gbExprNDescLst, outputColumnNames);
@@ -1960,7 +1980,8 @@ public class CalcitePlanner extends Sema
           RowResolver obSyntheticProjectRR = new RowResolver();
           if (!RowResolver.add(obSyntheticProjectRR, inputRR)) {
             throw new CalciteSemanticException(
-                "Duplicates detected when adding columns to RR: see previous message");
+                "Duplicates detected when adding columns to RR: see previous message",
+                UnsupportedFeature.Duplicates_in_RR);
           }
           int vcolPos = inputRR.getRowSchema().getSignature().size();
           for (Pair<ASTNode, TypeInfo> astTypePair : vcASTTypePairs) {
@@ -1975,20 +1996,23 @@ public class CalcitePlanner extends Sema
           if (outermostOB) {
             if (!RowResolver.add(outputRR, inputRR)) {
               throw new CalciteSemanticException(
-                  "Duplicates detected when adding columns to RR: see previous message");
+                  "Duplicates detected when adding columns to RR: see previous message",
+                  UnsupportedFeature.Duplicates_in_RR);
             }
 
           } else {
             if (!RowResolver.add(outputRR, obSyntheticProjectRR)) {
               throw new CalciteSemanticException(
-                  "Duplicates detected when adding columns to RR: see previous message");
+                  "Duplicates detected when adding columns to RR: see previous message",
+                  UnsupportedFeature.Duplicates_in_RR);
             }
             originalOBChild = srcRel;
           }
         } else {
           if (!RowResolver.add(outputRR, inputRR)) {
             throw new CalciteSemanticException(
-                "Duplicates detected when adding columns to RR: see previous message");
+                "Duplicates detected when adding columns to RR: see previous message",
+                UnsupportedFeature.Duplicates_in_RR);
           }
         }
 
@@ -2026,7 +2050,8 @@ public class CalcitePlanner extends Sema
         RowResolver outputRR = new RowResolver();
         if (!RowResolver.add(outputRR, relToHiveRR.get(srcRel))) {
           throw new CalciteSemanticException(
-              "Duplicates detected when adding columns to RR: see previous message");
+              "Duplicates detected when adding columns to RR: see previous message",
+              UnsupportedFeature.Duplicates_in_RR);
         }
         ImmutableMap<String, Integer> hiveColNameCalcitePosMap = buildHiveToCalciteColumnMap(
             outputRR, sortRel);
@@ -2317,7 +2342,7 @@ public class CalcitePlanner extends Sema
         String msg = String.format("Hint specified for %s."
             + " Currently we don't support hints in CBO, turn off cbo to use hints.", hint);
         LOG.debug(msg);
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Hint);
       }
 
       // 4. Bailout if select involves Transform
@@ -2326,7 +2351,7 @@ public class CalcitePlanner extends Sema
         String msg = String.format("SELECT TRANSFORM is currently not supported in CBO,"
             + " turn off cbo to use TRANSFORM.");
         LOG.debug(msg);
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Select_transform);
       }
 
       // 5. Bailout if select involves UDTF
@@ -2339,7 +2364,7 @@ public class CalcitePlanner extends Sema
           String msg = String.format("UDTF " + funcName + " is currently not supported in CBO,"
               + " turn off cbo to use UDTF " + funcName);
           LOG.debug(msg);
-          throw new CalciteSemanticException(msg);
+          throw new CalciteSemanticException(msg, UnsupportedFeature.UDTF);
         }
       }
 
@@ -2408,7 +2433,8 @@ public class CalcitePlanner extends Sema
         } else if (expr.toStringTree().contains("TOK_FUNCTIONDI")
             && !(srcRel instanceof HiveAggregate)) {
           // Likely a malformed query eg, select hash(distinct c1) from t1;
-          throw new CalciteSemanticException("Distinct without an aggreggation.");
+          throw new CalciteSemanticException("Distinct without an aggreggation.",
+              UnsupportedFeature.Distinct_without_an_aggreggation);
         } else {
           // Case when this is an expression
           TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR);
@@ -2427,7 +2453,8 @@ public class CalcitePlanner extends Sema
               .isSkewedCol() : false);
           if (!out_rwsch.putWithCheck(tabAlias, colAlias, null, colInfo)) {
             throw new CalciteSemanticException("Cannot add column to RR: " + tabAlias + "."
-                + colAlias + " => " + colInfo + " due to duplication, see previous warnings");
+                + colAlias + " => " + colInfo + " due to duplication, see previous warnings",
+                UnsupportedFeature.Duplicates_in_RR);
           }
 
           if (exp instanceof ExprNodeColumnDesc) {
@@ -2491,7 +2518,7 @@ public class CalcitePlanner extends Sema
         if (LOG.isDebugEnabled()) {
           LOG.debug(msg + " because it: " + reason);
         }
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Subquery);
       }
 
       // 1. Build Rel For Src (SubQuery, TS, Join)
@@ -2520,7 +2547,7 @@ public class CalcitePlanner extends Sema
         // table
         // So, for now lets just disable this. Anyway there is nothing much to
         // optimize in such cases.
-        throw new CalciteSemanticException("Unsupported");
+        throw new CalciteSemanticException("Unsupported", UnsupportedFeature.Others);
 
       }
       // 1.3 process join
@@ -2628,7 +2655,8 @@ public class CalcitePlanner extends Sema
       if (havingClause != null) {
         if (!(srcRel instanceof HiveAggregate)) {
           // ill-formed query like select * from t1 having c1 > 0;
-          throw new CalciteSemanticException("Having clause without any group-by.");
+          throw new CalciteSemanticException("Having clause without any group-by.",
+              UnsupportedFeature.Having_clause_without_any_groupby);
         }
         validateNoHavingReferenceToAlias(qb, (ASTNode) havingClause.getChild(0));
         gbFilter = genFilterRelNode(qb, (ASTNode) havingClause.getChild(0), srcRel, aliasToRel,
@@ -2690,7 +2718,7 @@ public class CalcitePlanner extends Sema
               + " This non standard behavior is not supported with cbo on."
               + " Turn off cbo for these queries.", aliasToCheck, havingClause);
           LOG.debug(msg);
-          throw new CalciteSemanticException(msg);
+          throw new CalciteSemanticException(msg, UnsupportedFeature.Select_alias_in_having_clause);
         }
       }
 
@@ -2726,7 +2754,7 @@ public class CalcitePlanner extends Sema
         String msg = String.format("Multi Insert is currently not supported in CBO,"
             + " turn off cbo to use Multi Insert.");
         LOG.debug(msg);
-        throw new CalciteSemanticException(msg);
+        throw new CalciteSemanticException(msg, UnsupportedFeature.Multi_insert);
       }
       return qbp;
     }

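The CalcitePlanner hunks above thread two pieces of state toward explain output: every CBO bailout now names an UnsupportedFeature, and the outcome is recorded with ctx.setCboInfo(...). A hypothetical helper condensing those branches might look as follows; the exception class and enum are Hive's (used in the diff above), the helper itself is only a sketch.

    import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
    import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;

    // Hypothetical helper mirroring the cboInfo messages set in the hunks above.
    public class CboInfoSketch {
      static String cboInfoFor(Exception e, boolean missingStats) {
        if (missingStats) {
          return "Plan not optimized by CBO due to missing statistics. Please check log for more details.";
        }
        if (e instanceof CalciteSemanticException) {
          UnsupportedFeature feature = ((CalciteSemanticException) e).getUnsupportedFeature();
          if (feature != null) {
            return "Plan not optimized by CBO due to missing feature [" + feature + "].";
          }
        }
        return "Plan not optimized by CBO.";
      }
    }
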
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java Wed Apr 15 22:04:00 2015
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde.serdeConstants;
 
 /**
  * ColumnStatsSemanticAnalyzer.
@@ -186,15 +187,7 @@ public class ColumnStatsSemanticAnalyzer
         } else {
           whereClause.append(" and ");
         }
-        whereClause.append(partKey);
-        whereClause.append(" = ");
-        if (getColTypeOf(partKey).equalsIgnoreCase("string")) {
-          whereClause.append("'");
-        }
-        whereClause.append(value);
-        if (getColTypeOf(partKey).equalsIgnoreCase("string")) {
-          whereClause.append("'");
-        }
+        whereClause.append(partKey).append(" = ").append(genPartValueString(partKey, value));
       }
     }
 
@@ -211,11 +204,39 @@ public class ColumnStatsSemanticAnalyzer
     return predPresent ? whereClause.append(groupByClause) : groupByClause;
   }
 
+  private String genPartValueString (String partKey, String partVal) throws SemanticException {
+    String returnVal = partVal;
+    String partColType = getColTypeOf(partKey);
+    if (partColType.equals(serdeConstants.STRING_TYPE_NAME) ||
+        partColType.contains(serdeConstants.VARCHAR_TYPE_NAME) ||
+        partColType.contains(serdeConstants.CHAR_TYPE_NAME)) {
+      returnVal = "'" + partVal + "'";
+    } else if (partColType.equals(serdeConstants.TINYINT_TYPE_NAME)) {
+      returnVal = partVal+"Y";
+    } else if (partColType.equals(serdeConstants.SMALLINT_TYPE_NAME)) {
+      returnVal = partVal+"S";
+    } else if (partColType.equals(serdeConstants.INT_TYPE_NAME)) {
+      returnVal = partVal;
+    } else if (partColType.equals(serdeConstants.BIGINT_TYPE_NAME)) {
+      returnVal = partVal+"L";
+    } else if (partColType.contains(serdeConstants.DECIMAL_TYPE_NAME)) {
+      returnVal = partVal + "BD";
+    } else if (partColType.equals(serdeConstants.DATE_TYPE_NAME) ||
+        partColType.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+      returnVal = partColType + " '" + partVal + "'";
+    } else {
+      //for other usually not used types, just quote the value
+      returnVal = "'" + partVal + "'";
+    }
+    
+    return returnVal;
+  }
+  
   private String getColTypeOf (String partKey) throws SemanticException{
 
     for (FieldSchema fs : tbl.getPartitionKeys()) {
       if (partKey.equalsIgnoreCase(fs.getName())) {
-        return fs.getType();
+        return fs.getType().toLowerCase();
       }
     }
     throw new SemanticException ("Unknown partition key : " + partKey);

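genPartValueString above renders a partition value as a typed Hive literal so the generated analyze query parses correctly against non-string partition columns. A standalone sketch of the same mapping, with type names written out instead of serdeConstants, is below.

    // Standalone sketch of the typed-literal mapping; illustrative only.
    public class PartitionLiteralSketch {
      static String toLiteral(String colType, String value) {
        String type = colType.toLowerCase();
        if (type.equals("string") || type.contains("varchar") || type.contains("char")) {
          return "'" + value + "'";
        } else if (type.equals("tinyint")) {
          return value + "Y";
        } else if (type.equals("smallint")) {
          return value + "S";
        } else if (type.equals("int")) {
          return value;
        } else if (type.equals("bigint")) {
          return value + "L";
        } else if (type.contains("decimal")) {
          return value + "BD";
        } else if (type.equals("date") || type.equals("timestamp")) {
          return type + " '" + value + "'";
        }
        // Other, rarely used partition column types are simply quoted.
        return "'" + value + "'";
      }

      public static void main(String[] args) {
        System.out.println(toLiteral("smallint", "7"));        // 7S
        System.out.println(toLiteral("DATE", "2015-04-15"));   // date '2015-04-15'
      }
    }
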
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Wed Apr 15 22:04:00 2015
@@ -186,6 +186,8 @@ public class DDLSemanticAnalyzer extends
     TokenToTypeName.put(HiveParser.TOK_DATE, serdeConstants.DATE_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_DATETIME, serdeConstants.DATETIME_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_INTERVAL_YEAR_MONTH, serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_INTERVAL_DAY_TIME, serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_DECIMAL, serdeConstants.DECIMAL_TYPE_NAME);
   }
 

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Wed Apr 15 22:04:00 2015
@@ -91,6 +91,8 @@ public class ExplainSemanticAnalyzer ext
       pCtx = ((SemanticAnalyzer)sem).getParseContext();
     }
 
+    boolean userLevelExplain = !extended && !formatted && !dependency && !logical && !authorize
+        && HiveConf.getBoolVar(ctx.getConf(), HiveConf.ConfVars.HIVE_EXPLAIN_USER);
     ExplainWork work = new ExplainWork(ctx.getResFile(),
         pCtx,
         tasks,
@@ -101,7 +103,9 @@ public class ExplainSemanticAnalyzer ext
         formatted,
         dependency,
         logical,
-        authorize);
+        authorize,
+        userLevelExplain,
+        ctx.getCboInfo());
 
     work.setAppendTaskType(
         HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES));

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Apr 15 22:04:00 2015
@@ -115,6 +115,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.optimizer.Optimizer;
 import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
+import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature;
 import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec.SpecType;
 import org.apache.hadoop.hive.ql.parse.CalcitePlanner.ASTSearcher;
@@ -2995,7 +2996,8 @@ public class SemanticAnalyzer extends Ba
         if (ensureUniqueCols) {
           if (!output.putWithCheck(tmp[0], tmp[1], null, oColInfo)) {
             throw new CalciteSemanticException("Cannot add column to RR: " + tmp[0] + "." + tmp[1]
-                + " => " + oColInfo + " due to duplication, see previous warnings");
+                + " => " + oColInfo + " due to duplication, see previous warnings",
+                UnsupportedFeature.Duplicates_in_RR);
           }
         } else {
           output.put(tmp[0], tmp[1], oColInfo);
@@ -11011,9 +11013,10 @@ public class SemanticAnalyzer extends Ba
 
   // Process the position alias in GROUPBY and ORDERBY
   private void processPositionAlias(ASTNode ast) throws SemanticException {
+    boolean isByPos = false;
     if (HiveConf.getBoolVar(conf,
-          HiveConf.ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS) == false) {
-      return;
+          HiveConf.ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS) == true) {
+      isByPos = true;
     }
 
     if (ast.getChildCount()  == 0) {
@@ -11047,15 +11050,20 @@ public class SemanticAnalyzer extends Ba
         for (int child_pos = 0; child_pos < groupbyNode.getChildCount(); ++child_pos) {
           ASTNode node = (ASTNode) groupbyNode.getChild(child_pos);
           if (node.getToken().getType() == HiveParser.Number) {
-            int pos = Integer.parseInt(node.getText());
-            if (pos > 0 && pos <= selectExpCnt) {
-              groupbyNode.setChild(child_pos,
-                selectNode.getChild(pos - 1).getChild(0));
+            if (isByPos) {
+              int pos = Integer.parseInt(node.getText());
+              if (pos > 0 && pos <= selectExpCnt) {
+                groupbyNode.setChild(child_pos,
+                  selectNode.getChild(pos - 1).getChild(0));
+              } else {
+                throw new SemanticException(
+                  ErrorMsg.INVALID_POSITION_ALIAS_IN_GROUPBY.getMsg(
+                  "Position alias: " + pos + " does not exist\n" +
+                  "The Select List is indexed from 1 to " + selectExpCnt));
+              }
             } else {
-              throw new SemanticException(
-                ErrorMsg.INVALID_POSITION_ALIAS_IN_GROUPBY.getMsg(
-                "Position alias: " + pos + " does not exist\n" +
-                "The Select List is indexed from 1 to " + selectExpCnt));
+              warn("Using constant number  " + node.getText() +
+                " in group by. If you try to use position alias when hive.groupby.orderby.position.alias is false, the position alias will be ignored.");
             }
           }
         }
@@ -11074,19 +11082,24 @@ public class SemanticAnalyzer extends Ba
           ASTNode colNode = (ASTNode) orderbyNode.getChild(child_pos);
           ASTNode node = (ASTNode) colNode.getChild(0);
           if (node.getToken().getType() == HiveParser.Number) {
-            if (!isAllCol) {
-              int pos = Integer.parseInt(node.getText());
-              if (pos > 0 && pos <= selectExpCnt) {
-                colNode.setChild(0, selectNode.getChild(pos - 1).getChild(0));
+            if( isByPos ) {
+              if (!isAllCol) {
+                int pos = Integer.parseInt(node.getText());
+                if (pos > 0 && pos <= selectExpCnt) {
+                  colNode.setChild(0, selectNode.getChild(pos - 1).getChild(0));
+                } else {
+                  throw new SemanticException(
+                    ErrorMsg.INVALID_POSITION_ALIAS_IN_ORDERBY.getMsg(
+                    "Position alias: " + pos + " does not exist\n" +
+                    "The Select List is indexed from 1 to " + selectExpCnt));
+                }
               } else {
                 throw new SemanticException(
-                  ErrorMsg.INVALID_POSITION_ALIAS_IN_ORDERBY.getMsg(
-                  "Position alias: " + pos + " does not exist\n" +
-                  "The Select List is indexed from 1 to " + selectExpCnt));
+                  ErrorMsg.NO_SUPPORTED_ORDERBY_ALLCOLREF_POS.getMsg());
               }
-            } else {
-              throw new SemanticException(
-                ErrorMsg.NO_SUPPORTED_ORDERBY_ALLCOLREF_POS.getMsg());
+            } else { //if not using position alias and it is a number.
+              warn("Using constant number " + node.getText() +
+                " in order by. If you try to use position alias when hive.groupby.orderby.position.alias is false, the position alias will be ignored.");
             }
           }
         }
@@ -12087,4 +12100,8 @@ public class SemanticAnalyzer extends Ba
       queryProperties.setOuterQueryLimit(qb.getParseInfo().getOuterQueryLimit());
     }
   }
+  private void warn(String msg) {
+    SessionState.getConsole().printInfo(
+        String.format("Warning: %s", msg));
+  }
 }

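The processPositionAlias change above no longer returns early when hive.groupby.orderby.position.alias is disabled; numeric tokens are left in place as constants and a warning is printed, while the rewrite and its range check still apply when the flag is on. A minimal sketch of that per-token decision, with hypothetical method and parameter names, follows.

    // Hypothetical sketch of the per-token decision in processPositionAlias above.
    public class PositionAliasSketch {
      static String decide(boolean positionAliasEnabled, int pos, int selectExprCount) {
        if (!positionAliasEnabled) {
          // New behavior: keep the constant and warn instead of skipping the whole rewrite.
          return "Warning: constant " + pos + " kept; position aliases are ignored while the flag is false";
        }
        if (pos >= 1 && pos <= selectExprCount) {
          return "replace token with select expression #" + pos;
        }
        // Out-of-range aliases still fail, as with INVALID_POSITION_ALIAS_IN_GROUPBY/ORDERBY above.
        throw new IllegalArgumentException("Position alias: " + pos + " does not exist; "
            + "the select list is indexed from 1 to " + selectExprCount);
      }

      public static void main(String[] args) {
        System.out.println(decide(false, 2, 1));
        System.out.println(decide(true, 1, 3));
      }
    }
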
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java Wed Apr 15 22:04:00 2015
@@ -140,6 +140,7 @@ public class UpdateDeleteSemanticAnalyze
     }
 
     List<FieldSchema> partCols = mTable.getPartCols();
+    List<String> bucketingCols = mTable.getBucketCols();
 
     rewrittenQueryStr.append("insert into table ");
     rewrittenQueryStr.append(getDotName(tableName));
@@ -199,7 +200,10 @@ public class UpdateDeleteSemanticAnalyze
             }
           }
         }
-
+        // Updating a bucketing column would require moving the row from one file to another - not supported.
+        if (bucketingCols != null && bucketingCols.contains(columnName)) {
+          throw new SemanticException(ErrorMsg.UPDATE_CANNOT_UPDATE_BUCKET_VALUE, columnName);
+        }
         // This means that in UPDATE T SET x = _something_
         // _something_ can be whatever is supported in SELECT _something_
         setCols.put(columnName, (ASTNode)assignment.getChildren().get(1));
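
The UpdateDeleteSemanticAnalyzer change above rejects UPDATE statements that assign to one of the table's bucketing columns, because rewriting the bucket value would have to move the row into a different bucket file, which the ACID update path does not do. Below is a minimal standalone sketch of that guard, assuming only the behaviour visible in the hunk; the class name and error-message text here are illustrative, not Hive code.

  import java.util.Arrays;
  import java.util.List;

  /** Standalone sketch of the bucketing-column guard added above; not Hive's analyzer code. */
  public class BucketUpdateGuardSketch {

    /** Rejects an UPDATE ... SET clause that targets one of the table's bucketing columns. */
    static void checkSetColumn(String columnName, List<String> bucketingCols) {
      // Changing the bucketing value would require moving the row to a different
      // bucket file, which the ACID update path does not do.
      if (bucketingCols != null && bucketingCols.contains(columnName)) {
        throw new IllegalArgumentException(
            "Updating values of bucketing columns is not supported: " + columnName);
      }
    }

    public static void main(String[] args) {
      List<String> bucketingCols = Arrays.asList("id");
      checkSetColumn("name", bucketingCols);       // allowed
      try {
        checkSetColumn("id", bucketingCols);       // rejected
      } catch (IllegalArgumentException e) {
        System.out.println(e.getMessage());
      }
    }
  }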

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java Wed Apr 15 22:04:00 2015
@@ -22,6 +22,7 @@ package org.apache.hadoop.hive.ql.plan;
 import java.util.Map;
 
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 public class AbstractOperatorDesc implements OperatorDesc {
 
@@ -35,7 +36,7 @@ public class AbstractOperatorDesc implem
   }
 
   @Override
-  @Explain(skipHeader = true, displayName = "Statistics")
+  @Explain(skipHeader = true, displayName = "Statistics", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Statistics getStatistics() {
     return statistics;
   }
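
Most of the remaining files in this part of the patch make the same mechanical change: the @Explain annotations gain an explicit explainLevels attribute naming which explain modes (USER, DEFAULT, EXTENDED) render the element. Annotations that previously said normalExplain = false now list only Level.EXTENDED (as in BucketMapJoinContext further down), while the common case lists all three levels. Based purely on the usages visible in this diff, the annotation presumably looks roughly like the sketch below; the retention policy and the default values shown are assumptions, not copied from Hive source.

  import java.lang.annotation.Retention;
  import java.lang.annotation.RetentionPolicy;

  /**
   * Rough sketch of the Explain annotation as used in this patch. Only displayName,
   * skipHeader, explainLevels and the Level constants appear in the diff; everything
   * else here (retention, defaults) is an assumption for illustration.
   */
  @Retention(RetentionPolicy.RUNTIME)
  public @interface Explain {

    /** Explain modes that may render an annotated element. */
    enum Level { USER, DEFAULT, EXTENDED }

    String displayName() default "";

    boolean skipHeader() default false;

    // Which EXPLAIN variants show this element; the default shown here is assumed.
    Level[] explainLevels() default { Level.DEFAULT, Level.EXTENDED };
  }

With this in place, a getter such as getStatistics() is displayed in all three explain modes, while elements annotated with only Level.EXTENDED (for example the alias bucket file name mappings) appear only in EXPLAIN EXTENDED output.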

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java Wed Apr 15 22:04:00 2015
@@ -21,11 +21,13 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * AlterDatabaseDesc.
  *
  */
-@Explain(displayName = "Create Database")
+@Explain(displayName = "Create Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class AlterDatabaseDesc extends DDLDesc implements Serializable {
 
   private static final long serialVersionUID = 1L;
@@ -68,7 +70,7 @@ public class AlterDatabaseDesc extends D
     this.dbProperties = dbProps;
   }
 
-  @Explain(displayName="name")
+  @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDatabaseName() {
     return databaseName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java Wed Apr 15 22:04:00 2015
@@ -21,11 +21,13 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * AlterIndexDesc.
  *
  */
-@Explain(displayName = "Alter Index")
+@Explain(displayName = "Alter Index", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class AlterIndexDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String indexName;
@@ -53,7 +55,7 @@ public class AlterIndexDesc extends DDLD
   /**
    * @return the name of the index
    */
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getIndexName() {
     return indexName;
   }
@@ -69,7 +71,7 @@ public class AlterIndexDesc extends DDLD
   /**
    * @return the baseTable
    */
-  @Explain(displayName = "new name")
+  @Explain(displayName = "new name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getBaseTableName() {
     return baseTable;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java Wed Apr 15 22:04:00 2015
@@ -32,12 +32,13 @@ import org.apache.hadoop.hive.ql.exec.Ut
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * AlterTableDesc.
  *
  */
-@Explain(displayName = "Alter Table")
+@Explain(displayName = "Alter Table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class AlterTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
@@ -261,12 +262,12 @@ public class AlterTableDesc extends DDLD
     this.numberBuckets = numBuckets;
   }
 
-  @Explain(displayName = "new columns")
+  @Explain(displayName = "new columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<String> getNewColsString() {
     return Utilities.getFieldSchemaString(getNewCols());
   }
 
-  @Explain(displayName = "type")
+  @Explain(displayName = "type", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getAlterTableTypeString() {
     return op.getName();
   }
@@ -274,7 +275,7 @@ public class AlterTableDesc extends DDLD
   /**
    * @return the old name of the table
    */
-  @Explain(displayName = "old name")
+  @Explain(displayName = "old name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getOldName() {
     return oldName;
   }
@@ -290,7 +291,7 @@ public class AlterTableDesc extends DDLD
   /**
    * @return the newName
    */
-  @Explain(displayName = "new name")
+  @Explain(displayName = "new name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getNewName() {
     return newName;
   }
@@ -368,7 +369,7 @@ public class AlterTableDesc extends DDLD
   /**
    * @return the input format
    */
-  @Explain(displayName = "input format")
+  @Explain(displayName = "input format", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getInputFormat() {
     return inputFormat;
   }
@@ -384,7 +385,7 @@ public class AlterTableDesc extends DDLD
   /**
    * @return the output format
    */
-  @Explain(displayName = "output format")
+  @Explain(displayName = "output format", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getOutputFormat() {
     return outputFormat;
   }
@@ -400,7 +401,7 @@ public class AlterTableDesc extends DDLD
   /**
    * @return the storage handler
    */
-  @Explain(displayName = "storage handler")
+  @Explain(displayName = "storage handler", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getStorageHandler() {
     return storageHandler;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ArchiveWork.java Wed Apr 15 22:04:00 2015
@@ -19,12 +19,14 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ArchiveWork.
  *
  */
-@Explain(displayName = "Map Reduce")
+@Explain(displayName = "Map Reduce", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ArchiveWork implements Serializable {
   private static final long serialVersionUID = 1L;
   private ArchiveActionType type;

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java Wed Apr 15 22:04:00 2015
@@ -29,6 +29,8 @@ import java.util.Stack;
 import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * BaseWork. Base class for any "work" that's being done on the cluster. Items like stats
@@ -58,8 +60,9 @@ public abstract class BaseWork extends A
   private String name;
 
   // Vectorization.
-  protected Map<String, Map<Integer, String>> allScratchColumnVectorTypeMaps = null;
-  protected Map<String, Map<String, Integer>> allColumnVectorMaps = null;
+  protected Map<String, Integer> vectorColumnNameMap;
+  protected Map<Integer, String> vectorColumnTypeMap;
+  protected Map<Integer, String> vectorScratchColumnTypeMap;
 
   protected boolean llapMode = false;
   protected boolean uberMode = false;
@@ -144,27 +147,34 @@ public abstract class BaseWork extends A
     return returnSet;
   }
 
-  public Map<String, Map<Integer, String>> getAllScratchColumnVectorTypeMaps() {
-    return allScratchColumnVectorTypeMaps;
+  public Map<String, Integer> getVectorColumnNameMap() {
+    return vectorColumnNameMap;
+  }
+
+  public void setVectorColumnNameMap(Map<String, Integer> vectorColumnNameMap) {
+    this.vectorColumnNameMap = vectorColumnNameMap;
+  }
+
+  public Map<Integer, String> getVectorColumnTypeMap() {
+    return vectorColumnTypeMap;
   }
 
-  public void setAllScratchColumnVectorTypeMaps(
-      Map<String, Map<Integer, String>> allScratchColumnVectorTypeMaps) {
-    this.allScratchColumnVectorTypeMaps = allScratchColumnVectorTypeMaps;
+  public void setVectorColumnTypeMap(Map<Integer, String> vectorColumnTypeMap) {
+    this.vectorColumnTypeMap = vectorColumnTypeMap;
   }
 
-  public Map<String, Map<String, Integer>> getAllColumnVectorMaps() {
-    return allColumnVectorMaps;
+  public Map<Integer, String> getVectorScratchColumnTypeMap() {
+    return vectorScratchColumnTypeMap;
   }
 
-  public void setAllColumnVectorMaps(Map<String, Map<String, Integer>> allColumnVectorMaps) {
-    this.allColumnVectorMaps = allColumnVectorMaps;
+  public void setVectorScratchColumnTypeMap(Map<Integer, String> vectorScratchColumnTypeMap) {
+    this.vectorScratchColumnTypeMap = vectorScratchColumnTypeMap;
   }
 
   /**
    * @return the mapredLocalWork
    */
-  @Explain(displayName = "Local Work")
+  @Explain(displayName = "Local Work", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public MapredLocalWork getMapRedLocalWork() {
     return mrLocalWork;
   }
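
The BaseWork change above replaces the two per-alias nested maps (allColumnVectorMaps and allScratchColumnVectorTypeMaps) with three flat maps carried directly on each work object: vectorColumnNameMap, vectorColumnTypeMap and vectorScratchColumnTypeMap. The diff only shows the accessors, so the exact semantics are inferred; the sketch below merely illustrates the assumed shape of the new structures (column name to batch index, index to type name, and scratch index to type name).

  import java.util.HashMap;
  import java.util.Map;

  /** Sketch of how the new flat vectorization maps on BaseWork might be populated and read. */
  public class VectorMapsSketch {
    public static void main(String[] args) {
      // column name -> position in the vectorized row batch (inferred meaning)
      Map<String, Integer> vectorColumnNameMap = new HashMap<>();
      // column position -> Hive type name (inferred meaning)
      Map<Integer, String> vectorColumnTypeMap = new HashMap<>();
      // scratch column position -> type name, for intermediate expression results (inferred)
      Map<Integer, String> vectorScratchColumnTypeMap = new HashMap<>();

      vectorColumnNameMap.put("dept", 0);
      vectorColumnTypeMap.put(0, "string");
      vectorScratchColumnTypeMap.put(1, "bigint");

      // e.g. look up the batch index and type of a projected column
      int idx = vectorColumnNameMap.get("dept");
      System.out.println("dept -> column " + idx + " (" + vectorColumnTypeMap.get(idx) + ")");
    }
  }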

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java Wed Apr 15 22:04:00 2015
@@ -30,6 +30,7 @@ import java.util.regex.Pattern;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.ql.exec.BucketMatcher;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * was inner class of MapreLocalWork. context for bucket mapjoin (or smb join)
@@ -130,7 +131,7 @@ public class BucketMapJoinContext implem
     this.bucketMatcherClass = bucketMatcherClass;
   }
 
-  @Explain(displayName = "Alias Bucket File Name Mapping", normalExplain = false)
+  @Explain(displayName = "Alias Bucket File Name Mapping", explainLevels = { Level.EXTENDED })
   public Map<String, Map<String, List<String>>> getAliasBucketFileNameMapping() {
     return aliasBucketFileNameMapping;
   }
@@ -149,7 +150,7 @@ public class BucketMapJoinContext implem
     }
   }
 
-  @Explain(displayName = "Alias Bucket Base File Name Mapping", normalExplain = false)
+  @Explain(displayName = "Alias Bucket Base File Name Mapping", explainLevels = { Level.EXTENDED })
   public Map<String, Map<String, List<String>>> getAliasBucketBaseFileNameMapping() {
     return aliasBucketBaseFileNameMapping;
   }
@@ -159,7 +160,7 @@ public class BucketMapJoinContext implem
     this.aliasBucketBaseFileNameMapping = aliasBucketBaseFileNameMapping;
   }
 
-  @Explain(displayName = "Alias Bucket Output File Name Mapping", normalExplain = false)
+  @Explain(displayName = "Alias Bucket Output File Name Mapping", explainLevels = { Level.EXTENDED })
   public Map<String, Integer> getBucketFileNameMapping() {
     return bucketFileNameMapping;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java Wed Apr 15 22:04:00 2015
@@ -17,13 +17,15 @@
  */
 
 package org.apache.hadoop.hive.ql.plan;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 
 /**
  * CollectDesc.
  *
  */
-@Explain(displayName = "Collect")
+@Explain(displayName = "Collect", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CollectDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   Integer bufferSize;

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsDesc.java Wed Apr 15 22:04:00 2015
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * Contains the information needed to persist column level statistics
  */
@@ -51,7 +53,7 @@ public class ColumnStatsDesc extends DDL
     this.tableName = tableName;
   }
 
-  @Explain(displayName = "Is Table Level Stats", normalExplain=false)
+  @Explain(displayName = "Is Table Level Stats", explainLevels = { Level.EXTENDED })
   public boolean isTblLevel() {
     return isTblLevel;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsUpdateWork.java Wed Apr 15 22:04:00 2015
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.List;
 import java.util.Map;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ColumnStatsUpdateWork implementation. ColumnStatsUpdateWork will persist the
@@ -30,7 +32,7 @@ import java.util.Map;
  * PARTITION(partitionId=100) UPDATE STATISTICS for column value SET
  * ('maxColLen'='4444','avgColLen'='44.4');
  */
-@Explain(displayName = "Column Stats Update Work")
+@Explain(displayName = "Column Stats Update Work", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ColumnStatsUpdateWork implements Serializable {
   private static final long serialVersionUID = 1L;
   private ColumnStatsDesc colStats;

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ColumnStatsWork.java Wed Apr 15 22:04:00 2015
@@ -21,12 +21,14 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.exec.ListSinkOperator;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ColumnStats Work.
  *
  */
-@Explain(displayName = "Column Stats Work")
+@Explain(displayName = "Column Stats Work", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ColumnStatsWork implements Serializable {
   private static final long serialVersionUID = 1L;
   private FetchWork fWork;

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CommonMergeJoinDesc.java Wed Apr 15 22:04:00 2015
@@ -19,8 +19,10 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-@Explain(displayName = "Merge Join Operator")
+
+@Explain(displayName = "Merge Join Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CommonMergeJoinDesc extends MapJoinDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private int numBuckets;

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java Wed Apr 15 22:04:00 2015
@@ -21,12 +21,13 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * CopyWork.
  *
  */
-@Explain(displayName = "Copy")
+@Explain(displayName = "Copy", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CopyWork implements Serializable {
   private static final long serialVersionUID = 1L;
   private Path fromPath;
@@ -46,12 +47,12 @@ public class CopyWork implements Seriali
     this.setErrorOnSrcEmpty(errorOnSrcEmpty);
   }
   
-  @Explain(displayName = "source")
+  @Explain(displayName = "source", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Path getFromPath() {
     return fromPath;
   }
 
-  @Explain(displayName = "destination")
+  @Explain(displayName = "destination", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Path getToPath() {
     return toPath;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateDatabaseDesc.java Wed Apr 15 22:04:00 2015
@@ -21,11 +21,13 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * CreateDatabaseDesc.
  *
  */
-@Explain(displayName = "Create Database")
+@Explain(displayName = "Create Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CreateDatabaseDesc extends DDLDesc implements Serializable {
 
   private static final long serialVersionUID = 1L;
@@ -75,7 +77,7 @@ public class CreateDatabaseDesc extends
     this.dbProperties = dbProps;
   }
 
-  @Explain(displayName="name")
+  @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return databaseName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java Wed Apr 15 22:04:00 2015
@@ -22,12 +22,13 @@ import java.io.Serializable;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
  * CreateFunctionDesc.
  *
  */
-@Explain(displayName = "Create Function")
+@Explain(displayName = "Create Function", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CreateFunctionDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
@@ -50,7 +51,7 @@ public class CreateFunctionDesc implemen
     this.resources = resources;
   }
 
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getFunctionName() {
     return functionName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java Wed Apr 15 22:04:00 2015
@@ -21,13 +21,14 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 /**
  * CreateMacroDesc.
  *
  */
-@Explain(displayName = "Create Macro")
+@Explain(displayName = "Create Macro", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CreateMacroDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
@@ -52,7 +53,7 @@ public class CreateMacroDesc implements
     this.body = body;
   }
 
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getMacroName() {
     return macroName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java Wed Apr 15 22:04:00 2015
@@ -40,12 +40,14 @@ import org.apache.hadoop.hive.ql.parse.S
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.OutputFormat;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * CreateTableDesc.
  *
  */
-@Explain(displayName = "Create Table")
+@Explain(displayName = "Create Table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CreateTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private static Log LOG = LogFactory.getLog(CreateTableDesc.class);
@@ -140,12 +142,12 @@ public class CreateTableDesc extends DDL
     return copy == null ? null : new ArrayList<T>(copy);
   }
 
-  @Explain(displayName = "columns")
+  @Explain(displayName = "columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<String> getColsString() {
     return Utilities.getFieldSchemaString(getCols());
   }
 
-  @Explain(displayName = "partition columns")
+  @Explain(displayName = "partition columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<String> getPartColsString() {
     return Utilities.getFieldSchemaString(getPartCols());
   }
@@ -159,7 +161,7 @@ public class CreateTableDesc extends DDL
     this.ifNotExists = ifNotExists;
   }
 
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTableName() {
     return tableName;
   }
@@ -188,7 +190,7 @@ public class CreateTableDesc extends DDL
     this.partCols = partCols;
   }
 
-  @Explain(displayName = "bucket columns")
+  @Explain(displayName = "bucket columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<String> getBucketCols() {
     return bucketCols;
   }
@@ -197,7 +199,7 @@ public class CreateTableDesc extends DDL
     this.bucketCols = bucketCols;
   }
 
-  @Explain(displayName = "# buckets")
+  @Explain(displayName = "# buckets", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Integer getNumBucketsExplain() {
     if (numBuckets == -1) {
       return null;
@@ -268,7 +270,7 @@ public class CreateTableDesc extends DDL
     this.comment = comment;
   }
 
-  @Explain(displayName = "input format")
+  @Explain(displayName = "input format", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getInputFormat() {
     return inputFormat;
   }
@@ -277,7 +279,7 @@ public class CreateTableDesc extends DDL
     this.inputFormat = inputFormat;
   }
 
-  @Explain(displayName = "output format")
+  @Explain(displayName = "output format", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getOutputFormat() {
     return outputFormat;
   }
@@ -286,7 +288,7 @@ public class CreateTableDesc extends DDL
     this.outputFormat = outputFormat;
   }
 
-  @Explain(displayName = "storage handler")
+  @Explain(displayName = "storage handler", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getStorageHandler() {
     return storageHandler;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java Wed Apr 15 22:04:00 2015
@@ -21,11 +21,13 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 /**
  * CreateTableLikeDesc.
  *
  */
-@Explain(displayName = "Create Table")
+@Explain(displayName = "Create Table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CreateTableLikeDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String tableName;
@@ -71,7 +73,7 @@ public class CreateTableLikeDesc extends
     this.ifNotExists = ifNotExists;
   }
 
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTableName() {
     return tableName;
   }
@@ -80,7 +82,7 @@ public class CreateTableLikeDesc extends
     this.tableName = tableName;
   }
 
-  @Explain(displayName = "default input format")
+  @Explain(displayName = "default input format", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDefaultInputFormat() {
     return defaultInputFormat;
   }
@@ -89,7 +91,7 @@ public class CreateTableLikeDesc extends
     this.defaultInputFormat = inputFormat;
   }
 
-  @Explain(displayName = "default output format")
+  @Explain(displayName = "default output format", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDefaultOutputFormat() {
     return defaultOutputFormat;
   }
@@ -148,7 +150,7 @@ public class CreateTableLikeDesc extends
     this.defaultSerdeProps = serdeProps;
   }
 
-  @Explain(displayName = "like")
+  @Explain(displayName = "like", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getLikeTableName() {
     return likeTableName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java Wed Apr 15 22:04:00 2015
@@ -24,12 +24,14 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * CreateViewDesc.
  *
  */
-@Explain(displayName = "Create View")
+@Explain(displayName = "Create View", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CreateViewDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
@@ -65,7 +67,7 @@ public class CreateViewDesc extends DDLD
     this.isAlterViewAs = isAlterViewAs;
   }
 
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getViewName() {
     return viewName;
   }
@@ -74,7 +76,7 @@ public class CreateViewDesc extends DDLD
     this.viewName = viewName;
   }
 
-  @Explain(displayName = "original text")
+  @Explain(displayName = "original text", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getViewOriginalText() {
     return originalText;
   }


