Subject: svn commit: r1673437 [4/8] - in /hive/branches/hbase-metastore: ./ beeline/src/java/org/apache/hive/beeline/ bin/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ ...
Date: Tue, 14 Apr 2015 14:47:33 -0000
From: gates@apache.org
To: commits@hive.apache.org
Message-Id: <20150414144736.4D88AAC08F4@hades.apache.org>

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java Tue Apr 14 14:47:30 2015
@@ -136,7 +136,8 @@ public class Vectorizer implements Physi
   Set supportedAggregationUdfs = new HashSet();
 
-  private PhysicalContext physicalContext = null;;
+  private PhysicalContext physicalContext = null;
+  private HiveConf hiveConf;
 
   public Vectorizer() {
@@ -286,13 +287,13 @@ public class Vectorizer implements Physi
 
   class VectorizationDispatcher implements Dispatcher {
 
-    private final PhysicalContext pctx;
+    private final PhysicalContext physicalContext;
 
     private List reduceColumnNames;
     private List reduceTypeInfos;
 
-    public VectorizationDispatcher(PhysicalContext pctx) {
-      this.pctx = pctx;
+    public VectorizationDispatcher(PhysicalContext physicalContext) {
+      this.physicalContext = physicalContext;
       reduceColumnNames = null;
       reduceTypeInfos = null;
     }
@@ -310,7 +311,7 @@ public class Vectorizer implements Physi
         convertMapWork((MapWork) w, true);
       } else if (w instanceof ReduceWork) {
         // We are only vectorizing Reduce under Tez.
-        if (HiveConf.getBoolVar(pctx.getConf(),
+        if (HiveConf.getBoolVar(hiveConf,
             HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_ENABLED)) {
           convertReduceWork((ReduceWork) w);
         }
@@ -322,7 +323,7 @@ public class Vectorizer implements Physi
       if (baseWork instanceof MapWork) {
         convertMapWork((MapWork) baseWork, false);
       } else if (baseWork instanceof ReduceWork
-          && HiveConf.getBoolVar(pctx.getConf(),
+          && HiveConf.getBoolVar(hiveConf,
               HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_ENABLED)) {
         convertReduceWork((ReduceWork) baseWork);
       }
@@ -393,13 +394,12 @@ public class Vectorizer implements Physi
       HashMap nodeOutput = new HashMap();
       ogw.startWalking(topNodes, nodeOutput);
 
-      Map> allScratchColumnVectorTypeMaps = vnp.getAllScratchColumnVectorTypeMaps();
-      mapWork.setAllScratchColumnVectorTypeMaps(allScratchColumnVectorTypeMaps);
-      Map> allColumnVectorMaps = vnp.getAllColumnVectorMaps();
-      mapWork.setAllColumnVectorMaps(allColumnVectorMaps);
+      mapWork.setVectorColumnNameMap(vnp.getVectorColumnNameMap());
+      mapWork.setVectorColumnTypeMap(vnp.getVectorColumnTypeMap());
+      mapWork.setVectorScratchColumnTypeMap(vnp.getVectorScratchColumnTypeMap());
 
       if (LOG.isDebugEnabled()) {
-        debugDisplayAllMaps(allColumnVectorMaps, allScratchColumnVectorTypeMaps);
+        debugDisplayAllMaps(mapWork);
       }
 
       return;
@@ -495,7 +495,7 @@ public class Vectorizer implements Physi
       // VectorizationContext...  Do we use PreOrderWalker instead of DefaultGraphWalker.
       Map opRules = new LinkedHashMap();
       ReduceWorkVectorizationNodeProcessor vnp =
-          new ReduceWorkVectorizationNodeProcessor(reduceColumnNames);
+          new ReduceWorkVectorizationNodeProcessor(reduceColumnNames, reduceTypeInfos);
       addReduceWorkRules(opRules, vnp);
       Dispatcher disp = new DefaultRuleDispatcher(vnp, opRules, null);
       GraphWalker ogw = new PreOrderWalker(disp);
@@ -510,14 +510,12 @@ public class Vectorizer implements Physi
       // Necessary since we are vectorizing the root operator in reduce.
       reduceWork.setReducer(vnp.getRootVectorOp());
 
-      Map> allScratchColumnVectorTypeMaps = vnp.getAllScratchColumnVectorTypeMaps();
-      reduceWork.setAllScratchColumnVectorTypeMaps(allScratchColumnVectorTypeMaps);
-      Map> allColumnVectorMaps = vnp.getAllColumnVectorMaps();
-      reduceWork.setAllColumnVectorMaps(allColumnVectorMaps);
-
+      reduceWork.setVectorColumnNameMap(vnp.getVectorColumnNameMap());
+      reduceWork.setVectorColumnTypeMap(vnp.getVectorColumnTypeMap());
+      reduceWork.setVectorScratchColumnTypeMap(vnp.getVectorScratchColumnTypeMap());
 
       if (LOG.isDebugEnabled()) {
-        debugDisplayAllMaps(allColumnVectorMaps, allScratchColumnVectorTypeMaps);
+        debugDisplayAllMaps(reduceWork);
       }
     }
   }
@@ -574,37 +572,33 @@ public class Vectorizer implements Physi
   // ReduceWorkVectorizationNodeProcessor.
   class VectorizationNodeProcessor implements NodeProcessor {
 
-    // This is used to extract scratch column types for each file key
-    protected final Map scratchColumnContext =
-        new HashMap();
+    // The vectorization context for the Map or Reduce task.
+    protected VectorizationContext taskVectorizationContext;
+
+    // The input projection column type name map for the Map or Reduce task.
+    protected Map taskColumnTypeNameMap;
+
+    VectorizationNodeProcessor() {
+      taskColumnTypeNameMap = new HashMap();
+    }
 
-    protected final Map, VectorizationContext> vContextsByOp =
-        new HashMap, VectorizationContext>();
+    public Map getVectorColumnNameMap() {
+      return taskVectorizationContext.getProjectionColumnMap();
+    }
+
+    public Map getVectorColumnTypeMap() {
+      return taskColumnTypeNameMap;
+    }
+
+    public Map getVectorScratchColumnTypeMap() {
+      return taskVectorizationContext.getScratchColumnTypeMap();
+    }
 
     protected final Set> opsDone =
         new HashSet>();
 
-    public Map> getAllScratchColumnVectorTypeMaps() {
-      Map> allScratchColumnVectorTypeMaps =
-          new HashMap>();
-      for (String onefile : scratchColumnContext.keySet()) {
-        VectorizationContext vc = scratchColumnContext.get(onefile);
-        Map cmap = vc.getScratchColumnTypeMap();
-        allScratchColumnVectorTypeMaps.put(onefile, cmap);
-      }
-      return allScratchColumnVectorTypeMaps;
-    }
-
-    public Map> getAllColumnVectorMaps() {
-      Map> allColumnVectorMaps =
-          new HashMap>();
-      for(String oneFile: scratchColumnContext.keySet()) {
-        VectorizationContext vc = scratchColumnContext.get(oneFile);
-        Map cmap = vc.getProjectionColumnMap();
-        allColumnVectorMaps.put(oneFile, cmap);
-      }
-      return allColumnVectorMaps;
-    }
+    protected final Map, Operator> opToVectorOpMap =
+        new HashMap, Operator>();
 
     public VectorizationContext walkStackToFindVectorizationContext(Stack stack,
            Operator op) throws SemanticException {
@@ -622,7 +616,18 @@ public class Vectorizer implements Physi
         return null;
       }
       Operator opParent = (Operator) stack.get(i);
-      vContext = vContextsByOp.get(opParent);
+      Operator vectorOpParent = opToVectorOpMap.get(opParent);
+      if (vectorOpParent != null) {
+        if (vectorOpParent instanceof VectorizationContextRegion) {
+          VectorizationContextRegion vcRegion = (VectorizationContextRegion) vectorOpParent;
+          vContext = vcRegion.getOuputVectorizationContext();
+          LOG.info("walkStackToFindVectorizationContext " + vectorOpParent.getName() + " has new vectorization context " + vContext.toString());
+        } else {
+          LOG.info("walkStackToFindVectorizationContext " + vectorOpParent.getName() + " does not have new vectorization context");
+        }
+      } else {
+        LOG.info("walkStackToFindVectorizationContext " + opParent.getName() + " is not vectorized");
+      }
       --i;
     }
     return vContext;
@@ -636,14 +641,9 @@ public class Vectorizer implements Physi
           vectorOp = vectorizeOperator(op, vContext);
           opsDone.add(op);
           if (vectorOp != op) {
+            opToVectorOpMap.put(op, vectorOp);
             opsDone.add(vectorOp);
           }
-          if (vectorOp instanceof VectorizationContextRegion) {
-            VectorizationContextRegion vcRegion = (VectorizationContextRegion) vectorOp;
-            VectorizationContext vOutContext = vcRegion.getOuputVectorizationContext();
-            vContextsByOp.put(op, vOutContext);
-            scratchColumnContext.put(vOutContext.getFileKey(), vOutContext);
-          }
         }
       } catch (HiveException e) {
         throw new SemanticException(e);
@@ -663,6 +663,7 @@ public class Vectorizer implements Physi
     private final MapWork mWork;
 
     public MapWorkVectorizationNodeProcessor(MapWork mWork) {
+      super();
       this.mWork = mWork;
     }
 
@@ -671,41 +672,26 @@ public class Vectorizer implements Physi
         Object... nodeOutputs) throws SemanticException {
       Operator op = (Operator) nd;
 
-      LOG.info("MapWorkVectorizationNodeProcessor processing Operator: " + op.getName() + "...");
-
       VectorizationContext vContext = null;
 
       if (op instanceof TableScanOperator) {
-        vContext = getVectorizationContext(op, physicalContext);
-        for (String onefile : mWork.getPathToAliases().keySet()) {
-          List aliases = mWork.getPathToAliases().get(onefile);
-          for (String alias : aliases) {
-            Operator opRoot = mWork.getAliasToWork().get(alias);
-            if (op == opRoot) {
-              // The same vectorization context is copied multiple times into
-              // the MapWork scratch columnMap
-              // Each partition gets a copy
-              //
-              vContext.setFileKey(onefile);
-              scratchColumnContext.put(onefile, vContext);
-              if (LOG.isDebugEnabled()) {
-                LOG.debug("Vectorized MapWork operator " + op.getName() + " vectorization context " + vContext.toString());
-              }
-              break;
-            }
-          }
+        if (taskVectorizationContext == null) {
+          taskVectorizationContext = getVectorizationContext(op.getSchema(), op.getName(),
+                  taskColumnTypeNameMap);
         }
-        vContextsByOp.put(op, vContext);
+        vContext = taskVectorizationContext;
       } else {
+        LOG.info("MapWorkVectorizationNodeProcessor process going to walk the operator stack to get vectorization context for " + op.getName());
         vContext = walkStackToFindVectorizationContext(stack, op);
         if (vContext == null) {
-          throw new SemanticException(
-              String.format("Did not find vectorization context for operator %s in operator stack",
-                      op.getName()));
+          // No operator has "pushed" a new context -- so use the task vectorization context.
+          vContext = taskVectorizationContext;
        }
      }
 
      assert vContext != null;
+      LOG.info("MapWorkVectorizationNodeProcessor process operator " + op.getName() + " using vectorization context" + vContext.toString());
 
      // When Vectorized GROUPBY outputs rows instead of vectorized row batchs, we don't
      // vectorize the operators below it.
@@ -720,9 +706,10 @@ public class Vectorizer implements Physi
      Operator vectorOp = doVectorize(op, vContext);
 
      if (LOG.isDebugEnabled()) {
-        LOG.debug("Vectorized MapWork operator " + vectorOp.getName() + " vectorization context " + vContext.toString());
        if (vectorOp instanceof VectorizationContextRegion) {
-          LOG.debug("Vectorized MapWork operator " + vectorOp.getName() + " added vectorization context " + vContext.toString());
+          VectorizationContextRegion vcRegion = (VectorizationContextRegion) vectorOp;
+          VectorizationContext vNewContext = vcRegion.getOuputVectorizationContext();
+          LOG.debug("Vectorized MapWork operator " + vectorOp.getName() + " added vectorization context " + vNewContext.toString());
        }
      }
 
@@ -733,8 +720,7 @@ public class Vectorizer implements Physi
  class ReduceWorkVectorizationNodeProcessor extends VectorizationNodeProcessor {
 
    private final List reduceColumnNames;
-
-    private VectorizationContext reduceShuffleVectorizationContext;
+    private final List reduceTypeInfos;
 
    private Operator rootVectorOp;
 
@@ -742,10 +728,12 @@ public class Vectorizer implements Physi
      return rootVectorOp;
    }
 
-    public ReduceWorkVectorizationNodeProcessor(List reduceColumnNames) {
+    public ReduceWorkVectorizationNodeProcessor(List reduceColumnNames,
+            List reduceTypeInfos) {
+      super();
      this.reduceColumnNames = reduceColumnNames;
+      this.reduceTypeInfos = reduceTypeInfos;
      rootVectorOp = null;
-      reduceShuffleVectorizationContext = null;
    }
 
    @Override
@@ -753,8 +741,6 @@ public class Vectorizer implements Physi
        Object... nodeOutputs) throws SemanticException {
      Operator op = (Operator) nd;
 
-      LOG.info("ReduceWorkVectorizationNodeProcessor processing Operator: " +
-              op.getName() + "...");
 
      VectorizationContext vContext = null;
@@ -763,25 +749,30 @@ public class Vectorizer implements Physi
      if (op.getParentOperators().size() == 0) {
        LOG.info("ReduceWorkVectorizationNodeProcessor process reduceColumnNames " +
                reduceColumnNames.toString());
 
-        vContext = new VectorizationContext(reduceColumnNames);
-        vContext.setFileKey("_REDUCE_SHUFFLE_");
-        scratchColumnContext.put("_REDUCE_SHUFFLE_", vContext);
-        reduceShuffleVectorizationContext = vContext;
+        vContext = new VectorizationContext("__Reduce_Shuffle__", reduceColumnNames);
+        taskVectorizationContext = vContext;
+        int i = 0;
+        for (TypeInfo typeInfo : reduceTypeInfos) {
+          taskColumnTypeNameMap.put(i, typeInfo.getTypeName());
+          i++;
+        }
        saveRootVectorOp = true;
 
        if (LOG.isDebugEnabled()) {
          LOG.debug("Vectorized ReduceWork reduce shuffle vectorization context " + vContext.toString());
        }
      } else {
+        LOG.info("ReduceWorkVectorizationNodeProcessor process going to walk the operator stack to get vectorization context for " + op.getName());
        vContext = walkStackToFindVectorizationContext(stack, op);
        if (vContext == null) {
          // If we didn't find a context among the operators, assume the top -- reduce shuffle's
          // vectorization context.
-          vContext = reduceShuffleVectorizationContext;
+          vContext = taskVectorizationContext;
        }
      }
 
      assert vContext != null;
+      LOG.info("ReduceWorkVectorizationNodeProcessor process operator " + op.getName() + " using vectorization context" + vContext.toString());
 
      // When Vectorized GROUPBY outputs rows instead of vectorized row batchs, we don't
      // vectorize the operators below it.
@@ -796,9 +787,10 @@ public class Vectorizer implements Physi
      Operator vectorOp = doVectorize(op, vContext);
 
      if (LOG.isDebugEnabled()) {
-        LOG.debug("Vectorized ReduceWork operator " + vectorOp.getName() + " vectorization context " + vContext.toString());
        if (vectorOp instanceof VectorizationContextRegion) {
-          LOG.debug("Vectorized ReduceWork operator " + vectorOp.getName() + " added vectorization context " + vContext.toString());
+          VectorizationContextRegion vcRegion = (VectorizationContextRegion) vectorOp;
+          VectorizationContext vNewContext = vcRegion.getOuputVectorizationContext();
+          LOG.debug("Vectorized ReduceWork operator " + vectorOp.getName() + " added vectorization context " + vNewContext.toString());
        }
      }
      if (vectorOp instanceof VectorGroupByOperator) {
@@ -816,7 +808,7 @@ public class Vectorizer implements Physi
  private static class ValidatorVectorizationContext extends VectorizationContext {
    private ValidatorVectorizationContext() {
-      super();
+      super("No Name");
    }
 
    @Override
@@ -831,25 +823,27 @@ public class Vectorizer implements Physi
  }
 
  @Override
-  public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
-    this.physicalContext = pctx;
-    boolean vectorPath = HiveConf.getBoolVar(pctx.getConf(),
+  public PhysicalContext resolve(PhysicalContext physicalContext) throws SemanticException {
+    this.physicalContext = physicalContext;
+    hiveConf = physicalContext.getConf();
+
+    boolean vectorPath = HiveConf.getBoolVar(hiveConf,
        HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED);
    if (!vectorPath) {
      LOG.info("Vectorization is disabled");
-      return pctx;
+      return physicalContext;
    }
    // create dispatcher and graph walker
-    Dispatcher disp = new VectorizationDispatcher(pctx);
+    Dispatcher disp = new VectorizationDispatcher(physicalContext);
    TaskGraphWalker ogw = new TaskGraphWalker(disp);
    // get all the tasks nodes from root task
    ArrayList topNodes = new ArrayList();
-    topNodes.addAll(pctx.getRootTasks());
+    topNodes.addAll(physicalContext.getRootTasks());
 
    // begin to walk through the task tree.
    ogw.startWalking(topNodes, null);
-    return pctx;
+    return physicalContext;
  }
 
  boolean validateMapWorkOperator(Operator op, MapWork mWork, boolean isTez) {
@@ -901,7 +895,7 @@ public class Vectorizer implements Physi
        }
        break;
      case GROUPBY:
-        if (HiveConf.getBoolVar(physicalContext.getConf(),
+        if (HiveConf.getBoolVar(hiveConf,
            HiveConf.ConfVars.HIVE_VECTORIZATION_REDUCE_GROUPBY_ENABLED)) {
          ret = validateGroupByOperator((GroupByOperator) op, true, true);
        } else {
@@ -1262,20 +1256,24 @@ public class Vectorizer implements Physi
    return supportedDataTypesPattern.matcher(type.toLowerCase()).matches();
  }
 
-  private VectorizationContext getVectorizationContext(Operator op,
-      PhysicalContext pctx) {
-    RowSchema rs = op.getSchema();
+  private VectorizationContext getVectorizationContext(RowSchema rowSchema, String contextName,
+      Map typeNameMap) {
+
+    VectorizationContext vContext = new VectorizationContext(contextName);
 
    // Add all non-virtual columns to make a vectorization context for
    // the TableScan operator.
-    VectorizationContext vContext = new VectorizationContext();
-    for (ColumnInfo c : rs.getSignature()) {
+    int i = 0;
+    for (ColumnInfo c : rowSchema.getSignature()) {
      // Earlier, validation code should have eliminated virtual columns usage (HIVE-5560).
      if (!isVirtualColumn(c)) {
        vContext.addInitialColumn(c.getInternalName());
+        typeNameMap.put(i, c.getTypeName());
+        i++;
      }
    }
    vContext.finishedAddingInitialColumns();
+
    return vContext;
  }
 
@@ -1333,40 +1331,14 @@ public class Vectorizer implements Physi
    return false;
  }
 
-  public void debugDisplayAllMaps(Map> allColumnVectorMaps,
-      Map> allScratchColumnVectorTypeMaps) {
+  public void debugDisplayAllMaps(BaseWork work) {
 
-    // Context keys grow in length since they are a path...
-    Comparator comparerShorterString = new Comparator() {
-      @Override
-      public int compare(String o1, String o2) {
-        Integer length1 = o1.length();
-        Integer length2 = o2.length();
-        return length1.compareTo(length2);
-      }};
-
-    Comparator comparerInteger = new Comparator() {
-      @Override
-      public int compare(Integer o1, Integer o2) {
-        return o1.compareTo(o2);
-      }};
-
-    Map> sortedAllColumnVectorMaps = new TreeMap>(comparerShorterString);
-    for (Map.Entry> entry : allColumnVectorMaps.entrySet()) {
-      Map sortedColumnMap = new TreeMap(comparerInteger);
-      for (Map.Entry innerEntry : entry.getValue().entrySet()) {
-        sortedColumnMap.put(innerEntry.getValue(), innerEntry.getKey());
-      }
-      sortedAllColumnVectorMaps.put(entry.getKey(), sortedColumnMap);
-    }
-    LOG.debug("sortedAllColumnVectorMaps " + sortedAllColumnVectorMaps);
-
-    Map> sortedAllScratchColumnVectorTypeMap = new TreeMap>(comparerShorterString);
-    for (Map.Entry> entry : allScratchColumnVectorTypeMaps.entrySet()) {
-      Map sortedScratchColumnTypeMap = new TreeMap(comparerInteger);
-      sortedScratchColumnTypeMap.putAll(entry.getValue());
-      sortedAllScratchColumnVectorTypeMap.put(entry.getKey(), sortedScratchColumnTypeMap);
-    }
-    LOG.debug("sortedAllScratchColumnVectorTypeMap " + sortedAllScratchColumnVectorTypeMap);
+    Map columnNameMap = work.getVectorColumnNameMap();
+    Map columnTypeMap = work.getVectorColumnTypeMap();
+    Map scratchColumnTypeMap = work.getVectorScratchColumnTypeMap();
+
+    LOG.debug("debugDisplayAllMaps columnNameMap " + columnNameMap.toString());
+    LOG.debug("debugDisplayAllMaps columnTypeMap " + columnTypeMap.toString());
+    LOG.debug("debugDisplayAllMaps scratchColumnTypeMap " + scratchColumnTypeMap.toString());
  }
}
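
For context on the Vectorizer hunks above: the patch replaces the per-file-path scratchColumnContext with a single named VectorizationContext per Map/Reduce task, whose projection and scratch-column maps are published on the work object. The following self-contained toy sketch mirrors that bookkeeping; it is illustrative only (the class and field names here are invented, not Hive's), and "__Reduce_Shuffle__" is the context name the patch itself uses.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Toy model of the task-level context introduced above: one named context
    // per task, exposing a projection map (column name -> vector column index)
    // and a scratch map (vector column index -> type name).
    class ToyVectorizationContext {
      private final String contextName;  // e.g. "__Reduce_Shuffle__"
      private final Map<String, Integer> projectionColumnMap = new HashMap<>();
      private final Map<Integer, String> scratchColumnTypeMap = new HashMap<>();
      private int nextColumn = 0;

      ToyVectorizationContext(String contextName, List<String> initialColumns) {
        this.contextName = contextName;
        for (String col : initialColumns) {
          projectionColumnMap.put(col, nextColumn++);
        }
      }

      // Scratch columns are allocated past the initial projection columns.
      int allocateScratchColumn(String typeName) {
        int column = nextColumn++;
        scratchColumnTypeMap.put(column, typeName);
        return column;
      }

      Map<String, Integer> getProjectionColumnMap() { return projectionColumnMap; }
      Map<Integer, String> getScratchColumnTypeMap() { return scratchColumnTypeMap; }
      @Override public String toString() { return contextName + " " + projectionColumnMap; }
    }

Because there is now exactly one such context per task, the three flat maps stored on BaseWork (name, type, scratch type) replace the old nested per-path maps, which is what simplifies debugDisplayAllMaps above.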

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Tue Apr 14 14:47:30 2015
@@ -196,18 +196,13 @@ public class PartitionPruner implements
     // Remove all parts that are not partition columns. See javadoc for details.
     ExprNodeDesc compactExpr = compactExpr(prunerExpr.clone());
     String oldFilter = prunerExpr.getExprString();
-    if (isBooleanExpr(compactExpr)) {
-      // For null and true values, return every partition
-      if (!isFalseExpr(compactExpr)) {
-        // Non-strict mode, and all the predicates are on non-partition columns - get everything.
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Filter " + oldFilter + " was null after compacting");
-        }
-        return getAllPartsFromCacheOrServer(tab, key, true, prunedPartitionsMap);
-      } else {
-        return new PrunedPartitionList(tab, new LinkedHashSet(new ArrayList()),
-            new ArrayList(), false);
-      }
+    if (compactExpr == null || isBooleanExpr(compactExpr)) {
+      if (isFalseExpr(compactExpr)) {
+        return new PrunedPartitionList(
+            tab, new LinkedHashSet(0), new ArrayList(0), false);
+      }
+      // For null and true values, return every partition
+      return getAllPartsFromCacheOrServer(tab, key, true, prunedPartitionsMap);
     }
     if (LOG.isDebugEnabled()) {
       LOG.debug("Filter w/ compacting: " + compactExpr.getExprString()
@@ -241,22 +236,22 @@ public class PartitionPruner implements
     partsCache.put(key, ppList);
     return ppList;
   }
-
+
   static private boolean isBooleanExpr(ExprNodeDesc expr) {
-    return  expr != null && expr instanceof ExprNodeConstantDesc &&
+    return  expr != null && expr instanceof ExprNodeConstantDesc &&
           ((ExprNodeConstantDesc)expr).getTypeInfo() instanceof PrimitiveTypeInfo &&
           ((PrimitiveTypeInfo)(((ExprNodeConstantDesc)expr).getTypeInfo())).
-          getTypeName().equals(serdeConstants.BOOLEAN_TYPE_NAME);
+          getTypeName().equals(serdeConstants.BOOLEAN_TYPE_NAME);
   }
   static private boolean isTrueExpr(ExprNodeDesc expr) {
-      return  isBooleanExpr(expr) &&
-          ((ExprNodeConstantDesc)expr).getValue() != null &&
-          ((ExprNodeConstantDesc)expr).getValue().equals(Boolean.TRUE);
+      return  isBooleanExpr(expr) &&
+          ((ExprNodeConstantDesc)expr).getValue() != null &&
+          ((ExprNodeConstantDesc)expr).getValue().equals(Boolean.TRUE);
   }
   static private boolean isFalseExpr(ExprNodeDesc expr) {
-      return  isBooleanExpr(expr) &&
+      return  isBooleanExpr(expr) &&
           ((ExprNodeConstantDesc)expr).getValue() != null &&
-          ((ExprNodeConstantDesc)expr).getValue().equals(Boolean.FALSE);
+          ((ExprNodeConstantDesc)expr).getValue().equals(Boolean.FALSE);
   }
 
   /**
@@ -268,42 +263,48 @@ public class PartitionPruner implements
    */
   static private ExprNodeDesc compactExpr(ExprNodeDesc expr) {
     // If this is a constant boolean expression, return the value.
-    if (expr == null) {
-      return null;
-    }
-    if (expr instanceof ExprNodeConstantDesc) {
-      if (isBooleanExpr(expr)) {
-        return expr;
-      } else {
-        throw new IllegalStateException("Unexpected non-null ExprNodeConstantDesc: "
-          + expr.getExprString());
+    if (expr == null) {
+      return null;
+    }
+    if (expr instanceof ExprNodeConstantDesc) {
+      if (((ExprNodeConstantDesc)expr).getValue() == null) return null;
+      if (!isBooleanExpr(expr)) {
+        throw new IllegalStateException("Unexpected non-boolean ExprNodeConstantDesc: "
            + expr.getExprString());
       }
+      return expr;
     } else if (expr instanceof ExprNodeGenericFuncDesc) {
       GenericUDF udf = ((ExprNodeGenericFuncDesc)expr).getGenericUDF();
       boolean isAnd = udf instanceof GenericUDFOPAnd;
       boolean isOr = udf instanceof GenericUDFOPOr;
-
+
       if (isAnd || isOr) {
         List children = expr.getChildren();
-        ExprNodeDesc left = children.get(0);
-        children.set(0, compactExpr(left));
-        ExprNodeDesc right = children.get(1);
-        children.set(1, compactExpr(right));
-
-        if (isTrueExpr(children.get(0)) && isTrueExpr(children.get(1))) {
-          return new ExprNodeConstantDesc(Boolean.TRUE);
-        } else if (isTrueExpr(children.get(0)))  {
-          return isAnd ? children.get(1) : new ExprNodeConstantDesc(Boolean.TRUE);
-        } else if (isTrueExpr(children.get(1))) {
-          return isAnd ? children.get(0) : new ExprNodeConstantDesc(Boolean.TRUE);
-        } else if (isFalseExpr(children.get(0)) && isFalseExpr(children.get(1))) {
-          return new ExprNodeConstantDesc(Boolean.FALSE);
-        } else if (isFalseExpr(children.get(0))) {
-          return isAnd ? new ExprNodeConstantDesc(Boolean.FALSE) : children.get(1);
-        } else if (isFalseExpr(children.get(1))) {
-          return isAnd ? new ExprNodeConstantDesc(Boolean.FALSE) : children.get(0);
-        }
-
+        ExprNodeDesc left = compactExpr(children.get(0));
+        ExprNodeDesc right = compactExpr(children.get(1));
+        // Non-partition expressions are converted to nulls.
+        if (left == null && right == null) {
+          return null;
+        } else if (left == null) {
+          return isAnd ? right : null;
+        } else if (right == null) {
+          return isAnd ? left : null;
+        }
+        // Handle boolean expressions
+        boolean isLeftFalse = isFalseExpr(left), isRightFalse = isFalseExpr(right),
+            isLeftTrue = isTrueExpr(left), isRightTrue = isTrueExpr(right);
+        if ((isRightTrue && isLeftTrue) || (isOr && (isLeftTrue || isRightTrue))) {
+          return new ExprNodeConstantDesc(Boolean.TRUE);
+        } else if ((isRightFalse && isLeftFalse) || (isAnd && (isLeftFalse || isRightFalse))) {
+          return new ExprNodeConstantDesc(Boolean.FALSE);
+        } else if ((isAnd && isLeftTrue) || (isOr && isLeftFalse)) {
+          return right;
+        } else if ((isAnd && isRightTrue) || (isOr && isRightFalse)) {
+          return left;
+        }
+        // Nothing to compact, update expr with compacted children.
+        children.set(0, left);
+        children.set(1, right);
       }
       return expr;
     } else {
@@ -328,9 +329,9 @@ public class PartitionPruner implements
       if (!partCols.contains(column)) {
         // Column doesn't appear to be a partition column for the table.
         return new ExprNodeConstantDesc(expr.getTypeInfo(), null);
-      }
+      }
       referred.add(column);
-    }
+    }
     if (expr instanceof ExprNodeGenericFuncDesc) {
       List children = expr.getChildren();
       for (int i = 0; i < children.size(); ++i) {
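
The rewritten compactExpr above recurses into the children first and then folds: null children (predicates on non-partition columns) drop out of an AND but poison an OR, and constant TRUE/FALSE children short-circuit. A self-contained sketch of the same folding rules over a toy expression representation (illustrative only; Hive's version operates on ExprNodeDesc trees):

    // Toy three-valued folding mirroring the patched compactExpr:
    // null = "not a partition predicate", Boolean.TRUE/FALSE = constants,
    // any other object = an expression that must be kept.
    final class PrunerFolding {
      static Object fold(boolean isAnd, Object left, Object right) {
        if (left == null && right == null) return null;
        if (left == null) return isAnd ? right : null;
        if (right == null) return isAnd ? left : null;
        boolean lTrue = Boolean.TRUE.equals(left), rTrue = Boolean.TRUE.equals(right);
        boolean lFalse = Boolean.FALSE.equals(left), rFalse = Boolean.FALSE.equals(right);
        if ((lTrue && rTrue) || (!isAnd && (lTrue || rTrue))) return Boolean.TRUE;
        if ((lFalse && rFalse) || (isAnd && (lFalse || rFalse))) return Boolean.FALSE;
        if ((isAnd && lTrue) || (!isAnd && lFalse)) return right;
        if ((isAnd && rTrue) || (!isAnd && rFalse)) return left;
        return "(" + left + (isAnd ? " and " : " or ") + right + ")";
      }

      public static void main(String[] args) {
        // "ds = '2015'" AND <non-partition predicate> -> keeps the partition half.
        System.out.println(fold(true, "ds = '2015'", null));   // ds = '2015'
        // <non-partition predicate> OR anything -> null: the whole OR is unknowable.
        System.out.println(fold(false, null, "ds = '2015'"));  // null
        System.out.println(fold(true, Boolean.FALSE, "x"));    // false
      }
    }

The caller-side change matches: a null or true compacted filter now fetches every partition, while a constant false short-circuits to an empty PrunedPartitionList.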

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java Tue Apr 14 14:47:30 2015
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde.serdeConstants;
 
 /**
  * ColumnStatsSemanticAnalyzer.
@@ -186,15 +187,7 @@ public class ColumnStatsSemanticAnalyzer
       } else {
         whereClause.append(" and ");
       }
-      whereClause.append(partKey);
-      whereClause.append(" = ");
-      if (getColTypeOf(partKey).equalsIgnoreCase("string")) {
-        whereClause.append("'");
-      }
-      whereClause.append(value);
-      if (getColTypeOf(partKey).equalsIgnoreCase("string")) {
-        whereClause.append("'");
-      }
+      whereClause.append(partKey).append(" = ").append(genPartValueString(partKey, value));
     }
   }
 
@@ -211,11 +204,39 @@ public class ColumnStatsSemanticAnalyzer
     return predPresent ? whereClause.append(groupByClause) : groupByClause;
   }
 
+  private String genPartValueString (String partKey, String partVal) throws SemanticException {
+    String returnVal = partVal;
+    String partColType = getColTypeOf(partKey);
+    if (partColType.equals(serdeConstants.STRING_TYPE_NAME) ||
+        partColType.contains(serdeConstants.VARCHAR_TYPE_NAME) ||
+        partColType.contains(serdeConstants.CHAR_TYPE_NAME)) {
+      returnVal = "'" + partVal + "'";
+    } else if (partColType.equals(serdeConstants.TINYINT_TYPE_NAME)) {
+      returnVal = partVal + "Y";
+    } else if (partColType.equals(serdeConstants.SMALLINT_TYPE_NAME)) {
+      returnVal = partVal + "S";
+    } else if (partColType.equals(serdeConstants.INT_TYPE_NAME)) {
+      returnVal = partVal;
+    } else if (partColType.equals(serdeConstants.BIGINT_TYPE_NAME)) {
+      returnVal = partVal + "L";
+    } else if (partColType.contains(serdeConstants.DECIMAL_TYPE_NAME)) {
+      returnVal = partVal + "BD";
+    } else if (partColType.equals(serdeConstants.DATE_TYPE_NAME) ||
+        partColType.equals(serdeConstants.TIMESTAMP_TYPE_NAME)) {
+      returnVal = partColType + " '" + partVal + "'";
+    } else {
+      //for other usually not used types, just quote the value
+      returnVal = "'" + partVal + "'";
+    }
+
+    return returnVal;
+  }
+
   private String getColTypeOf (String partKey) throws SemanticException{
 
     for (FieldSchema fs : tbl.getPartitionKeys()) {
       if (partKey.equalsIgnoreCase(fs.getName())) {
-        return fs.getType();
+        return fs.getType().toLowerCase();
       }
     }
     throw new SemanticException ("Unknown partition key : " + partKey);
   }
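
genPartValueString above renders a partition value as a typed SQL literal, so the rewritten WHERE clause compares with the column's real type instead of always quoting as a string. A self-contained sketch of the same suffix/quoting rules (the plain type-name strings below stand in for the serdeConstants fields):

    // Toy version of the literal rendering introduced above. Hive reads the
    // type from the table's partition keys; here it is passed in directly.
    final class PartLiteral {
      static String render(String colType, String value) {
        switch (colType) {
          case "tinyint":   return value + "Y";
          case "smallint":  return value + "S";
          case "int":       return value;
          case "bigint":    return value + "L";
          case "date":
          case "timestamp": return colType + " '" + value + "'";
          default:
            if (colType.contains("decimal")) return value + "BD";
            // string, varchar, char and anything else: quote it.
            return "'" + value + "'";
        }
      }

      public static void main(String[] args) {
        System.out.println(render("smallint", "5"));        // 5S
        System.out.println(render("decimal(10,2)", "5.3")); // 5.3BD
        System.out.println(render("date", "2015-04-14"));   // date '2015-04-14'
        System.out.println(render("string", "part1"));      // 'part1'
      }
    }

The lowercasing added to getColTypeOf makes these comparisons reliable regardless of how the partition key type was declared.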

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue Apr 14 14:47:30 2015
@@ -11013,9 +11013,10 @@ public class SemanticAnalyzer extends Ba
 
   // Process the position alias in GROUPBY and ORDERBY
   private void processPositionAlias(ASTNode ast) throws SemanticException {
+    boolean isByPos = false;
     if (HiveConf.getBoolVar(conf,
-          HiveConf.ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS) == false) {
-      return;
+          HiveConf.ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS) == true) {
+      isByPos = true;
     }
 
     if (ast.getChildCount() == 0) {
@@ -11049,15 +11050,20 @@ public class SemanticAnalyzer extends Ba
         for (int child_pos = 0; child_pos < groupbyNode.getChildCount(); ++child_pos) {
           ASTNode node = (ASTNode) groupbyNode.getChild(child_pos);
           if (node.getToken().getType() == HiveParser.Number) {
-            int pos = Integer.parseInt(node.getText());
-            if (pos > 0 && pos <= selectExpCnt) {
-              groupbyNode.setChild(child_pos,
-                selectNode.getChild(pos - 1).getChild(0));
+            if (isByPos) {
+              int pos = Integer.parseInt(node.getText());
+              if (pos > 0 && pos <= selectExpCnt) {
+                groupbyNode.setChild(child_pos,
+                  selectNode.getChild(pos - 1).getChild(0));
+              } else {
+                throw new SemanticException(
+                  ErrorMsg.INVALID_POSITION_ALIAS_IN_GROUPBY.getMsg(
+                  "Position alias: " + pos + " does not exist\n" +
+                  "The Select List is indexed from 1 to " + selectExpCnt));
+              }
             } else {
-              throw new SemanticException(
-                ErrorMsg.INVALID_POSITION_ALIAS_IN_GROUPBY.getMsg(
-                "Position alias: " + pos + " does not exist\n" +
-                "The Select List is indexed from 1 to " + selectExpCnt));
+              warn("Using constant number " + node.getText() +
+                " in group by. If you try to use position alias when hive.groupby.orderby.position.alias is false, the position alias will be ignored.");
             }
           }
         }
@@ -11076,19 +11082,24 @@ public class SemanticAnalyzer extends Ba
           ASTNode colNode = (ASTNode) orderbyNode.getChild(child_pos);
           ASTNode node = (ASTNode) colNode.getChild(0);
           if (node.getToken().getType() == HiveParser.Number) {
-            if (!isAllCol) {
-              int pos = Integer.parseInt(node.getText());
-              if (pos > 0 && pos <= selectExpCnt) {
-                colNode.setChild(0, selectNode.getChild(pos - 1).getChild(0));
+            if( isByPos ) {
+              if (!isAllCol) {
+                int pos = Integer.parseInt(node.getText());
+                if (pos > 0 && pos <= selectExpCnt) {
+                  colNode.setChild(0, selectNode.getChild(pos - 1).getChild(0));
+                } else {
+                  throw new SemanticException(
+                    ErrorMsg.INVALID_POSITION_ALIAS_IN_ORDERBY.getMsg(
+                    "Position alias: " + pos + " does not exist\n" +
+                    "The Select List is indexed from 1 to " + selectExpCnt));
+                }
               } else {
                 throw new SemanticException(
-                  ErrorMsg.INVALID_POSITION_ALIAS_IN_ORDERBY.getMsg(
-                  "Position alias: " + pos + " does not exist\n" +
-                  "The Select List is indexed from 1 to " + selectExpCnt));
+                  ErrorMsg.NO_SUPPORTED_ORDERBY_ALLCOLREF_POS.getMsg());
               }
-            } else {
-              throw new SemanticException(
-                ErrorMsg.NO_SUPPORTED_ORDERBY_ALLCOLREF_POS.getMsg());
+            } else { //if not using position alias and it is a number.
+              warn("Using constant number " + node.getText() +
+                " in order by. If you try to use position alias when hive.groupby.orderby.position.alias is false, the position alias will be ignored.");
             }
           }
         }
@@ -12089,4 +12100,8 @@ public class SemanticAnalyzer extends Ba
       queryProperties.setOuterQueryLimit(qb.getParseInfo().getOuterQueryLimit());
     }
   }
+  private void warn(String msg) {
+    SessionState.getConsole().printInfo(
+        String.format("Warning: %s", msg));
+  }
 }

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java Tue Apr 14 14:47:30 2015
@@ -140,6 +140,7 @@ public class UpdateDeleteSemanticAnalyze
     }
 
     List partCols = mTable.getPartCols();
+    List bucketingCols = mTable.getBucketCols();
 
     rewrittenQueryStr.append("insert into table ");
     rewrittenQueryStr.append(getDotName(tableName));
@@ -199,7 +200,10 @@ public class UpdateDeleteSemanticAnalyze
           }
         }
       }
-
+      //updating bucket column should move row from one file to another - not supported
+      if(bucketingCols != null && bucketingCols.contains(columnName)) {
+        throw new SemanticException(ErrorMsg.UPDATE_CANNOT_UPDATE_BUCKET_VALUE,columnName);
+      }
       // This means that in UPDATE T SET x = _something_
       // _something_ can be whatever is supported in SELECT _something_
       setCols.put(columnName, (ASTNode)assignment.getChildren().get(1));

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java Tue Apr 14 14:47:30 2015
@@ -60,8 +60,9 @@ public abstract class BaseWork extends A
   private String name;
 
   // Vectorization.
-  protected Map> allScratchColumnVectorTypeMaps = null;
-  protected Map> allColumnVectorMaps = null;
+  protected Map vectorColumnNameMap;
+  protected Map vectorColumnTypeMap;
+  protected Map vectorScratchColumnTypeMap;
 
   public void setGatheringStats(boolean gatherStats) {
     this.gatheringStats = gatherStats;
@@ -143,21 +144,28 @@ public abstract class BaseWork extends A
     return returnSet;
   }
 
-  public Map> getAllScratchColumnVectorTypeMaps() {
-    return allScratchColumnVectorTypeMaps;
+  public Map getVectorColumnNameMap() {
+    return vectorColumnNameMap;
   }
 
-  public void setAllScratchColumnVectorTypeMaps(
-      Map> allScratchColumnVectorTypeMaps) {
-    this.allScratchColumnVectorTypeMaps = allScratchColumnVectorTypeMaps;
+  public void setVectorColumnNameMap(Map vectorColumnNameMap) {
+    this.vectorColumnNameMap = vectorColumnNameMap;
   }
 
-  public Map> getAllColumnVectorMaps() {
-    return allColumnVectorMaps;
+  public Map getVectorColumnTypeMap() {
+    return vectorColumnTypeMap;
  }
 
-  public void setAllColumnVectorMaps(Map> allColumnVectorMaps) {
-    this.allColumnVectorMaps = allColumnVectorMaps;
+  public void setVectorColumnTypeMap(Map vectorColumnTypeMap) {
+    this.vectorColumnTypeMap = vectorColumnTypeMap;
+  }
+
+  public Map getVectorScratchColumnTypeMap() {
+    return vectorScratchColumnTypeMap;
+  }
+
+  public void setVectorScratchColumnTypeMap(Map vectorScratchColumnTypeMap) {
+    this.vectorScratchColumnTypeMap = vectorScratchColumnTypeMap;
  }
 
  /**

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java Tue Apr 14 14:47:30 2015
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.sessio
 
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.net.URL;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.LinkedList;
@@ -58,8 +59,11 @@ public class DependencyResolver {
 
     // If HIVE_HOME is not defined or file is not found in HIVE_HOME/conf then load default ivysettings.xml from class loader
     if (ivysettingsPath == null || !(new File(ivysettingsPath).exists())) {
-      ivysettingsPath = ClassLoader.getSystemResource("ivysettings.xml").getFile();
-      _console.printInfo("ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR," + ivysettingsPath + " will be used");
+      URL ivysetttingsResource = ClassLoader.getSystemResource("ivysettings.xml");
+      if (ivysetttingsResource != null){
+        ivysettingsPath = ivysetttingsResource.getFile();
+        _console.printInfo("ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR," + ivysettingsPath + " will be used");
+      }
     }
   }

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java Tue Apr 14 14:47:30 2015
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.sessio
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.io.IOUtils;
 
 import java.io.*;
@@ -36,10 +37,38 @@ public class OperationLog {
   private final String operationName;
   private final LogFile logFile;
+  private LoggingLevel opLoggingLevel = LoggingLevel.UNKNOWN;
 
-  public OperationLog(String name, File file) throws FileNotFoundException{
+  public static enum LoggingLevel {
+    NONE, EXECUTION, PERFORMANCE, VERBOSE, UNKNOWN
+  }
+
+  public OperationLog(String name, File file, HiveConf hiveConf) throws FileNotFoundException {
     operationName = name;
     logFile = new LogFile(file);
+
+    if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
+      String logLevel = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL);
+      opLoggingLevel = getLoggingLevel(logLevel);
+    }
+  }
+
+  public static LoggingLevel getLoggingLevel (String mode) {
+    if (mode.equalsIgnoreCase("none")) {
+      return LoggingLevel.NONE;
+    } else if (mode.equalsIgnoreCase("execution")) {
+      return LoggingLevel.EXECUTION;
+    } else if (mode.equalsIgnoreCase("verbose")) {
+      return LoggingLevel.VERBOSE;
+    } else if (mode.equalsIgnoreCase("performance")) {
+      return LoggingLevel.PERFORMANCE;
+    } else {
+      return LoggingLevel.UNKNOWN;
+    }
+  }
+
+  public LoggingLevel getOpLoggingLevel() {
+    return opLoggingLevel;
   }
 
   /**
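
OperationLog above now reads the HiveServer2 operation-logging level from configuration when per-operation logging is enabled. For reference, a minimal self-contained variant of the string-to-level mapping (a sketch, assuming only the none/execution/performance/verbose values shown in the diff; unlike the patch's if/else chain, the valueOf form below also accepts "unknown" directly):

    // Case-insensitive mapping of the hive.server2.logging.operation.level
    // value, with UNKNOWN as the fallback for unrecognized strings.
    enum LoggingLevel { NONE, EXECUTION, PERFORMANCE, VERBOSE, UNKNOWN }

    final class LoggingLevelDemo {
      static LoggingLevel parse(String mode) {
        try {
          return LoggingLevel.valueOf(mode.toUpperCase());
        } catch (IllegalArgumentException e) {
          return LoggingLevel.UNKNOWN;
        }
      }

      public static void main(String[] args) {
        System.out.println(parse("Verbose"));   // VERBOSE
        System.out.println(parse("whatever"));  // UNKNOWN
      }
    }

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java Tue Apr 14 14:47:30 2015
@@ -82,7 +82,7 @@ public class JDBCStatsAggregator impleme
     Utilities.SQLCommand setQueryTimeout = new Utilities.SQLCommand() {
       @Override
       public Void run(PreparedStatement stmt) throws SQLException {
-        stmt.setQueryTimeout(timeout);
+        Utilities.setQueryTimeout(stmt, timeout);
         return null;
       }
     };

Modified: hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java (original)
+++ hive/branches/hbase-metastore/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java Tue Apr 14 14:47:30 2015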
@@ -82,7 +82,7 @@ public class JDBCStatsPublisher implemen
     Utilities.SQLCommand setQueryTimeout = new Utilities.SQLCommand() {
       @Override
       public Void run(PreparedStatement stmt) throws SQLException {
-        stmt.setQueryTimeout(timeout);
+        Utilities.setQueryTimeout(stmt, timeout);
         return null;
       }
     };
@@ -279,7 +279,7 @@ public class JDBCStatsPublisher implemen
 
       conn = DriverManager.getConnection(connectionString);
 
       stmt = conn.createStatement();
-      stmt.setQueryTimeout(timeout);
+      Utilities.setQueryTimeout(stmt, timeout);
 
       // TODO: why is this not done using Hive db scripts?
       // Check if the table exists

Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java Tue Apr 14 14:47:30 2015
@@ -23,9 +23,11 @@ import java.util.Set;
 
 import junit.framework.Assert;
 import junit.framework.TestCase;
+import org.junit.Test;
 
-public class TestErrorMsg extends TestCase {
+public class TestErrorMsg {
 
+  @Test
   public void testUniqueErrorCode() {
     Set numbers = new HashSet();
     for (ErrorMsg err : ErrorMsg.values()) {
@@ -33,4 +35,15 @@ public class TestErrorMsg extends TestCa
       Assert.assertTrue("duplicated error number " + code, numbers.add(code));
     }
   }
+  @Test
+  public void testReverseMatch() {
+    testReverseMatch(ErrorMsg.OP_NOT_ALLOWED_IN_AUTOCOMMIT, "COMMIT");
+    testReverseMatch(ErrorMsg.OP_NOT_ALLOWED_IN_TXN, "ALTER TABLE", "1");
+    testReverseMatch(ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN, "ROLLBACK");
+  }
+  private void testReverseMatch(ErrorMsg errorMsg, String... args) {
+    String parametrizedMsg = errorMsg.format(args);
+    ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(parametrizedMsg);
+    Assert.assertEquals("Didn't find expected msg", errorMsg.getErrorCode(), canonicalMsg.getErrorCode());
+  }
 }

Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java Tue Apr 14 14:47:30 2015
@@ -50,10 +50,10 @@ public class TestBytesBytesMultiHashMap
     BytesBytesMultiHashMap map = new BytesBytesMultiHashMap(CAPACITY, LOAD_FACTOR, WB_SIZE);
     RandomKvSource kv = new RandomKvSource(0, 0);
     map.put(kv, -1);
-    verifyResults(map, kv.getLastKey(), kv.getLastValue());
+    verifyHashMapResult(map, kv.getLastKey(), kv.getLastValue());
     kv = new RandomKvSource(10, 100);
     map.put(kv, -1);
-    verifyResults(map, kv.getLastKey(), kv.getLastValue());
+    verifyHashMapResult(map, kv.getLastKey(), kv.getLastValue());
   }
 
   @Test
@@ -61,12 +61,12 @@ public class TestBytesBytesMultiHashMap
     BytesBytesMultiHashMap map = new BytesBytesMultiHashMap(CAPACITY, LOAD_FACTOR, WB_SIZE);
     RandomKvSource kv = new RandomKvSource(0, 100);
     map.put(kv, -1);
-    verifyResults(map, kv.getLastKey(), kv.getLastValue());
+    verifyHashMapResult(map, kv.getLastKey(), kv.getLastValue());
     FixedKeyKvSource kv2 = new FixedKeyKvSource(kv.getLastKey(), 0, 100);
     kv2.values.add(kv.getLastValue());
     for (int i = 0; i < 3; ++i) {
       map.put(kv2, -1);
-      verifyResults(map, kv2.key, kv2.values.toArray(new byte[kv2.values.size()][]));
+      verifyHashMapResult(map, kv2.key, kv2.values.toArray(new byte[kv2.values.size()][]));
     }
   }
 
@@ -80,11 +80,11 @@ public class TestBytesBytesMultiHashMap
     FixedKeyKvSource kv2 = new FixedKeyKvSource(kv.getLastKey(), 0, 100);
     map.put(kv2, -1);
     key[0] = (byte)(key[0] + 1);
-    List results = new ArrayList(0);
-    map.getValueRefs(key, key.length, results);
-    assertTrue(results.isEmpty());
-    map.getValueRefs(key, 0, results);
-    assertTrue(results.isEmpty());
+    BytesBytesMultiHashMap.Result hashMapResult = new BytesBytesMultiHashMap.Result();
+    map.getValueResult(key, 0, key.length, hashMapResult);
+    assertTrue(!hashMapResult.hasRows());
+    map.getValueResult(key, 0, 0, hashMapResult);
+    assertTrue(!hashMapResult.hasRows());
   }
 
   @Test
@@ -96,13 +96,12 @@ public class TestBytesBytesMultiHashMap
       map.put(kv, -1);
     }
     for (int i = 0; i < kv.keys.size(); ++i) {
-      verifyResults(map, kv.keys.get(i), kv.values.get(i));
+      verifyHashMapResult(map, kv.keys.get(i), kv.values.get(i));
     }
     assertEquals(CAPACITY, map.getCapacity());
     // Get of non-existent key should terminate..
-    List results = new ArrayList(0);
-    map.getValueRefs(new byte[0], 0, results);
-    assertTrue(results.isEmpty());
+    BytesBytesMultiHashMap.Result hashMapResult = new BytesBytesMultiHashMap.Result();
+    map.getValueResult(new byte[0], 0, 0, hashMapResult);
   }
 
   @Test
@@ -113,23 +112,29 @@ public class TestBytesBytesMultiHashMap
     for (int i = 0; i < 18; ++i) {
       map.put(kv, -1);
       for (int j = 0; j <= i; ++j) {
-        verifyResults(map, kv.keys.get(j), kv.values.get(j));
+        verifyHashMapResult(map, kv.keys.get(j), kv.values.get(j));
       }
     }
     assertEquals(1 << 18, map.getCapacity());
   }
 
-  private void verifyResults(BytesBytesMultiHashMap map, byte[] key, byte[]... values) {
-    List results = new ArrayList(0);
-    byte state = map.getValueRefs(key, key.length, results);
-    assertEquals(state, results.size());
-    assertEquals(values.length, results.size());
+  private void verifyHashMapResult(BytesBytesMultiHashMap map, byte[] key, byte[]... values) {
+    BytesBytesMultiHashMap.Result hashMapResult = new BytesBytesMultiHashMap.Result();
+    byte state = map.getValueResult(key, 0, key.length, hashMapResult);
     HashSet hs = new HashSet();
-    for (int i = 0; i < results.size(); ++i) {
-      WriteBuffers.ByteSegmentRef result = results.get(i);
-      map.populateValue(result);
-      hs.add(result.copy());
+    int count = 0;
+    if (hashMapResult.hasRows()) {
+      WriteBuffers.ByteSegmentRef ref = hashMapResult.first();
+      while (ref != null) {
+        count++;
+        hs.add(ref.copy());
+        ref = hashMapResult.next();
+      }
+    } else {
+      assertTrue(hashMapResult.isEof());
    }
+    assertEquals(state, count);
+    assertEquals(values.length, count);
    for (int i = 0; i < values.length; ++i) {
      assertTrue(hs.contains(ByteBuffer.wrap(values[i])));
    }
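
The test changes above track an API change in BytesBytesMultiHashMap: instead of materializing all matching values into a List, getValueResult fills a reusable Result object that callers walk with first()/next(). A self-contained toy of that cursor pattern (illustrative only; the real Result hands out byte-segment references and also exposes isEof()):

    import java.util.Arrays;
    import java.util.List;

    // Toy cursor in the style of BytesBytesMultiHashMap.Result: the container
    // fills a reusable result object and callers iterate first()/next() until
    // null, avoiding a per-lookup ArrayList allocation.
    final class ToyResult {
      private List<String> rows = Arrays.asList();
      private int next;

      void set(List<String> rows) { this.rows = rows; this.next = 0; }
      boolean hasRows() { return !rows.isEmpty(); }
      String first() { next = 1; return rows.isEmpty() ? null : rows.get(0); }
      String next() { return next < rows.size() ? rows.get(next++) : null; }

      public static void main(String[] args) {
        ToyResult result = new ToyResult();      // reused across lookups
        result.set(Arrays.asList("v1", "v2"));   // what a lookup would fill in
        int count = 0;
        if (result.hasRows()) {
          for (String ref = result.first(); ref != null; ref = result.next()) {
            count++;
          }
        }
        System.out.println(count);               // 2
      }
    }

Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java Tue Apr 14 14:47:30 2015
@@ -88,7 +88,7 @@ public class TestVectorFilterOperator {
     ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Long.class, "col1", "table", false);
     List columns = new ArrayList();
     columns.add("col1");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     FilterDesc fdesc = new FilterDesc();
     fdesc.setPredicate(col1Expr);
     return new VectorFilterOperator(vc, fdesc);

Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java Tue Apr 14 14:47:30 2015
@@ -173,7 +173,7 @@ public class TestVectorGroupByOperator {
     List mapColumnNames = new ArrayList();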
     mapColumnNames.add("Key");
     mapColumnNames.add("Value");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildKeyGroupByDesc (ctx, "max",
         "Value", TypeInfoFactory.longTypeInfo,
@@ -1710,7 +1710,7 @@ public class TestVectorGroupByOperator {
       mapColumnNames.put("value", i);
       outputColumnNames.add("value");
 
-    VectorizationContext ctx = new VectorizationContext(outputColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", outputColumnNames);
 
     ArrayList aggs = new ArrayList(1);
     aggs.add(
@@ -1821,7 +1821,7 @@ public class TestVectorGroupByOperator {
     List mapColumnNames = new ArrayList();
     mapColumnNames.add("Key");
     mapColumnNames.add("Value");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     Set keys = new HashSet();
 
     AggregationDesc agg = buildAggregationDesc(ctx, aggregateName,
@@ -2235,7 +2235,7 @@ public class TestVectorGroupByOperator {
       Object expected) throws HiveException {
     List mapColumnNames = new ArrayList();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescCountStar (ctx);
 
@@ -2264,7 +2264,7 @@ public class TestVectorGroupByOperator {
       Object expected) throws HiveException {
     List mapColumnNames = new ArrayList();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescType(ctx, "count", "A", TypeInfoFactory.longTypeInfo);
     VectorGroupByDesc vectorDesc = desc.getVectorDesc();
@@ -2296,7 +2296,7 @@ public class TestVectorGroupByOperator {
       Object expected) throws HiveException {
     List mapColumnNames = new ArrayList();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
         TypeInfoFactory.stringTypeInfo);
@@ -2322,11 +2322,12 @@ public class TestVectorGroupByOperator {
   }
 
   public void testAggregateDecimalIterable (
-String aggregateName, Iterable data,
-      Object expected) throws HiveException {
-    List mapColumnNames = new ArrayList();
-    mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+      String aggregateName,
+      Iterable data,
+      Object expected) throws HiveException {
+    List mapColumnNames = new ArrayList();
+    mapColumnNames.add("A");
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc =
         buildGroupByDescType(ctx, aggregateName, "A", TypeInfoFactory.getDecimalTypeInfo(30, 4));
@@ -2358,7 +2359,7 @@ String aggregateName, Iterable
     List mapColumnNames = new ArrayList();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescType (ctx, aggregateName, "A",
         TypeInfoFactory.doubleTypeInfo);
@@ -2389,7 +2390,7 @@ String aggregateName, Iterable
     List mapColumnNames = new ArrayList();
     mapColumnNames.add("A");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
         TypeInfoFactory.longTypeInfo);
@@ -2420,7 +2421,7 @@ String aggregateName, Iterable
     List mapColumnNames = new ArrayList();
     mapColumnNames.add("Key");
     mapColumnNames.add("Value");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     Set keys = new HashSet();
 
@@ -2487,7 +2488,7 @@ String aggregateName, Iterable
     List mapColumnNames = new ArrayList();
     mapColumnNames.add("Key");
     mapColumnNames.add("Value");
-    VectorizationContext ctx = new VectorizationContext(mapColumnNames);
+    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
 
     Set keys = new HashSet();
 
     GroupByDesc desc = buildKeyGroupByDesc (ctx, aggregateName, "Value",

Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java Tue Apr 14 14:47:30 2015
@@ -88,7 +88,7 @@ public class TestVectorSelectOperator {
     columns.add("a");
     columns.add("b");
     columns.add("c");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     SelectDesc selDesc = new SelectDesc(false);
     List colList = new ArrayList();

Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Tue Apr 14 14:47:30 2015
@@ -247,7 +247,7 @@ public class TestVectorizationContext {
     children5.add(col6Expr);
     modExpr.setChildren(children5);
 
-    VectorizationContext vc = new VectorizationContext();
+    VectorizationContext vc = new VectorizationContext("name");
     vc.addInitialColumn("col1");
     vc.addInitialColumn("col2");
     vc.addInitialColumn("col3");
@@ -297,7 +297,7 @@ public class TestVectorizationContext {
     columns.add("col0");
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -322,7 +322,7 @@ public class TestVectorizationContext {
     columns.add("col0");
     columns.add("col1");
     columns.add("col2");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
@@ -341,7 +341,7 @@ public class TestVectorizationContext {
children1.add(col2Expr); exprDesc.setChildren(children1); - vc = new VectorizationContext(columns); + vc = new VectorizationContext("name", columns); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); @@ -360,7 +360,7 @@ public class TestVectorizationContext { children1.add(col2Expr); exprDesc.setChildren(children1); - vc = new VectorizationContext(columns); + vc = new VectorizationContext("name", columns); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); @@ -378,7 +378,7 @@ public class TestVectorizationContext { children1.add(col2Expr); exprDesc.setChildren(children1); - vc = new VectorizationContext(columns); + vc = new VectorizationContext("name", columns); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); @@ -395,7 +395,7 @@ public class TestVectorizationContext { children1.add(col2Expr); exprDesc.setChildren(children1); - vc = new VectorizationContext(columns); + vc = new VectorizationContext("name", columns); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); @@ -412,7 +412,7 @@ public class TestVectorizationContext { children1.add(col2Expr); exprDesc.setChildren(children1); - vc = new VectorizationContext(columns); + vc = new VectorizationContext("name", columns); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); @@ -434,7 +434,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("col1"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.PROJECTION); @@ -480,7 +480,7 @@ public class TestVectorizationContext { columns.add("col0"); columns.add("col1"); columns.add("col2"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.FILTER); @@ -530,7 +530,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("col1"); columns.add("col2"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression veAnd = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.FILTER); assertEquals(veAnd.getClass(), FilterExprAndExpr.class); assertEquals(veAnd.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class); @@ -555,7 +555,7 @@ public class TestVectorizationContext { orExprDesc.setChildren(children4); //Allocate new Vectorization context to reset the intermediate columns. 
- vc = new VectorizationContext(columns); + vc = new VectorizationContext("name", columns); VectorExpression veOr = vc.getVectorExpression(orExprDesc, VectorExpressionDescriptor.Mode.FILTER); assertEquals(veOr.getClass(), FilterExprOrExpr.class); assertEquals(veOr.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class); @@ -596,7 +596,7 @@ public class TestVectorizationContext { columns.add("col0"); columns.add("col1"); columns.add("col2"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(notExpr, VectorExpressionDescriptor.Mode.FILTER); @@ -633,7 +633,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("col1"); columns.add("col2"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(isNullExpr, VectorExpressionDescriptor.Mode.FILTER); @@ -674,7 +674,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("col1"); columns.add("col2"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(isNotNullExpr, VectorExpressionDescriptor.Mode.FILTER); @@ -703,7 +703,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("a"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(scalarMinusConstant, VectorExpressionDescriptor.Mode.PROJECTION); assertEquals(ve.getClass(), LongScalarSubtractLongColumn.class); @@ -726,7 +726,7 @@ public class TestVectorizationContext { columns.add("col0"); columns.add("col1"); columns.add("col2"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); @@ -744,7 +744,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("col0"); columns.add("col1"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION); @@ -762,7 +762,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("col0"); columns.add("col1"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION); @@ -787,7 +787,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("a"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(scalarGreaterColExpr, VectorExpressionDescriptor.Mode.FILTER); assertEquals(FilterLongScalarGreaterLongColumn.class, ve.getClass()); } @@ -810,7 +810,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("a"); - VectorizationContext vc = new VectorizationContext(columns); + 
VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(colEqualScalar, VectorExpressionDescriptor.Mode.FILTER); assertEquals(FilterLongColEqualLongScalar.class, ve.getClass()); } @@ -833,7 +833,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("a"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(colEqualScalar, VectorExpressionDescriptor.Mode.PROJECTION); assertEquals(LongColEqualLongScalar.class, ve.getClass()); } @@ -850,7 +850,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("b"); columns.add("a"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); GenericUDF stringLower = new GenericUDFLower(); stringUnary.setGenericUDF(stringLower); @@ -860,7 +860,7 @@ public class TestVectorizationContext { assertEquals(1, ((StringLower) ve).getColNum()); assertEquals(2, ((StringLower) ve).getOutputColumn()); - vc = new VectorizationContext(columns); + vc = new VectorizationContext("name", columns); ExprNodeGenericFuncDesc anotherUnary = new ExprNodeGenericFuncDesc(); anotherUnary.setTypeInfo(TypeInfoFactory.stringTypeInfo); @@ -895,7 +895,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("b"); columns.add("a"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); // Sin(double) GenericUDFBridge gudfBridge = new GenericUDFBridge("sin", false, UDFSin.class.getName()); @@ -986,7 +986,7 @@ public class TestVectorizationContext { List columns = new ArrayList(); columns.add("b"); columns.add("a"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); //UDFYear GenericUDFBridge gudfBridge = new GenericUDFBridge("year", false, UDFYear.class.getName()); @@ -1024,7 +1024,7 @@ public class TestVectorizationContext { columns.add("col0"); columns.add("col1"); columns.add("col2"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); assertTrue(ve instanceof FilterStringColumnBetween); @@ -1050,7 +1050,7 @@ public class TestVectorizationContext { exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children1); - vc = new VectorizationContext(columns); + vc = new VectorizationContext("name", columns); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); assertTrue(ve instanceof FilterCharColumnBetween); @@ -1075,7 +1075,7 @@ public class TestVectorizationContext { exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children1); - vc = new VectorizationContext(columns); + vc = new VectorizationContext("name", columns); ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER); assertTrue(ve instanceof FilterVarCharColumnBetween); @@ -1144,7 +1144,7 @@ public class TestVectorizationContext { columns.add("col0"); columns.add("col1"); columns.add("col2"); - VectorizationContext vc = new VectorizationContext(columns); + VectorizationContext vc = new VectorizationContext("name", columns); VectorExpression ve = 
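The change running through TestVectorGroupByOperator, TestVectorSelectOperator, and TestVectorizationContext above is mechanical: the VectorizationContext constructors now take a context name ahead of the initial column list, and the tests pass the placeholder "name". A minimal sketch of the pattern, with hypothetical column names; the comment on the second allocation restates the tests' own "reset the intermediate columns" idiom:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;

public class VectorizationContextNameSketch {
  public static void main(String[] args) {
    // Hypothetical input columns for the sketch.
    List<String> columns = new ArrayList<String>();
    columns.add("col0");
    columns.add("col1");

    // Old form: new VectorizationContext(columns)
    // New form: a descriptive context name comes first.
    VectorizationContext vc = new VectorizationContext("name", columns);

    // A context hands out intermediate (scratch) output column numbers as
    // expressions are compiled against it, so a test that wants the
    // numbering to start over simply builds a fresh context.
    vc = new VectorizationContext("name", columns);
  }
}

Presumably the name only serves to identify the context in logs and debug output, so any short stable string is fine in tests.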
Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java Tue Apr 14 14:47:30 2015
@@ -1288,9 +1288,6 @@ public class TestInputOutputFormat {
     }
     mapWork.setPathToAliases(aliasMap);
     mapWork.setPathToPartitionInfo(partMap);
-    mapWork.setAllColumnVectorMaps(new HashMap>());
-    mapWork.setAllScratchColumnVectorTypeMaps(new HashMap>());
     // write the plan out
     FileSystem localFs = FileSystem.getLocal(conf).getRaw();

Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Tue Apr 14 14:47:30 2015
@@ -187,10 +187,11 @@ public class TestOrcFile {
   }

   private static ByteBuffer byteBuf(int... items) {
-    ByteBuffer result = ByteBuffer.allocate(items.length);
+    ByteBuffer result = ByteBuffer.allocate(items.length);
     for(int item: items) {
       result.put((byte) item);
     }
+    result.flip();
     return result;
   }
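The byteBuf() fix above is the one behavioral change in TestOrcFile: after a sequence of put() calls a ByteBuffer's position sits at the end of the written data, so a reader starting from that position sees nothing; flip() sets the limit to the current position and rewinds the position to zero. A self-contained, JDK-only illustration with hypothetical values:

import java.nio.ByteBuffer;

public class FlipSketch {
  public static void main(String[] args) {
    ByteBuffer buf = ByteBuffer.allocate(3);
    buf.put((byte) 1);
    buf.put((byte) 2);
    buf.put((byte) 3);

    // Before flip(): position == limit == 3, so nothing remains to read.
    System.out.println(buf.remaining()); // prints 0

    // flip() makes the written range [0, 3) readable.
    buf.flip();
    System.out.println(buf.remaining()); // prints 3
    System.out.println(buf.get());       // prints 1
  }
}

Without the flip(), every buffer returned by byteBuf() would report zero remaining bytes to any consumer that respects position and limit.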
@@ -703,12 +704,12 @@ public class TestOrcFile {
     assertEquals(0, items.get(0).getPositions(0));
     assertEquals(0, items.get(0).getPositions(1));
     assertEquals(0, items.get(0).getPositions(2));
-    assertEquals(1,
+    assertEquals(1,
         items.get(0).getStatistics().getIntStatistics().getMinimum());
     index = recordReader.readRowIndex(1, null, null).getRowGroupIndex();
     assertEquals(3, index.length);
     items = index[1].getEntryList();
-    assertEquals(2,
+    assertEquals(2,
         items.get(0).getStatistics().getIntStatistics().getMaximum());
   }

Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/optimizer/physical/TestVectorizer.java Tue Apr 14 14:47:30 2015
@@ -52,7 +52,7 @@ public class TestVectorizer {
     columns.add("col3");

    //Generate vectorized expression
-    vContext = new VectorizationContext(columns);
+    vContext = new VectorizationContext("name", columns);
  }

  @Description(name = "fake", value = "FAKE")

Modified: hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java (original)
+++ hive/branches/hbase-metastore/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java Tue Apr 14 14:47:30 2015
@@ -17,15 +17,12 @@
  */
 package org.apache.hadoop.hive.ql.parse;

-import static org.junit.Assert.*;
-
 import java.io.File;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
-import junit.framework.Assert;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -44,7 +41,6 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;

 public class TestUpdateDeleteSemanticAnalyzer {
@@ -135,7 +131,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateAllNonPartitioned() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update T set a = 5", "testUpdateAllNonPartitioned");
+      ReturnInfo rc = parseAndAnalyze("update T set b = 5", "testUpdateAllNonPartitioned");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
       cleanupTables();
@@ -145,7 +141,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateAllNonPartitionedWhere() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update T set a = 5 where b > 5",
+      ReturnInfo rc = parseAndAnalyze("update T set b = 5 where b > 5",
           "testUpdateAllNonPartitionedWhere");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
@@ -156,7 +152,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateAllPartitioned() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update U set a = 5", "testUpdateAllPartitioned");
+      ReturnInfo rc = parseAndAnalyze("update U set b = 5", "testUpdateAllPartitioned");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
       cleanupTables();
@@ -166,7 +162,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateAllPartitionedWhere() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update U set a = 5 where b > 5",
+      ReturnInfo rc = parseAndAnalyze("update U set b = 5 where b > 5",
          "testUpdateAllPartitionedWhere");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
@@ -177,7 +173,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateOnePartition() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update U set a = 5 where ds = 'today'",
+      ReturnInfo rc = parseAndAnalyze("update U set b = 5 where ds = 'today'",
          "testUpdateOnePartition");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
@@ -188,7 +184,7 @@ public class TestUpdateDeleteSemanticAna
   @Test
   public void testUpdateOnePartitionWhere() throws Exception {
     try {
-      ReturnInfo rc = parseAndAnalyze("update U set a = 5 where ds = 'today' and b > 5",
+      ReturnInfo rc = parseAndAnalyze("update U set b = 5 where ds = 'today' and b > 5",
          "testUpdateOnePartitionWhere");
       LOG.info(explain((SemanticAnalyzer)rc.sem, rc.plan, rc.ast.dump()));
     } finally {
@@ -266,7 +262,7 @@ public class TestUpdateDeleteSemanticAna
     db = sem.getDb();

     // I have to create the tables here (rather than in setup()) because I need the Hive
-    // connection, which is conviently created by the semantic analyzer.
+    // connection, which is conveniently created by the semantic analyzer.
     Map params = new HashMap(1);
     params.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
     db.createTable("T", Arrays.asList("a", "b"), null, OrcInputFormat.class,
Modified: hive/branches/hbase-metastore/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q (original)
+++ hive/branches/hbase-metastore/ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q Tue Apr 14 14:47:30 2015
@@ -9,7 +9,7 @@ set hive.enforce.bucketing=true;

 -- check update without update priv
-create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+create table auth_noupd(i int, j int) clustered by (j) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');

 set user.name=user1;
 update auth_noupd set i = 0 where i > 0;

Modified: hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update.q
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update.q?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update.q (original)
+++ hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update.q Tue Apr 14 14:47:30 2015
@@ -9,7 +9,7 @@ set hive.enforce.bucketing=true;
 set user.name=user1;

 -- current user has been set (comment line before the set cmd is resulting in parse error!!)
-CREATE TABLE t_auth_up(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+CREATE TABLE t_auth_up(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');

 CREATE TABLE t_select(i int);
 GRANT ALL ON TABLE t_select TO ROLE public;
@@ -24,4 +24,4 @@ SHOW GRANT ON TABLE t_auth_up;

 set user.name=userWIns;

-update t_auth_up set i = 0 where i > 0;
+update t_auth_up set j = 0 where i > 0;

Modified: hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update_own_table.q
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update_own_table.q?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update_own_table.q (original)
+++ hive/branches/hbase-metastore/ql/src/test/queries/clientpositive/authorization_update_own_table.q Tue Apr 14 14:47:30 2015
@@ -9,8 +9,8 @@ set hive.enforce.bucketing=true;

 set user.name=user1;
-create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
-update auth_noupd set i = 0 where i > 0;
+create table auth_noupd(i int, j int) clustered by (i) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+update auth_noupd set j = 0 where i > 0;

 set user.name=hive_admin_user;
 set role admin;
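The three .q files above change in lockstep with the UPDATE statements in TestUpdateDeleteSemanticAnalyzer, and for the same reason: Hive's ACID semantic analysis rejects an UPDATE that assigns to a bucketing (or partitioning) column. Each table gains a second column so the SET clause can target a non-bucketed column; the clientnegative test instead re-buckets on j, so that "set i = 0" stays semantically legal and the test still fails only on the missing UPDATE privilege. A sketch of the rule, using a hypothetical table name:

-- Hypothetical table mirroring the tests: transactional ORC, bucketed on i.
create table upd_demo(i int, j int) clustered by (i) into 2 buckets
stored as orc TBLPROPERTIES ('transactional'='true');

-- Rejected at analysis time: i is the bucketing column.
-- update upd_demo set i = 0 where i > 0;

-- Allowed: j is an ordinary column, and the bucketing column may still
-- appear in the WHERE clause.
update upd_demo set j = 0 where i > 0;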