From: gunther@apache.org
To: commits@hive.apache.org
Reply-To: hive-dev@hive.apache.org
Subject: svn commit: r1617652 [4/7] - in /hive/branches/cbo: ./ ant/ ant/src/org/apache/hadoop/hive/ant/ beeline/ beeline/src/java/org/apache/hive/beeline/ beeline/src/main/resources/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hi...
Date: Wed, 13 Aug 2014 02:28:58 -0000
Message-Id: <20140813022905.A40732388A2C@eris.apache.org>

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java Wed Aug 13 02:28:54 2014
@@ -44,7 +44,6 @@ import org.apache.hadoop.hive.ql.lib.Nod
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.IndexUtils;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -64,11 +63,11 @@ import org.apache.hadoop.hive.ql.plan.Ta
 public class IndexWhereProcessor implements NodeProcessor {

   private static final Log LOG = LogFactory.getLog(IndexWhereProcessor.class.getName());
-  private final Map<Table, List<Index>> indexes;
+  private final Map<TableScanOperator, List<Index>> tsToIndices;

-  public IndexWhereProcessor(Map<Table, List<Index>> indexes) {
+  public IndexWhereProcessor(Map<TableScanOperator, List<Index>> tsToIndices) {
     super();
-    this.indexes = indexes;
+    this.tsToIndices = tsToIndices;
   }

@@ -81,9 +80,11 @@ public class IndexWhereProcessor impleme
     TableScanOperator operator = (TableScanOperator) nd;
     List<Node> opChildren = operator.getChildren();
     TableScanDesc operatorDesc = operator.getConf();
-    if (operatorDesc == null) {
+    if (operatorDesc == null || !tsToIndices.containsKey(operator)) {
       return null;
     }
+    List<Index> indexes = tsToIndices.get(operator);
+
     ExprNodeDesc predicate = operatorDesc.getFilterExpr();

     IndexWhereProcCtx context = (IndexWhereProcCtx) procCtx;
@@ -96,7 +97,7 @@ public class IndexWhereProcessor impleme
     }
     LOG.info(predicate.getExprString());

-    // check if we have indexes on all partitions in this table scan
+    // check if we have tsToIndices on all partitions in this table scan
     Set<Partition> queryPartitions;
     try {
       queryPartitions = IndexUtils.checkPartitionsCoveredByIndex(operator, pctx, indexes);
@@ -118,14 +119,9 @@ public class IndexWhereProcessor impleme
     Map<Index, HiveIndexQueryContext> queryContexts = new HashMap<Index, HiveIndexQueryContext>();
     // make sure we have an index on the table being scanned
     TableDesc tblDesc = operator.getTableDesc();
-    Table srcTable = pctx.getTopToTable().get(operator);
-    if (indexes == null || indexes.get(srcTable) == null) {
-      return null;
-    }

-    List<Index> tableIndexes = indexes.get(srcTable);
     Map<String, List<Index>> indexesByType = new HashMap<String, List<Index>>();
-    for (Index indexOnTable : tableIndexes) {
+    for (Index indexOnTable : indexes) {
       if (indexesByType.get(indexOnTable.getIndexHandlerClass()) == null) {
         List<Index> newType = new ArrayList<Index>();
         newType.add(indexOnTable);
@@ -135,7 +131,7 @@ public class IndexWhereProcessor impleme
       }
     }

-    // choose index type with most indexes of the same type on the table
+    // choose index type with most tsToIndices of the same type on the table
     // TODO HIVE-2130 This would be a good place for some sort of cost based choice?
     List<Index> bestIndexes = indexesByType.values().iterator().next();
     for (List<Index> indexTypes : indexesByType.values()) {
@@ -179,7 +175,7 @@ public class IndexWhereProcessor impleme
   }

   /**
-   * Get a list of Tasks to activate use of indexes.
+   * Get a list of Tasks to activate use of tsToIndices.
    * Generate the tasks for the index query (where we store results of
    * querying the index in a tmp file) inside the IndexHandler
    * @param predicate Predicate of query to rewrite
@@ -193,7 +189,7 @@ public class IndexWhereProcessor impleme
                                 HiveIndexQueryContext queryContext)
                                 throws SemanticException {
     HiveIndexHandler indexHandler;
-    // All indexes in the list are of the same type, and therefore can use the
+    // All tsToIndices in the list are of the same type, and therefore can use the
     // same handler to generate the index query tasks
     Index index = indexes.get(0);
     try {
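Note on the re-keying above: the processor now receives its candidate index lists keyed by TableScanOperator rather than by Table, so two scans of the same table can each carry their own list, and the Table-based lookup via getTopToTable() goes away. A minimal standalone sketch of just that lookup pattern (the names here are placeholders, not the Hive types):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class ScanKeyedLookup {
      // Keyed by the scan operator itself, so each scan of the same
      // table can be matched to its own candidate index list.
      static <K, V> List<V> indicesFor(Map<K, List<V>> tsToIndices, K scan) {
        // Mirrors the new guard: bail out early when no entry exists,
        // instead of resolving the scan back to its Table first.
        if (!tsToIndices.containsKey(scan)) {
          return null;
        }
        return tsToIndices.get(scan);
      }
    }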
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java Wed Aug 13 02:28:54 2014
@@ -116,12 +116,11 @@ public class IndexWhereTaskDispatcher im

     // query the metastore to know what columns we have indexed
     Collection<Table> topTables = pctx.getTopToTable().values();
-    Map<Table, List<Index>> indexes = new HashMap<Table, List<Index>>();
-    for (Table tbl : topTables)
-    {
-      List<Index> tblIndexes = IndexUtils.getIndexes(tbl, supportedIndexes);
+    Map<TableScanOperator, List<Index>> indexes = new HashMap<TableScanOperator, List<Index>>();
+    for (Map.Entry<TableScanOperator, Table> entry : pctx.getTopToTable().entrySet()) {
+      List<Index> tblIndexes = IndexUtils.getIndexes(entry.getValue(), supportedIndexes);
       if (tblIndexes.size() > 0) {
-        indexes.put(tbl, tblIndexes);
+        indexes.put(entry.getKey(), tblIndexes);
       }
     }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Wed Aug 13 02:28:54 2014
@@ -133,7 +133,7 @@ public class PartitionPruner implements
    * condition.
    */
   public static PrunedPartitionList prune(TableScanOperator ts, ParseContext parseCtx,
-      String alias) throws HiveException {
+      String alias) throws SemanticException {
     return prune(parseCtx.getTopToTable().get(ts), parseCtx.getOpToPartPruner().get(ts),
         parseCtx.getConf(), alias, parseCtx.getPrunedPartitions());
   }
@@ -158,7 +158,7 @@ public class PartitionPruner implements
    */
   public static PrunedPartitionList prune(Table tab, ExprNodeDesc prunerExpr,
       HiveConf conf, String alias, Map<String, PrunedPartitionList> prunedPartitionsMap)
-          throws HiveException {
+          throws SemanticException {
     LOG.trace("Started pruning partiton");
     LOG.trace("dbname = " + tab.getDbName());
     LOG.trace("tabname = " + tab.getTableName());
@@ -281,7 +281,7 @@ public class PartitionPruner implements
   }

   private static PrunedPartitionList getPartitionsFromServer(Table tab,
-      ExprNodeDesc prunerExpr, HiveConf conf, String alias) throws HiveException {
+      ExprNodeDesc prunerExpr, HiveConf conf, String alias) throws SemanticException {
     try {
       if (!tab.isPartitioned()) {
         // If the table is not partitioned, return everything.
@@ -348,10 +348,10 @@ public class PartitionPruner implements
       return new PrunedPartitionList(tab, new LinkedHashSet<Partition>(partitions),
           new ArrayList<String>(referred),
           hasUnknownPartitions || !isPruningByExactFilter);
-    } catch (HiveException e) {
+    } catch (SemanticException e) {
       throw e;
     } catch (Exception e) {
-      throw new HiveException(e);
+      throw new SemanticException(e);
     }
   }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java Wed Aug 13 02:28:54 2014
@@ -18,11 +18,8 @@

 package org.apache.hadoop.hive.ql.optimizer.stats.annotation;

-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Stack;
-
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -69,8 +66,10 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
 import org.apache.hadoop.hive.serde.serdeConstants;

-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Stack;

 public class StatsRulesProcFactory {

@@ -92,12 +91,8 @@ public class StatsRulesProcFactory {
         Object... nodeOutputs) throws SemanticException {
       TableScanOperator tsop = (TableScanOperator) nd;
       AnnotateStatsProcCtx aspCtx = (AnnotateStatsProcCtx) procCtx;
-      PrunedPartitionList partList = null;
-      try {
-        partList = aspCtx.getParseContext().getPrunedPartitions(tsop.getName(), tsop);
-      } catch (HiveException e1) {
-        throw new SemanticException(e1);
-      }
+      PrunedPartitionList partList =
+          aspCtx.getParseContext().getPrunedPartitions(tsop.getName(), tsop);
       Table table = aspCtx.getParseContext().getTopToTable().get(tsop);

       try {
@@ -925,8 +920,7 @@ public class StatsRulesProcFactory {
               + " #Rows of parents: " + rowCountParents.toString() + ". Denominator: " + denom);
         }

-        stats.setNumRows(newRowCount);
-        stats.setDataSize(StatsUtils.getDataSizeFromColumnStats(newRowCount, outColStats));
+        updateStatsForJoinType(stats, newRowCount, true, jop.getConf());
         jop.setStatistics(stats);

         if (LOG.isDebugEnabled()) {
@@ -972,6 +966,39 @@ public class StatsRulesProcFactory {
       return null;
     }

+    private void updateStatsForJoinType(Statistics stats, long newNumRows,
+        boolean useColStats, JoinDesc conf) {
+      long oldRowCount = stats.getNumRows();
+      double ratio = (double) newNumRows / (double) oldRowCount;
+      stats.setNumRows(newNumRows);
+
+      if (useColStats) {
+        List<ColStatistics> colStats = stats.getColumnStats();
+        for (ColStatistics cs : colStats) {
+          long oldDV = cs.getCountDistint();
+          long newDV = oldDV;
+
+          // if ratio is greater than 1, then number of rows increases. This can happen
+          // when some operators like GROUPBY duplicates the input rows in which case
+          // number of distincts should not change. Update the distinct count only when
+          // the output number of rows is less than input number of rows.
+          if (ratio <= 1.0) {
+            newDV = (long) Math.ceil(ratio * oldDV);
+          }
+          // Assumes inner join
+          // TODO: HIVE-5579 will handle different join types
+          cs.setNumNulls(0);
+          cs.setCountDistint(newDV);
+        }
+        stats.setColumnStats(colStats);
+        long newDataSize = StatsUtils.getDataSizeFromColumnStats(newNumRows, colStats);
+        stats.setDataSize(newDataSize);
+      } else {
+        long newDataSize = (long) (ratio * stats.getDataSize());
+        stats.setDataSize(newDataSize);
+      }
+    }
+
     private long computeNewRowCount(List<Long> rowCountParents, long denom) {
       double factor = 0.0d;
       long result = 1;
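The comment block inside updateStatsForJoinType encodes a simple scaling rule: distinct-value counts are scaled down by the row-count ratio, but never scaled up. That arithmetic in isolation (a standalone sketch, not the Hive implementation):

    class DistinctValueScaling {
      // Scale a column's distinct-value count by the row-count ratio,
      // but only when rows shrink; duplication (e.g. join fan-out)
      // should not raise the number of distinct values.
      static long scaleDistinctValues(long oldRows, long newRows, long oldDV) {
        double ratio = (double) newRows / (double) oldRows;
        return ratio <= 1.0 ? (long) Math.ceil(ratio * oldDV) : oldDV;
      }

      public static void main(String[] args) {
        System.out.println(scaleDistinctValues(1000, 100, 50));  // 5
        System.out.println(scaleDistinctValues(1000, 4000, 50)); // 50, unchanged
      }
    }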
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Wed Aug 13 02:28:54 2014
@@ -34,6 +34,7 @@ import java.util.Map.Entry;

 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -305,6 +306,28 @@ public abstract class BaseSemanticAnalyz
     return unescapeIdentifier(tableOrColumnNode.getText());
   }

+  public static String[] getQualifiedTableName(ASTNode tabNameNode) throws SemanticException {
+    if (tabNameNode.getType() != HiveParser.TOK_TABNAME ||
+        (tabNameNode.getChildCount() != 1 && tabNameNode.getChildCount() != 2)) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME.getMsg(tabNameNode));
+    }
+    if (tabNameNode.getChildCount() == 2) {
+      String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText());
+      String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText());
+      return new String[] {dbName, tableName};
+    }
+    String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText());
+    return new String[]{SessionState.get().getCurrentDatabase(), tableName};
+  }
+
+  public static String getDotName(String[] qname) throws SemanticException {
+    String genericName = StringUtils.join(qname, ".");
+    if (qname.length != 2) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, genericName);
+    }
+    return genericName;
+  }
+
   /**
    * Get the unqualified name from a table node.
    *
@@ -817,9 +840,9 @@ public abstract class BaseSemanticAnalyz
     this.columnAccessInfo = columnAccessInfo;
   }

-  protected HashMap<String, String> extractPartitionSpecs(Tree partspec)
+  protected LinkedHashMap<String, String> extractPartitionSpecs(Tree partspec)
       throws SemanticException {
-    HashMap<String, String> partSpec = new LinkedHashMap<String, String>();
+    LinkedHashMap<String, String> partSpec = new LinkedHashMap<String, String>();
     for (int i = 0; i < partspec.getChildCount(); ++i) {
       CommonTree partspec_val = (CommonTree) partspec.getChild(i);
       String val = stripQuotes(partspec_val.getChild(1).getText());
@@ -1176,23 +1199,16 @@ public abstract class BaseSemanticAnalyz
     }
   }

+  protected Table getTable(String[] qualified) throws SemanticException {
+    return getTable(qualified[0], qualified[1], true);
+  }
+
   protected Table getTable(String tblName) throws SemanticException {
     return getTable(null, tblName, true);
   }

   protected Table getTable(String tblName, boolean throwException) throws SemanticException {
-    String currentDb = SessionState.get().getCurrentDatabase();
-    return getTable(currentDb, tblName, throwException);
-  }
-
-  // qnName : possibly contains database name (dot separated)
-  protected Table getTableWithQN(String qnName, boolean throwException) throws SemanticException {
-    int dot = qnName.indexOf('.');
-    if (dot < 0) {
-      String currentDb = SessionState.get().getCurrentDatabase();
-      return getTable(currentDb, qnName, throwException);
-    }
-    return getTable(qnName.substring(0, dot), qnName.substring(dot + 1), throwException);
+    return getTable(null, tblName, throwException);
   }

   protected Table getTable(String database, String tblName, boolean throwException)
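The two helpers added above normalize a table reference into a {db, table} pair and back into dotted form. A rough standalone illustration of the intended contract, where defaultDb stands in for SessionState.get().getCurrentDatabase() (a sketch, not the Hive code path, which works on AST nodes rather than strings):

    class QualifiedNames {
      // Split "db.tbl" or bare "tbl" into {db, table}, defaulting the db,
      // roughly what getQualifiedTableName does for a TOK_TABNAME node.
      static String[] qualify(String name, String defaultDb) {
        int dot = name.indexOf('.');
        if (dot < 0) {
          return new String[] {defaultDb, name};
        }
        return new String[] {name.substring(0, dot), name.substring(dot + 1)};
      }

      // Rejoin, enforcing exactly one qualifier as getDotName does.
      static String dotName(String[] qname) {
        if (qname.length != 2) {
          throw new IllegalArgumentException("expected {db, table}, got " + qname.length);
        }
        return qname[0] + "." + qname[1];
      }
    }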
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java Wed Aug 13 02:28:54 2014
@@ -23,7 +23,6 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;

 public class ColumnAccessAnalyzer {
@@ -49,13 +48,7 @@ public class ColumnAccessAnalyzer {
         columnAccessInfo.add(tableName, column);
       }
       if (table.isPartitioned()) {
-        PrunedPartitionList parts;
-        try {
-          parts = pGraphContext.getPrunedPartitions(table.getTableName(), op);
-        } catch (HiveException e) {
-          LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
-          throw new SemanticException(e.getMessage(), e);
-        }
+        PrunedPartitionList parts = pGraphContext.getPrunedPartitions(table.getTableName(), op);
         if (parts.getReferredPartCols() != null) {
           for (String partKey : parts.getReferredPartCols()) {
             columnAccessInfo.add(tableName, partKey);

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java Wed Aug 13 02:28:54 2014
@@ -18,8 +18,11 @@

 package org.apache.hadoop.hive.ql.parse;

+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;

@@ -42,7 +45,13 @@ public class ColumnAccessInfo {
     tableColumns.add(col);
   }

-  public Map<String, Set<String>> getTableToColumnAccessMap() {
-    return tableToColumnAccessMap;
+  public Map<String, List<String>> getTableToColumnAccessMap() {
+    Map<String, List<String>> mapping = new HashMap<String, List<String>>();
+    for (Map.Entry<String, Set<String>> entry : tableToColumnAccessMap.entrySet()) {
+      List<String> sortedCols = new ArrayList<String>(entry.getValue());
+      Collections.sort(sortedCols);
+      mapping.put(entry.getKey(), sortedCols);
+    }
+    return mapping;
   }
 }
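The getter above now returns a sorted copy instead of the live internal map; presumably this makes column ordering deterministic for consumers such as hooks and tests, and protects the internal sets from mutation. A quick usage sketch under that assumption:

    import java.util.*;

    class SortedViewDemo {
      public static void main(String[] args) {
        Map<String, Set<String>> internal = new HashMap<>();
        internal.put("db.tbl", new HashSet<>(Arrays.asList("c", "a", "b")));

        // Same copy-and-sort shape as the new getter.
        Map<String, List<String>> view = new HashMap<>();
        for (Map.Entry<String, Set<String>> e : internal.entrySet()) {
          List<String> cols = new ArrayList<>(e.getValue());
          Collections.sort(cols);
          view.put(e.getKey(), cols);
        }
        System.out.println(view); // {db.tbl=[a, b, c]} regardless of insertion order
      }
    }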
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Wed Aug 13 02:28:54 2014
@@ -217,7 +217,7 @@ public class DDLSemanticAnalyzer extends
     }

     public TablePartition(ASTNode tblPart) throws SemanticException {
-      tableName = unescapeIdentifier(tblPart.getChild(0).getText());
+      tableName = getDotName((getQualifiedTableName((ASTNode) tblPart.getChild(0))));
       if (tblPart.getChildCount() > 1) {
         ASTNode part = (ASTNode) tblPart.getChild(1);
         if (part.getToken().getType() == HiveParser.TOK_PARTSPEC) {
@@ -1015,7 +1015,7 @@ public class DDLSemanticAnalyzer extends
   private void analyzeCreateIndex(ASTNode ast) throws SemanticException {
     String indexName = unescapeIdentifier(ast.getChild(0).getText());
     String typeName = unescapeSQLString(ast.getChild(1).getText());
-    String tableName = getUnescapedName((ASTNode) ast.getChild(2));
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(2));
     List<String> indexedCols = getColumnNames((ASTNode) ast.getChild(3));

     IndexType indexType = HiveIndex.getIndexType(typeName);
@@ -1081,8 +1081,14 @@ public class DDLSemanticAnalyzer extends

     storageFormat.fillDefaultStorageFormat();

+    if (indexTableName == null) {
+      indexTableName = MetaStoreUtils.getIndexTableName(qualified[0], qualified[1], indexName);
+      indexTableName = qualified[0] + "." + indexTableName; // on same database with base table
+    } else {
+      indexTableName = getDotName(Utilities.getDbTableName(indexTableName));
+    }

-    CreateIndexDesc crtIndexDesc = new CreateIndexDesc(tableName, indexName,
+    CreateIndexDesc crtIndexDesc = new CreateIndexDesc(getDotName(qualified), indexName,
         indexedCols, indexTableName, deferredRebuild, storageFormat.getInputFormat(),
         storageFormat.getOutputFormat(), storageFormat.getStorageHandler(), typeName, location, idxProps, tblProps,
@@ -1116,21 +1122,20 @@ public class DDLSemanticAnalyzer extends
   }

   private void analyzeAlterIndexRebuild(ASTNode ast) throws SemanticException {
-    String baseTableName = unescapeIdentifier(ast.getChild(0).getText());
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
     String indexName = unescapeIdentifier(ast.getChild(1).getText());
     HashMap<String, String> partSpec = null;
     Tree part = ast.getChild(2);
     if (part != null) {
       partSpec = extractPartitionSpecs(part);
     }
-    List<Task<?>> indexBuilder = getIndexBuilderMapRed(baseTableName, indexName, partSpec);
+    List<Task<?>> indexBuilder = getIndexBuilderMapRed(qualified, indexName, partSpec);
     rootTasks.addAll(indexBuilder);

     // Handle updating index timestamps
     AlterIndexDesc alterIdxDesc = new AlterIndexDesc(AlterIndexTypes.UPDATETIMESTAMP);
     alterIdxDesc.setIndexName(indexName);
-    alterIdxDesc.setBaseTableName(baseTableName);
-    alterIdxDesc.setDbName(SessionState.get().getCurrentDatabase());
+    alterIdxDesc.setBaseTableName(getDotName(qualified));
     alterIdxDesc.setSpec(partSpec);

     Task<?> tsTask = TaskFactory.get(new DDLWork(alterIdxDesc), conf);
@@ -1142,27 +1147,28 @@ public class DDLSemanticAnalyzer extends

   private void analyzeAlterIndexProps(ASTNode ast)
       throws SemanticException {
-    String baseTableName = getUnescapedName((ASTNode) ast.getChild(0));
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
     String indexName = unescapeIdentifier(ast.getChild(1).getText());
     HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(2))
         .getChild(0));

-    AlterIndexDesc alterIdxDesc =
-        new AlterIndexDesc(AlterIndexTypes.ADDPROPS);
+    AlterIndexDesc alterIdxDesc = new AlterIndexDesc(AlterIndexTypes.ADDPROPS);
     alterIdxDesc.setProps(mapProp);
     alterIdxDesc.setIndexName(indexName);
-    alterIdxDesc.setBaseTableName(baseTableName);
-    alterIdxDesc.setDbName(SessionState.get().getCurrentDatabase());
+    alterIdxDesc.setBaseTableName(getDotName(qualified));

     rootTasks.add(TaskFactory.get(new DDLWork(alterIdxDesc), conf));
   }

-  private List<Task<?>> getIndexBuilderMapRed(String baseTableName, String indexName,
+  private List<Task<?>> getIndexBuilderMapRed(String[] names, String indexName,
       HashMap<String, String> partSpec) throws SemanticException {
     try {
-      String dbName = SessionState.get().getCurrentDatabase();
-      Index index = db.getIndex(dbName, baseTableName, indexName);
-      Table indexTbl = getTable(index.getIndexTableName());
+      Index index = db.getIndex(names[0], names[1], indexName);
+      Table indexTbl = null;
+      String indexTableName = index.getIndexTableName();
+      if (indexTableName != null) {
+        indexTbl = getTable(Utilities.getDbTableName(index.getDbName(), indexTableName));
+      }
       String baseTblName = index.getOrigTableName();
       Table baseTbl = getTable(baseTblName);
@@ -1474,7 +1480,7 @@ public class DDLSemanticAnalyzer extends
     boolean checkIndex = HiveConf.getBoolVar(conf,
         HiveConf.ConfVars.HIVE_CONCATENATE_CHECK_INDEX);
     if (checkIndex) {
-      List<Index> indexes = db.getIndexes(tblObj.getDbName(), tableName,
+      List<Index> indexes = db.getIndexes(tblObj.getDbName(), tblObj.getTableName(),
           Short.MAX_VALUE);
       if (indexes != null && indexes.size() > 0) {
         throw new SemanticException("can not do merge because source table "
@@ -1633,7 +1639,7 @@ public class DDLSemanticAnalyzer extends
     LinkedHashMap<String, String> newPartSpec = null;
     if (partSpec != null) newPartSpec = new LinkedHashMap<String, String>(partSpec);

-    AlterTableSimpleDesc desc = new AlterTableSimpleDesc(SessionState.get().getCurrentDatabase(),
+    AlterTableSimpleDesc desc = new AlterTableSimpleDesc(
         tableName, newPartSpec, type);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
@@ -2098,25 +2104,17 @@ public class DDLSemanticAnalyzer extends
   }

   private void analyzeShowColumns(ASTNode ast) throws SemanticException {
-    ShowColumnsDesc showColumnsDesc;
-    String dbName = null;
-    String tableName = null;
-    switch (ast.getChildCount()) {
-    case 1:
-      tableName = getUnescapedName((ASTNode) ast.getChild(0));
-      break;
-    case 2:
-      dbName = getUnescapedName((ASTNode) ast.getChild(0));
-      tableName = getUnescapedName((ASTNode) ast.getChild(1));
-      break;
-    default:
-      break;
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
+    if (ast.getChildCount() > 1) {
+      if (tableName.contains(".")) {
+        throw new SemanticException("Duplicates declaration for database name");
+      }
+      tableName = getUnescapedName((ASTNode) ast.getChild(1)) + "." + tableName;
     }
-
-    Table tab = getTable(dbName, tableName, true);
+    Table tab = getTable(tableName);
     inputs.add(new ReadEntity(tab));

-    showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), dbName, tableName);
+    ShowColumnsDesc showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), tableName);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         showColumnsDesc), conf));
     setFetchTask(createFetchTask(showColumnsDesc.getSchema()));
@@ -2157,13 +2155,13 @@ public class DDLSemanticAnalyzer extends

   private void analyzeShowTableProperties(ASTNode ast) throws SemanticException {
     ShowTblPropertiesDesc showTblPropertiesDesc;
-    String tableNames = getUnescapedName((ASTNode) ast.getChild(0));
-    String dbName = SessionState.get().getCurrentDatabase();
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
     String propertyName = null;
     if (ast.getChildCount() > 1) {
       propertyName = unescapeSQLString(ast.getChild(1).getText());
     }

+    String tableNames = getDotName(qualified);
     validateTable(tableNames, null);

     showTblPropertiesDesc = new ShowTblPropertiesDesc(ctx.getResFile().toString(), tableNames,
@@ -2437,17 +2435,20 @@ public class DDLSemanticAnalyzer extends

   private void analyzeAlterTableRename(ASTNode ast, boolean expectView)
       throws SemanticException {
-    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
-    AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
-        getUnescapedName((ASTNode) ast.getChild(1)), expectView);
+    String[] source = getQualifiedTableName((ASTNode) ast.getChild(0));
+    String[] target = getQualifiedTableName((ASTNode) ast.getChild(1));

-    addInputsOutputsAlterTable(tblName, null, alterTblDesc);
+    String sourceName = getDotName(source);
+    String targetName = getDotName(target);
+
+    AlterTableDesc alterTblDesc = new AlterTableDesc(sourceName, targetName, expectView);
+    addInputsOutputsAlterTable(sourceName, null, alterTblDesc);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }

   private void analyzeAlterTableRenameCol(ASTNode ast) throws SemanticException {
-    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
     String newComment = null;
     String newType = null;
     newType = getTypeStringFromAST((ASTNode) ast.getChild(3));
@@ -2477,7 +2478,7 @@ public class DDLSemanticAnalyzer extends
     String newColName = ast.getChild(2).getText();

     /* Validate the operation of renaming a column name. */
-    Table tab = getTable(tblName);
+    Table tab = getTable(qualified);

     SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
     if ((null != skewInfo)
@@ -2487,6 +2488,7 @@ public class DDLSemanticAnalyzer extends
           + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
     }

+    String tblName = getDotName(qualified);
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
         unescapeIdentifier(oldColName), unescapeIdentifier(newColName),
         newType, newComment, first, flagCol);
@@ -2511,9 +2513,8 @@ public class DDLSemanticAnalyzer extends
     List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
     partSpecs.add(oldPartSpec);
     partSpecs.add(newPartSpec);
-    addTablePartsOutputs(tblName, partSpecs, WriteEntity.WriteType.DDL_EXCLUSIVE);
-    RenamePartitionDesc renamePartitionDesc = new RenamePartitionDesc(
-        SessionState.get().getCurrentDatabase(), tblName, oldPartSpec, newPartSpec);
+    addTablePartsOutputs(tab, partSpecs, WriteEntity.WriteType.DDL_EXCLUSIVE);
+    RenamePartitionDesc renamePartitionDesc = new RenamePartitionDesc(tblName, oldPartSpec, newPartSpec);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         renamePartitionDesc), conf));
   }
@@ -2536,7 +2537,9 @@ public class DDLSemanticAnalyzer extends

   private void analyzeAlterTableModifyCols(ASTNode ast, AlterTableTypes alterType)
       throws SemanticException {
-    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
+
+    String tblName = getDotName(qualified);
     List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(1));
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, newCols,
         alterType);
@@ -2559,8 +2562,8 @@ public class DDLSemanticAnalyzer extends
     // popular case but that's kinda hacky. Let's not do it for now.
     boolean canGroupExprs = ifExists;

-    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
-    Table tab = getTable(tblName, true);
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
+    Table tab = getTable(qualified);
     Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = getFullPartitionSpecs(ast, tab, canGroupExprs);
     if (partSpecs.isEmpty()) return; // nothing to do
@@ -2574,24 +2577,19 @@ public class DDLSemanticAnalyzer extends
     addTableDropPartsOutputs(tab, partSpecs.values(), !ifExists, ignoreProtection);

     DropTableDesc dropTblDesc =
-        new DropTableDesc(tblName, partSpecs, expectView, ignoreProtection);
+        new DropTableDesc(getDotName(qualified), partSpecs, expectView, ignoreProtection);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc), conf));
   }

   private void analyzeAlterTablePartColType(ASTNode ast)
       throws SemanticException {
     // get table name
-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));

-    Table tab = null;
-
     // check if table exists.
-    try {
-      tab = getTable(tblName, true);
-      inputs.add(new ReadEntity(tab));
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
-    }
+    Table tab = getTable(qualified);
+    inputs.add(new ReadEntity(tab));

     // validate the DDL is a valid operation on the table.
     validateAlterTableType(tab, AlterTableTypes.ALTERPARTITION, false);
@@ -2625,7 +2623,7 @@ public class DDLSemanticAnalyzer extends
     }

     AlterTableAlterPartDesc alterTblAlterPartDesc =
-            new AlterTableAlterPartDesc(SessionState.get().getCurrentDatabase(), tblName, newCol);
+            new AlterTableAlterPartDesc(getDotName(qualified), newCol);

     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblAlterPartDesc), conf));
@@ -2648,10 +2646,10 @@ public class DDLSemanticAnalyzer extends
       throws SemanticException {

     // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+)
-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
     boolean ifNotExists = ast.getChild(1).getType() == HiveParser.TOK_IFNOTEXISTS;

-    Table tab = getTable(tblName, true);
+    Table tab = getTable(qualified);
     boolean isView = tab.isView();
     validateAlterTableType(tab, AlterTableTypes.ADDPARTITION, expectView);
     outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED));
@@ -2662,7 +2660,8 @@ public class DDLSemanticAnalyzer extends
     String currentLocation = null;
     Map<String, String> currentPart = null;
     // Parser has done some verification, so the order of tokens doesn't need to be verified here.
-    AddPartitionDesc addPartitionDesc = new AddPartitionDesc(tab.getDbName(), tblName, ifNotExists);
+    AddPartitionDesc addPartitionDesc =
+        new AddPartitionDesc(tab.getDbName(), tab.getTableName(), ifNotExists);
     for (int num = start; num < numCh; num++) {
       ASTNode child = (ASTNode) ast.getChild(num);
       switch (child.getToken().getType()) {
@@ -2683,7 +2682,7 @@ public class DDLSemanticAnalyzer extends
         currentLocation = unescapeSQLString(child.getChild(0).getText());
         boolean isLocal = false;
         try {
-          // do best effor to determine if this is a local file
+          // do best effort to determine if this is a local file
           String scheme = new URI(currentLocation).getScheme();
           if (scheme != null) {
             isLocal = FileUtils.isLocalFile(conf, currentLocation);
@@ -2714,7 +2713,7 @@ public class DDLSemanticAnalyzer extends
       // Compile internal query to capture underlying table partition dependencies
       StringBuilder cmd = new StringBuilder();
       cmd.append("SELECT * FROM ");
-      cmd.append(HiveUtils.unparseIdentifier(tblName));
+      cmd.append(HiveUtils.unparseIdentifier(getDotName(qualified)));
       cmd.append(" WHERE ");
       boolean firstOr = true;
       for (int i = 0; i < addPartitionDesc.getPartitionCount(); ++i) {
@@ -2775,9 +2774,9 @@ public class DDLSemanticAnalyzer extends
    */
   private void analyzeAlterTableTouch(CommonTree ast)
       throws SemanticException {
+    String[] qualified = getQualifiedTableName((ASTNode)ast.getChild(0));

-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
-    Table tab = getTable(tblName, true);
+    Table tab = getTable(qualified);
     validateAlterTableType(tab, AlterTableTypes.TOUCH);
     inputs.add(new ReadEntity(tab));
@@ -2786,16 +2785,16 @@ public class DDLSemanticAnalyzer extends

     if (partSpecs.size() == 0) {
       AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(
-          SessionState.get().getCurrentDatabase(), tblName, null,
+          getDotName(qualified), null,
           AlterTableDesc.AlterTableTypes.TOUCH);
       outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_NO_LOCK));
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
           touchDesc), conf));
     } else {
-      addTablePartsOutputs(tblName, partSpecs, WriteEntity.WriteType.DDL_NO_LOCK);
+      addTablePartsOutputs(tab, partSpecs, WriteEntity.WriteType.DDL_NO_LOCK);
       for (Map<String, String> partSpec : partSpecs) {
         AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(
-            SessionState.get().getCurrentDatabase(), tblName, partSpec,
+            getDotName(qualified), partSpec,
             AlterTableDesc.AlterTableTypes.TOUCH);
         rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
             touchDesc), conf));
@@ -2810,12 +2809,12 @@ public class DDLSemanticAnalyzer extends
       throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg());
     }

-    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);

-    Table tab = getTable(tblName, true);
-    addTablePartsOutputs(tblName, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK);
+    Table tab = getTable(qualified);
+    addTablePartsOutputs(tab, partSpecs, true, WriteEntity.WriteType.DDL_NO_LOCK);
     validateAlterTableType(tab, AlterTableTypes.ARCHIVE);
     inputs.add(new ReadEntity(tab));
@@ -2835,7 +2834,7 @@ public class DDLSemanticAnalyzer extends
       throw new SemanticException(e.getMessage(), e);
     }
     AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(
-        SessionState.get().getCurrentDatabase(), tblName, partSpec,
+        getDotName(qualified), partSpec,
         (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE));
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         archiveDesc), conf));
@@ -3003,20 +3002,20 @@ public class DDLSemanticAnalyzer extends
    * Add the table partitions to be modified in the output, so that it is available for the
    * pre-execution hook. If the partition does not exist, no error is thrown.
    */
-  private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs,
+  private void addTablePartsOutputs(Table table, List<Map<String, String>> partSpecs,
       WriteEntity.WriteType writeType)
       throws SemanticException {
-    addTablePartsOutputs(tblName, partSpecs, false, false, null, writeType);
+    addTablePartsOutputs(table, partSpecs, false, false, null, writeType);
   }

   /**
    * Add the table partitions to be modified in the output, so that it is available for the
    * pre-execution hook. If the partition does not exist, no error is thrown.
    */
-  private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs,
+  private void addTablePartsOutputs(Table table, List<Map<String, String>> partSpecs,
       boolean allowMany, WriteEntity.WriteType writeType)
       throws SemanticException {
-    addTablePartsOutputs(tblName, partSpecs, false, allowMany, null, writeType);
+    addTablePartsOutputs(table, partSpecs, false, allowMany, null, writeType);
   }

  /**
@@ -3024,10 +3023,9 @@ public class DDLSemanticAnalyzer extends
    * pre-execution hook. If the partition does not exist, throw an error if
    * throwIfNonExistent is true, otherwise ignore it.
    */
-  private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs,
+  private void addTablePartsOutputs(Table table, List<Map<String, String>> partSpecs,
       boolean throwIfNonExistent, boolean allowMany, ASTNode ast, WriteEntity.WriteType writeType)
       throws SemanticException {
-    Table tab = getTable(tblName);

     Iterator<Map<String, String>> i;
     int index;
@@ -3036,7 +3034,7 @@ public class DDLSemanticAnalyzer extends
       List<Partition> parts = null;
       if (allowMany) {
         try {
-          parts = db.getPartitions(tab, partSpec);
+          parts = db.getPartitions(table, partSpec);
         } catch (HiveException e) {
           LOG.error("Got HiveException during obtaining list of partitions"
               + StringUtils.stringifyException(e));
@@ -3045,7 +3043,7 @@ public class DDLSemanticAnalyzer extends
       } else {
         parts = new ArrayList<Partition>();
         try {
-          Partition p = db.getPartition(tab, partSpec, false);
+          Partition p = db.getPartition(table, partSpec, false);
           if (p != null) {
             parts.add(p);
           }
@@ -3125,14 +3123,15 @@ public class DDLSemanticAnalyzer extends
      */
     HiveConf hiveConf = SessionState.get().getConf();

-    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
-    Table tab = getTable(tableName, true);
+    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
+    Table tab = getTable(qualified);

     inputs.add(new ReadEntity(tab));
     outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_EXCLUSIVE));

     validateAlterTableType(tab, AlterTableTypes.ADDSKEWEDBY);

+    String tableName = getDotName(qualified);
     if (ast.getChildCount() == 1) {
       /* Convert a skewed table to non-skewed table. */
       AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, true,
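One detail worth calling out from analyzeShowColumns above: SHOW COLUMNS now accepts either a dotted table name or a trailing FROM/IN db clause, and rejects the combination of both. The guard logic, isolated into a standalone sketch (not the Hive code path, which reads AST children):

    class ShowColumnsTarget {
      // first = child 0 text ("tbl" or "db.tbl"), second = optional db from child 1
      static String resolve(String first, String second) {
        if (second == null) {
          return first;
        }
        if (first.contains(".")) {
          throw new IllegalArgumentException("Duplicates declaration for database name");
        }
        return second + "." + first;
      }

      public static void main(String[] args) {
        System.out.println(resolve("db.tbl", null)); // db.tbl
        System.out.println(resolve("tbl", "db"));    // db.tbl
        // resolve("db.tbl", "db") would throw: both forms given at once
      }
    }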
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Wed Aug 13 02:28:54 2014
@@ -953,8 +953,8 @@ alterTableStatementSuffix
 alterStatementPartitionKeyType
 @init {msgs.push("alter partition key type"); }
 @after {msgs.pop();}
-    : identifier KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
-    -> ^(TOK_ALTERTABLE_PARTCOLTYPE identifier columnNameType)
+    : tableName KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
+    -> ^(TOK_ALTERTABLE_PARTCOLTYPE tableName columnNameType)
     ;

 alterViewStatementSuffix
@@ -974,16 +974,14 @@ alterViewStatementSuffix
 alterIndexStatementSuffix
 @init { pushMsg("alter index statement", state); }
 @after { popMsg(state); }
-    : indexName=identifier
-      (KW_ON tableNameId=identifier)
-      partitionSpec?
+    : indexName=identifier KW_ON tableName partitionSpec?
     (
       KW_REBUILD
-      ->^(TOK_ALTERINDEX_REBUILD $tableNameId $indexName partitionSpec?)
+      ->^(TOK_ALTERINDEX_REBUILD tableName $indexName partitionSpec?)
    |
       KW_SET KW_IDXPROPERTIES
       indexProperties
-      ->^(TOK_ALTERINDEX_PROPERTIES $tableNameId $indexName indexProperties)
+      ->^(TOK_ALTERINDEX_PROPERTIES tableName $indexName indexProperties)
     )
     ;
@@ -1011,23 +1009,23 @@ alterDatabaseSuffixSetOwner
 alterStatementSuffixRename
 @init { pushMsg("rename statement", state); }
 @after { popMsg(state); }
-    : oldName=identifier KW_RENAME KW_TO newName=identifier
+    : oldName=tableName KW_RENAME KW_TO newName=tableName
     -> ^(TOK_ALTERTABLE_RENAME $oldName $newName)
     ;

 alterStatementSuffixAddCol
 @init { pushMsg("add column statement", state); }
 @after { popMsg(state); }
-    : identifier (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
-    -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS identifier columnNameTypeList)
-    ->                 ^(TOK_ALTERTABLE_REPLACECOLS identifier columnNameTypeList)
+    : tableName (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
+    -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS tableName columnNameTypeList)
+    ->                 ^(TOK_ALTERTABLE_REPLACECOLS tableName columnNameTypeList)
     ;

 alterStatementSuffixRenameCol
 @init { pushMsg("rename column name", state); }
 @after { popMsg(state); }
-    : identifier KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition?
-    ->^(TOK_ALTERTABLE_RENAMECOL identifier $oldName $newName colType $comment? alterStatementChangeColPosition?)
+    : tableName KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition?
+    ->^(TOK_ALTERTABLE_RENAMECOL tableName $oldName $newName colType $comment? alterStatementChangeColPosition?)
     ;

 alterStatementChangeColPosition
@@ -1039,8 +1037,8 @@ alterStatementChangeColPosition
 alterStatementSuffixAddPartitions
 @init { pushMsg("add partition statement", state); }
 @after { popMsg(state); }
-    : identifier KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+
-    -> ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+)
+    : tableName KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+
+    -> ^(TOK_ALTERTABLE_ADDPARTS tableName ifNotExists? alterStatementSuffixAddPartitionsElement+)
     ;

 alterStatementSuffixAddPartitionsElement
@@ -1050,22 +1048,22 @@ alterStatementSuffixAddPartitionsElement
 alterStatementSuffixTouch
 @init { pushMsg("touch statement", state); }
 @after { popMsg(state); }
-    : identifier KW_TOUCH (partitionSpec)*
-    -> ^(TOK_ALTERTABLE_TOUCH identifier (partitionSpec)*)
+    : tableName KW_TOUCH (partitionSpec)*
    -> ^(TOK_ALTERTABLE_TOUCH tableName (partitionSpec)*)
     ;

 alterStatementSuffixArchive
 @init { pushMsg("archive statement", state); }
 @after { popMsg(state); }
-    : identifier KW_ARCHIVE (partitionSpec)*
-    -> ^(TOK_ALTERTABLE_ARCHIVE identifier (partitionSpec)*)
+    : tableName KW_ARCHIVE (partitionSpec)*
+    -> ^(TOK_ALTERTABLE_ARCHIVE tableName (partitionSpec)*)
     ;

 alterStatementSuffixUnArchive
 @init { pushMsg("unarchive statement", state); }
 @after { popMsg(state); }
-    : identifier KW_UNARCHIVE (partitionSpec)*
-    -> ^(TOK_ALTERTABLE_UNARCHIVE identifier (partitionSpec)*)
+    : tableName KW_UNARCHIVE (partitionSpec)*
+    -> ^(TOK_ALTERTABLE_UNARCHIVE tableName (partitionSpec)*)
     ;

 partitionLocation
@@ -1078,26 +1076,26 @@ partitionLocation
 alterStatementSuffixDropPartitions
 @init { pushMsg("drop partition statement", state); }
 @after { popMsg(state); }
-    : identifier KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)* ignoreProtection?
-    -> ^(TOK_ALTERTABLE_DROPPARTS identifier dropPartitionSpec+ ifExists? ignoreProtection?)
+    : tableName KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)* ignoreProtection?
+    -> ^(TOK_ALTERTABLE_DROPPARTS tableName dropPartitionSpec+ ifExists? ignoreProtection?)
     ;

 alterStatementSuffixProperties
 @init { pushMsg("alter properties statement", state); }
 @after { popMsg(state); }
-    : name=identifier KW_SET KW_TBLPROPERTIES tableProperties
-    -> ^(TOK_ALTERTABLE_PROPERTIES $name tableProperties)
-    | name=identifier KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
-    -> ^(TOK_DROPTABLE_PROPERTIES $name tableProperties ifExists?)
+    : tableName KW_SET KW_TBLPROPERTIES tableProperties
+    -> ^(TOK_ALTERTABLE_PROPERTIES tableName tableProperties)
+    | tableName KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
+    -> ^(TOK_DROPTABLE_PROPERTIES tableName tableProperties ifExists?)
     ;

 alterViewSuffixProperties
 @init { pushMsg("alter view properties statement", state); }
 @after { popMsg(state); }
-    : name=identifier KW_SET KW_TBLPROPERTIES tableProperties
-    -> ^(TOK_ALTERVIEW_PROPERTIES $name tableProperties)
-    | name=identifier KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
-    -> ^(TOK_DROPVIEW_PROPERTIES $name tableProperties ifExists?)
+    : tableName KW_SET KW_TBLPROPERTIES tableProperties
+    -> ^(TOK_ALTERVIEW_PROPERTIES tableName tableProperties)
+    | tableName KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
+    -> ^(TOK_DROPVIEW_PROPERTIES tableName tableProperties ifExists?)
     ;

 alterStatementSuffixSerdeProperties
@@ -1112,8 +1110,8 @@ alterStatementSuffixSerdeProperties
 tablePartitionPrefix
 @init {pushMsg("table partition prefix", state);}
 @after {popMsg(state);}
-    :name=identifier partitionSpec?
-    ->^(TOK_TABLE_PARTITION $name partitionSpec?)
+    : tableName partitionSpec?
+    ->^(TOK_TABLE_PARTITION tableName partitionSpec?)
     ;

 alterTblPartitionStatement
@@ -1192,21 +1190,21 @@ alterStatementSuffixLocation
 alterStatementSuffixSkewedby
 @init {pushMsg("alter skewed by statement", state);}
 @after{popMsg(state);}
-    :name=identifier tableSkewed
-    ->^(TOK_ALTERTABLE_SKEWED $name tableSkewed)
+    : tableName tableSkewed
+    ->^(TOK_ALTERTABLE_SKEWED tableName tableSkewed)
    |
-    name=identifier KW_NOT KW_SKEWED
-    ->^(TOK_ALTERTABLE_SKEWED $name)
+    tableName KW_NOT KW_SKEWED
+    ->^(TOK_ALTERTABLE_SKEWED tableName)
    |
-    name=identifier KW_NOT storedAsDirs
-    ->^(TOK_ALTERTABLE_SKEWED $name storedAsDirs)
+    tableName KW_NOT storedAsDirs
+    ->^(TOK_ALTERTABLE_SKEWED tableName storedAsDirs)
     ;

 alterStatementSuffixExchangePartition
 @init {pushMsg("alter exchange partition", state);}
 @after{popMsg(state);}
-    : name=tableName KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename=tableName
-    -> ^(TOK_EXCHANGEPARTITION $name partitionSpec $exchangename)
+    : tableName KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename=tableName
+    -> ^(TOK_EXCHANGEPARTITION tableName partitionSpec $exchangename)
     ;

 alterStatementSuffixProtectMode
@@ -1315,14 +1313,14 @@ showStatement
 @after { popMsg(state); }
     : KW_SHOW (KW_DATABASES|KW_SCHEMAS) (KW_LIKE showStmtIdentifier)? -> ^(TOK_SHOWDATABASES showStmtIdentifier?)
     | KW_SHOW KW_TABLES ((KW_FROM|KW_IN) db_name=identifier)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?  -> ^(TOK_SHOWTABLES (TOK_FROM $db_name)? showStmtIdentifier?)
-    | KW_SHOW KW_COLUMNS (KW_FROM|KW_IN) tabname=tableName ((KW_FROM|KW_IN) db_name=identifier)?
-    -> ^(TOK_SHOWCOLUMNS $db_name? $tabname)
+    | KW_SHOW KW_COLUMNS (KW_FROM|KW_IN) tableName ((KW_FROM|KW_IN) db_name=identifier)?
+    -> ^(TOK_SHOWCOLUMNS tableName $db_name?)
     | KW_SHOW KW_FUNCTIONS showFunctionIdentifier?  -> ^(TOK_SHOWFUNCTIONS showFunctionIdentifier?)
     | KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? -> ^(TOK_SHOWPARTITIONS $tabName partitionSpec?)
     | KW_SHOW KW_CREATE KW_TABLE tabName=tableName -> ^(TOK_SHOW_CREATETABLE $tabName)
     | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=identifier)? KW_LIKE showStmtIdentifier partitionSpec?
    -> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?)
-    | KW_SHOW KW_TBLPROPERTIES tblName=identifier (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES $tblName $prptyName?)
+    | KW_SHOW KW_TBLPROPERTIES tableName (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES tableName $prptyName?)
     | KW_SHOW KW_LOCKS (parttype=partTypeExpr)? (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWLOCKS $parttype? $isExtended?)
     | KW_SHOW KW_LOCKS (KW_DATABASE|KW_SCHEMA) (dbName=Identifier) (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWDBLOCKS $dbName $isExtended?)
     | KW_SHOW (showOptions=KW_FORMATTED)? (KW_INDEX|KW_INDEXES) KW_ON showStmtIdentifier ((KW_FROM|KW_IN) db_name=identifier)?
@@ -1455,26 +1453,25 @@ privilegeIncludeColObject
 @init {pushMsg("privilege object including columns", state);}
 @after {popMsg(state);}
     : KW_ALL -> ^(TOK_RESOURCE_ALL)
-    | privObjectType identifier (LPAREN cols=columnNameList RPAREN)? partitionSpec?
-    -> ^(TOK_PRIV_OBJECT_COL identifier privObjectType $cols? partitionSpec?)
+    | privObjectCols -> ^(TOK_PRIV_OBJECT_COL privObjectCols)
     ;

 privilegeObject
-@init {pushMsg("privilege subject", state);}
+@init {pushMsg("privilege object", state);}
 @after {popMsg(state);}
-    : KW_ON privObjectType identifier partitionSpec?
-    -> ^(TOK_PRIV_OBJECT identifier privObjectType partitionSpec?)
+    : KW_ON privObject -> ^(TOK_PRIV_OBJECT privObject)
     ;

 // database or table type. Type is optional, default type is table
-privObjectType
-@init {pushMsg("privilege object type type", state);}
-@after {popMsg(state);}
-    : (KW_DATABASE|KW_SCHEMA) -> ^(TOK_DB_TYPE)
-    | KW_TABLE? -> ^(TOK_TABLE_TYPE)
+privObject
+    : (KW_DATABASE|KW_SCHEMA) identifier -> ^(TOK_DB_TYPE identifier)
+    | KW_TABLE? tableName partitionSpec? -> ^(TOK_TABLE_TYPE tableName partitionSpec?)
     ;

+privObjectCols
+    : (KW_DATABASE|KW_SCHEMA) identifier -> ^(TOK_DB_TYPE identifier)
+    | KW_TABLE? tableName (LPAREN cols=columnNameList RPAREN)? partitionSpec? -> ^(TOK_TABLE_TYPE tableName $cols? partitionSpec?)
+    ;

 privilegeList
 @init {pushMsg("grant privilege list", state);}
@@ -1551,8 +1548,8 @@ withAdminOption
 metastoreCheck
 @init { pushMsg("metastore check statement", state); }
 @after { popMsg(state); }
-    : KW_MSCK (repair=KW_REPAIR)? (KW_TABLE table=identifier partitionSpec? (COMMA partitionSpec)*)?
-    -> ^(TOK_MSCK $repair? ($table partitionSpec*)?)
+    : KW_MSCK (repair=KW_REPAIR)? (KW_TABLE tableName partitionSpec? (COMMA partitionSpec)*)?
+    -> ^(TOK_MSCK $repair? (tableName partitionSpec*)?)
     ;

 resourceList
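The net effect of the grammar changes above is that these ALTER/SHOW/MSCK rules take a tableName (optionally db-qualified) where they previously took a bare identifier. A few statements the revised grammar should accept, collected as plain strings (illustrative HiveQL with made-up db/table names, not taken from the commit):

    class NowParseable {
      static final String[] EXAMPLES = {
          "ALTER TABLE db1.t1 RENAME TO db2.t2",       // alterStatementSuffixRename
          "ALTER TABLE db1.t1 ADD PARTITION (ds='1')", // alterStatementSuffixAddPartitions
          "ALTER INDEX idx1 ON db1.t1 REBUILD",        // alterIndexStatementSuffix
          "SHOW TBLPROPERTIES db1.t1",                 // showStatement
          "MSCK REPAIR TABLE db1.t1"                   // metastoreCheck
      };
    }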
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java Wed Aug 13 02:28:54 2014
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -134,10 +135,10 @@ public class IndexUpdater {
     inputs.addAll(driver.getPlan().getInputs());
   }
-
   private boolean containsPartition(Index index, Map<String, String> partSpec)
-    throws HiveException {
-    Table indexTable = hive.getTable(index.getIndexTableName());
+      throws HiveException {
+    String[] qualified = Utilities.getDbTableName(index.getDbName(), index.getIndexTableName());
+    Table indexTable = hive.getTable(qualified[0], qualified[1]);
     List<Partition> parts = hive.getPartitions(indexTable, partSpec);
     return (parts == null || parts.size() == 0);
   }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Wed Aug 13 02:28:54 2014
@@ -613,7 +613,7 @@ public class ParseContext {
   }

   public PrunedPartitionList getPrunedPartitions(String alias, TableScanOperator ts)
-      throws HiveException {
+      throws SemanticException {
     PrunedPartitionList partsList = opToPartList.get(ts);
     if (partsList == null) {
       partsList = PartitionPruner.prune(ts, this, alias);

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Aug 13 02:28:54 2014
@@ -590,7 +590,7 @@ public class SemanticAnalyzer extends Ba
         if(containsLeadLagUDF(expressionTree)) {
           throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
         }
-        aggregations.put(expressionTree.toStringTree(), expressionTree);
+        aggregations.put(expressionTree.toStringTree().toLowerCase(), expressionTree);
         FunctionInfo fi = FunctionRegistry.getFunctionInfo(functionName);
         if (!fi.isNative()) {
           unparseTranslator.addIdentifierTranslation((ASTNode) expressionTree
@@ -2695,6 +2695,7 @@ public class SemanticAnalyzer extends Ba
           throw new SemanticException(generateErrorMessage(rowChild,
               ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg()));
         }
+        break;
       case HiveParser.TOK_TABLEROWFORMATNULL:
         String nullFormat = unescapeSQLString(rowChild.getChild(0).getText());
         tblDesc.getProperties().setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT,
@@ -10326,7 +10327,7 @@ public class SemanticAnalyzer extends Ba
     // check for existence of table
     if (ifNotExists) {
       try {
-        Table table = getTableWithQN(tableName, false);
+        Table table = getTable(tableName, false);
         if (table != null) { // table exists
           return null;
         }
@@ -10391,7 +10392,7 @@ public class SemanticAnalyzer extends Ba
     tblProps = addDefaultProperties(tblProps);

     if (isTemporary) {
-      Table likeTable = getTableWithQN(likeTableName, false);
+      Table likeTable = getTable(likeTableName, false);
       if (likeTable != null && likeTable.getPartCols().size() > 0) {
         throw new SemanticException("Partition columns are not supported on temporary tables "
             + "and source table in CREATE TABLE LIKE is partitioned.");
@@ -10511,7 +10512,7 @@ public class SemanticAnalyzer extends Ba
   private void validateCreateView(CreateViewDesc createVwDesc)
     throws SemanticException {
     try {
-      Table oldView = getTableWithQN(createVwDesc.getViewName(), false);
+      Table oldView = getTable(createVwDesc.getViewName(), false);

       // ALTER VIEW AS SELECT requires the view must exist
       if (createVwDesc.getIsAlterViewAs() && oldView == null) {
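The SemanticAnalyzer change above keys the aggregation map on the lower-cased string form of the expression tree, so COUNT(x) and count(x) collapse to a single entry. The deduplication idea in isolation (a sketch of the keying, not the Hive data structure):

    import java.util.LinkedHashMap;
    import java.util.Map;

    class AggDedup {
      public static void main(String[] args) {
        Map<String, String> aggregations = new LinkedHashMap<>();
        // Lower-cased key: case variants of the same aggregate share one slot.
        for (String expr : new String[] {"COUNT(x)", "count(x)", "SUM(y)"}) {
          aggregations.put(expr.toLowerCase(), expr);
        }
        System.out.println(aggregations.size()); // 2, not 3
      }
    }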
case HiveParser.TOK_TABLEROWFORMATNULL: String nullFormat = unescapeSQLString(rowChild.getChild(0).getText()); tblDesc.getProperties().setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, @@ -10326,7 +10327,7 @@ public class SemanticAnalyzer extends Ba // check for existence of table if (ifNotExists) { try { - Table table = getTableWithQN(tableName, false); + Table table = getTable(tableName, false); if (table != null) { // table exists return null; } @@ -10391,7 +10392,7 @@ public class SemanticAnalyzer extends Ba tblProps = addDefaultProperties(tblProps); if (isTemporary) { - Table likeTable = getTableWithQN(likeTableName, false); + Table likeTable = getTable(likeTableName, false); if (likeTable != null && likeTable.getPartCols().size() > 0) { throw new SemanticException("Partition columns are not supported on temporary tables " + "and source table in CREATE TABLE LIKE is partitioned."); @@ -10511,7 +10512,7 @@ public class SemanticAnalyzer extends Ba private void validateCreateView(CreateViewDesc createVwDesc) throws SemanticException { try { - Table oldView = getTableWithQN(createVwDesc.getViewName(), false); + Table oldView = getTable(createVwDesc.getViewName(), false); // ALTER VIEW AS SELECT requires the view must exist if (createVwDesc.getIsAlterViewAs() && oldView == null) { Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1617652&r1=1617651&r2=1617652&view=diff ============================================================================== --- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (original) +++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java Wed Aug 13 02:28:54 2014 @@ -161,7 +161,6 @@ public class HiveAuthorizationTaskFactor PrincipalDesc principalDesc = null; PrivilegeObjectDesc privHiveObj = null; - List cols = null; ASTNode param = null; if (ast.getChildCount() > 0) { @@ -176,30 +175,12 @@ public class HiveAuthorizationTaskFactor if (param.getType() == HiveParser.TOK_RESOURCE_ALL) { privHiveObj = new PrivilegeObjectDesc(); } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) { - privHiveObj = new PrivilegeObjectDesc(); - //set object name - String text = param.getChild(0).getText(); - privHiveObj.setObject(BaseSemanticAnalyzer.unescapeIdentifier(text)); - //set object type - ASTNode objTypeNode = (ASTNode) param.getChild(1); - privHiveObj.setTable(objTypeNode.getToken().getType() == HiveParser.TOK_TABLE_TYPE); - - //set col and partition spec if specified - for (int i = 2; i < param.getChildCount(); i++) { - ASTNode partOrCol = (ASTNode) param.getChild(i); - if (partOrCol.getType() == HiveParser.TOK_PARTSPEC) { - privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(partOrCol)); - } else if (partOrCol.getType() == HiveParser.TOK_TABCOLNAME) { - cols = BaseSemanticAnalyzer.getColumnNames(partOrCol); - } else { - throw new SemanticException("Invalid token type " + partOrCol.getType()); - } - } + privHiveObj = parsePrivObject(param); } } ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), - principalDesc, privHiveObj, cols); + principalDesc, privHiveObj); return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf); } @@ -219,7 +200,7 @@ public class HiveAuthorizationTaskFactor 
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java Wed Aug 13 02:28:54 2014
@@ -161,7 +161,6 @@ public class HiveAuthorizationTaskFactor
     PrincipalDesc principalDesc = null;
     PrivilegeObjectDesc privHiveObj = null;
-    List cols = null;

     ASTNode param = null;
     if (ast.getChildCount() > 0) {
@@ -176,30 +175,12 @@ public class HiveAuthorizationTaskFactor
       if (param.getType() == HiveParser.TOK_RESOURCE_ALL) {
         privHiveObj = new PrivilegeObjectDesc();
       } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
-        privHiveObj = new PrivilegeObjectDesc();
-        //set object name
-        String text = param.getChild(0).getText();
-        privHiveObj.setObject(BaseSemanticAnalyzer.unescapeIdentifier(text));
-        //set object type
-        ASTNode objTypeNode = (ASTNode) param.getChild(1);
-        privHiveObj.setTable(objTypeNode.getToken().getType() == HiveParser.TOK_TABLE_TYPE);
-
-        //set col and partition spec if specified
-        for (int i = 2; i < param.getChildCount(); i++) {
-          ASTNode partOrCol = (ASTNode) param.getChild(i);
-          if (partOrCol.getType() == HiveParser.TOK_PARTSPEC) {
-            privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(partOrCol));
-          } else if (partOrCol.getType() == HiveParser.TOK_TABCOLNAME) {
-            cols = BaseSemanticAnalyzer.getColumnNames(partOrCol);
-          } else {
-            throw new SemanticException("Invalid token type " + partOrCol.getType());
-          }
-        }
+        privHiveObj = parsePrivObject(param);
       }
     }

     ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(),
-        principalDesc, privHiveObj, cols);
+        principalDesc, privHiveObj);
     return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf);
   }

@@ -219,7 +200,7 @@ public class HiveAuthorizationTaskFactor
     boolean isAdmin = false;
     if((isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION) ||
        (!isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_ADMIN_OPTION_FOR)){
-      rolesStartPos = 2; //start reading role names from next postion
+      rolesStartPos = 2; //start reading role names from next position
       isAdmin = true;
     }

@@ -242,20 +223,10 @@ public class HiveAuthorizationTaskFactor
       HashSet outputs)
       throws SemanticException {

-    PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
-    //set object identifier
-    subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()));
-    //set object type
-    ASTNode objTypeNode = (ASTNode) ast.getChild(1);
-    subject.setTable(objTypeNode.getToken().getType() == HiveParser.TOK_TABLE_TYPE);
-    if (ast.getChildCount() == 3) {
-      //if partition spec node is present, set partition spec
-      ASTNode partSpecNode = (ASTNode) ast.getChild(2);
-      subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(partSpecNode));
-    }
+    PrivilegeObjectDesc subject = parsePrivObject(ast);

     if (subject.getTable()) {
-      Table tbl = getTable(SessionState.get().getCurrentDatabase(), subject.getObject());
+      Table tbl = getTable(subject.getObject());
       if (subject.getPartSpec() != null) {
         Partition part = getPartition(tbl, subject.getPartSpec());
         outputs.add(new WriteEntity(part, WriteEntity.WriteType.DDL_NO_LOCK));
@@ -267,6 +238,30 @@ public class HiveAuthorizationTaskFactor
     return subject;
   }

+  private PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
+    PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
+    ASTNode child = (ASTNode) ast.getChild(0);
+    ASTNode gchild = (ASTNode)child.getChild(0);
+    if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
+      subject.setTable(true);
+      String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild);
+      subject.setObject(BaseSemanticAnalyzer.getDotName(qualified));
+    } else {
+      subject.setTable(false);
+      subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText()));
+    }
+    //if partition spec node is present, set partition spec
+    for (int i = 1; i < child.getChildCount(); i++) {
+      gchild = (ASTNode) child.getChild(i);
+      if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
+        subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild));
+      } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
+        subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild));
+      }
+    }
+    return subject;
+  }
+
   private List analyzePrivilegeListDef(ASTNode node)
       throws SemanticException {
     List ret = new ArrayList();
@@ -289,6 +284,10 @@ public class HiveAuthorizationTaskFactor
     return ret;
   }

+  private Table getTable(String tblName) throws SemanticException {
+    return getTable(null, tblName);
+  }
+
   private Table getTable(String database, String tblName)
       throws SemanticException {
     try {
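The new parsePrivObject helper routes qualified table references through getQualifiedTableName/getDotName instead of unescaping a single identifier. A toy rendition of the string contract involved (the null-db behavior shown here is an assumption for illustration, not taken from the commit):

    // DotNameSketch.java -- illustrative only; the real methods operate on ASTNodes.
    public class DotNameSketch {
      /** Rejoins a {db, table} pair into dotted form. */
      static String getDotName(String[] qname) {
        return qname[0] == null ? qname[1] : qname[0] + "." + qname[1];
      }

      public static void main(String[] args) {
        System.out.println(getDotName(new String[] { "hr", "people" })); // hr.people
        System.out.println(getDotName(new String[] { null, "people" })); // people
      }
    }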
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterIndexDesc.java Wed Aug 13 02:28:54 2014
@@ -19,13 +19,7 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.Map;
-import java.util.List;
-
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.ql.exec.Utilities;

 /**
  * AlterIndexDesc.
@@ -36,7 +30,6 @@ public class AlterIndexDesc extends DDLD
   private static final long serialVersionUID = 1L;
   private String indexName;
   private String baseTable;
-  private String dbName;
   private Map partSpec; // partition specification of partitions touched
   private Map props;

@@ -105,21 +98,6 @@ public class AlterIndexDesc extends DDLD
   }

   /**
-   * @return the name of the database that the base table is in
-   */
-  public String getDbName() {
-    return dbName;
-  }
-
-  /**
-   * @param dbName
-   *          the dbName to set
-   */
-  public void setDbName(String dbName) {
-    this.dbName = dbName;
-  }
-
-  /**
    * @return the op
    */
   public AlterIndexTypes getOp() {

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableAlterPartDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableAlterPartDesc.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableAlterPartDesc.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableAlterPartDesc.java Wed Aug 13 02:28:54 2014
@@ -20,27 +20,20 @@ package org.apache.hadoop.hive.ql.plan;

 import org.apache.hadoop.hive.metastore.api.FieldSchema;

-import java.util.List;
-
 public class AlterTableAlterPartDesc extends DDLDesc {
   private String tableName;
-  private String dbName;
   private FieldSchema partKeySpec;

   public AlterTableAlterPartDesc() {
   }

   /**
-   * @param dbName
-   *          database that contains the table / partition
    * @param tableName
    *          table containing the partition
    * @param partKeySpec
-          key column specification.
    */
-  public AlterTableAlterPartDesc(String dbName, String tableName, FieldSchema partKeySpec) {
+  public AlterTableAlterPartDesc(String tableName, FieldSchema partKeySpec) {
     super();
-    this.dbName = dbName;
     this.tableName = tableName;
     this.partKeySpec = partKeySpec;
   }
@@ -53,14 +46,6 @@ public class AlterTableAlterPartDesc ext
     this.tableName = tableName;
   }

-  public String getDbName() {
-    return dbName;
-  }
-
-  public void setDbName(String dbName) {
-    this.dbName = dbName;
-  }
-
   public FieldSchema getPartKeySpec() {
     return partKeySpec;
   }
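Both descriptors above lose their dbName field; callers are now expected to hand over a table name that already carries any database qualifier. A minimal sketch of the slimmed-down shape (class and field names are illustrative, not Hive's):

    // AlterPartDescSketch.java -- illustrative only, not part of the commit.
    public class AlterPartDescSketch {
      private final String tableName; // may be qualified, e.g. "sales_db.orders"
      private final String partKey;

      AlterPartDescSketch(String tableName, String partKey) {
        this.tableName = tableName;
        this.partKey = partKey;
      }

      public static void main(String[] args) {
        // One string carries both db and table; no separate dbName plumbing.
        AlterPartDescSketch d = new AlterPartDescSketch("sales_db.orders", "ds");
        System.out.println(d.tableName + " / " + d.partKey);
      }
    }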
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableSimpleDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableSimpleDesc.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableSimpleDesc.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableSimpleDesc.java Wed Aug 13 02:28:54 2014
@@ -29,7 +29,6 @@ import org.apache.hadoop.hive.ql.plan.Al
  */
 public class AlterTableSimpleDesc extends DDLDesc {
   private String tableName;
-  private String dbName;
   private LinkedHashMap partSpec;
   private String compactionType;

@@ -39,17 +38,12 @@ public class AlterTableSimpleDesc extend
   }

   /**
-   * @param dbName
-   *          database that contains the table / partition
    * @param tableName
    *          table containing the partition
    * @param partSpec
-          partition specification. Null if touching a table.
    */
-  public AlterTableSimpleDesc(String dbName, String tableName,
-      Map partSpec, AlterTableDesc.AlterTableTypes type) {
-    super();
-    this.dbName = dbName;
+  public AlterTableSimpleDesc(String tableName,
+      Map partSpec, AlterTableTypes type) {
     this.tableName = tableName;
     if(partSpec == null) {
       this.partSpec = null;
@@ -61,16 +55,14 @@ public class AlterTableSimpleDesc extend

   /**
    * Constructor for ALTER TABLE ... COMPACT.
-   * @param dbname name of the database containing the table
    * @param tableName name of the table to compact
    * @param partSpec partition to compact
    * @param compactionType currently supported values: 'major' and 'minor'
    */
-  public AlterTableSimpleDesc(String dbname, String tableName,
-      LinkedHashMap partSpec, String compactionType) {
+  public AlterTableSimpleDesc(String tableName,
+      LinkedHashMap partSpec, String compactionType) {
     type = AlterTableTypes.COMPACT;
     this.compactionType = compactionType;
-    this.dbName = dbname;
     this.tableName = tableName;
     this.partSpec = partSpec;
   }
@@ -83,14 +75,6 @@ public class AlterTableSimpleDesc extend
     this.tableName = tableName;
   }

-  public String getDbName() {
-    return dbName;
-  }
-
-  public void setDbName(String dbName) {
-    this.dbName = dbName;
-  }
-
   public AlterTableDesc.AlterTableTypes getType() {
     return type;
   }
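A sketch of how the two-argument COMPACT constructor above might now be driven, with the database qualifier folded into the table name (table and partition values are hypothetical; the real caller is Hive's DDL analysis for ALTER TABLE ... COMPACT):

    // CompactRequestSketch.java -- illustrative only, not part of the commit.
    import java.util.LinkedHashMap;

    public class CompactRequestSketch {
      public static void main(String[] args) {
        LinkedHashMap<String, String> partSpec = new LinkedHashMap<>();
        partSpec.put("ds", "2014-08-13");
        // With the dbName parameter gone, the table name itself may carry
        // the database qualifier.
        String tableName = "sales_db.orders";
        String compactionType = "major"; // 'major' or 'minor' per the javadoc
        System.out.println("compact " + tableName + " " + partSpec
            + " as " + compactionType);
      }
    }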
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java Wed Aug 13 02:28:54 2014
@@ -49,8 +49,10 @@ public class MapJoinDesc extends JoinDes
   private transient String bigTableAlias;

   // for tez. used to remember which position maps to which logical input
+  // TODO: should these rather be arrays?
   private Map parentToInput = new HashMap();
-
+  private Map parentKeyCounts = new HashMap();
+
   // for tez. used to remember which type of a Bucket Map Join this is.
   private boolean customBucketMapJoin;

@@ -86,6 +88,7 @@ public class MapJoinDesc extends JoinDes
     this.bigTablePartSpecToFileMapping = clone.bigTablePartSpecToFileMapping;
     this.dumpFilePrefix = clone.dumpFilePrefix;
     this.parentToInput = clone.parentToInput;
+    this.parentKeyCounts = clone.parentKeyCounts;
     this.customBucketMapJoin = clone.customBucketMapJoin;
   }

@@ -128,6 +131,28 @@ public class MapJoinDesc extends JoinDes
     this.parentToInput = parentToInput;
   }

+  public Map getParentKeyCounts() {
+    return parentKeyCounts;
+  }
+
+  @Explain(displayName = "Estimated key counts", normalExplain = false)
+  public String getKeyCountsExplainDesc() {
+    StringBuilder result = null;
+    for (Map.Entry entry : parentKeyCounts.entrySet()) {
+      if (result == null) {
+        result = new StringBuilder();
+      } else {
+        result.append(", ");
+      }
+      result.append(parentToInput.get(entry.getKey())).append(" => ").append(entry.getValue());
+    }
+    return result == null ? null : result.toString();
+  }
+
+  public void setParentKeyCount(Map parentKeyCounts) {
+    this.parentKeyCounts = parentKeyCounts;
+  }
+
   public Map getValueIndices() {
     return valueIndices;
   }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PrivilegeObjectDesc.java Wed Aug 13 02:28:54 2014
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.plan;

 import java.util.HashMap;
+import java.util.List;

 @Explain(displayName="privilege subject")
 public class PrivilegeObjectDesc {
@@ -30,6 +31,8 @@ public class PrivilegeObjectDesc {

   private HashMap partSpec;

+  private List columns;
+
   public PrivilegeObjectDesc(boolean isTable, String object,
       HashMap partSpec) {
     super();
@@ -68,4 +71,11 @@ public class PrivilegeObjectDesc {
     this.partSpec = partSpec;
   }

+  public List getColumns() {
+    return columns;
+  }
+
+  public void setColumns(List columns) {
+    this.columns = columns;
+  }
 }
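getKeyCountsExplainDesc above builds its EXPLAIN fragment by lazily allocating the StringBuilder, so the method can return null when no estimates exist. The same pattern, runnable in isolation with invented sample data:

    // KeyCountsExplainSketch.java -- illustrative only, not part of the commit.
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class KeyCountsExplainSketch {
      public static void main(String[] args) {
        Map<Integer, String> parentToInput = new LinkedHashMap<>();
        parentToInput.put(0, "Map 1");
        parentToInput.put(1, "Map 2");
        Map<Integer, Long> parentKeyCounts = new LinkedHashMap<>();
        parentKeyCounts.put(0, 1000L);
        parentKeyCounts.put(1, 42L);

        // Lazily created builder: stays null when the map is empty.
        StringBuilder result = null;
        for (Map.Entry<Integer, Long> entry : parentKeyCounts.entrySet()) {
          if (result == null) {
            result = new StringBuilder();
          } else {
            result.append(", ");
          }
          result.append(parentToInput.get(entry.getKey()))
                .append(" => ").append(entry.getValue());
        }
        System.out.println(result == null ? null : result.toString());
        // prints: Map 1 => 1000, Map 2 => 42
      }
    }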
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RenamePartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RenamePartitionDesc.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RenamePartitionDesc.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RenamePartitionDesc.java Wed Aug 13 02:28:54 2014
@@ -29,7 +29,6 @@ public class RenamePartitionDesc extends
   private static final long serialVersionUID = 1L;

   String tableName;
-  String dbName;
   String location;
   LinkedHashMap oldPartSpec;
   LinkedHashMap newPartSpec;
@@ -50,31 +49,14 @@ public class RenamePartitionDesc extends
   * @param newPartSpec
   *          new partition specification.
   */
-  public RenamePartitionDesc(String dbName, String tableName,
+  public RenamePartitionDesc(String tableName,
       Map oldPartSpec, Map newPartSpec) {
-    super();
-    this.dbName = dbName;
     this.tableName = tableName;
     this.oldPartSpec = new LinkedHashMap(oldPartSpec);
     this.newPartSpec = new LinkedHashMap(newPartSpec);
   }

   /**
-   * @return database name
-   */
-  public String getDbName() {
-    return dbName;
-  }
-
-  /**
-   * @param dbName
-   *          database name
-   */
-  public void setDbName(String dbName) {
-    this.dbName = dbName;
-  }
-
-  /**
   * @return the table we're going to add the partitions to.
   */
  public String getTableName() {

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java Wed Aug 13 02:28:54 2014
@@ -23,7 +23,6 @@ import org.apache.hadoop.fs.Path;

 public class ShowColumnsDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
-  String dbName;
   String tableName;
   String resFile;
   /**
@@ -63,16 +62,6 @@ public class ShowColumnsDesc extends DDL
   }

   /**
-   * @param dbName   name of the database
-   * @param tableName name of table to show columns of
-   */
-  public ShowColumnsDesc(Path resFile, String dbName, String tableName) {
-    this.resFile = resFile.toString();
-    this.dbName = dbName;
-    this.tableName = tableName;
-  }
-
-  /**
    * @return the tableName
    */
   @Explain(displayName = "table name")
@@ -103,12 +92,4 @@ public class ShowColumnsDesc extends DDL
   public void setResFile(String resFile) {
     this.resFile = resFile;
   }
-
-  public String getDbName() {
-    return dbName;
-  }
-
-  public void setDbName(String dbName) {
-    this.dbName = dbName;
-  }
 }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java Wed Aug 13 02:28:54 2014
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.plan;

-import java.util.List;
-
 @Explain(displayName="show grant desc")
 public class ShowGrantDesc {

@@ -26,8 +24,6 @@ public class ShowGrantDesc {

   private PrivilegeObjectDesc hiveObj;

-  private List columns;
-
   private String resFile;

   /**
@@ -42,11 +38,10 @@ public class ShowGrantDesc {
   }

   public ShowGrantDesc(String resFile, PrincipalDesc principalDesc,
-      PrivilegeObjectDesc subjectObj, List columns) {
+      PrivilegeObjectDesc subjectObj) {
     this.resFile = resFile;
     this.principalDesc = principalDesc;
     this.hiveObj = subjectObj;
-    this.columns = columns;
   }

   public static String getSchema() {
@@ -78,12 +73,4 @@ public class ShowGrantDesc {
   public void setResFile(String resFile) {
     this.resFile = resFile;
   }
-
-  public List getColumns() {
-    return columns;
-  }
-
-  public void setColumns(List columns) {
-    this.columns = columns;
-  }
 }
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java Wed Aug 13 02:28:54 2014
@@ -237,12 +237,14 @@ public class Statistics implements Seria
   }

   public ColStatistics getColumnStatisticsFromColName(String colName) {
+    if (columnStats == null) {
+      return null;
+    }
     for (ColStatistics cs : columnStats.values()) {
       if (cs.getColumnName().equalsIgnoreCase(colName)) {
         return cs;
       }
     }
-
     return null;
   }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessor.java Wed Aug 13 02:28:54 2014
@@ -21,7 +21,8 @@ package org.apache.hadoop.hive.ql.proces
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;

 public interface CommandProcessor {
-  public void init();
-  public CommandProcessorResponse run(String command) throws CommandNeedRetryException;
+  void init();
+
+  CommandProcessorResponse run(String command) throws CommandNeedRetryException;
 }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java Wed Aug 13 02:28:54 2014
@@ -74,6 +74,8 @@ public final class CommandProcessorFacto
       return new DfsProcessor(ss.getConf());
     case ADD:
       return new AddResourceProcessor();
+    case LIST:
+      return new ListResourceProcessor();
     case DELETE:
       return new DeleteResourceProcessor();
     case COMPILE:

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java Wed Aug 13 02:28:54 2014
@@ -30,6 +30,7 @@ public enum HiveCommand {
   RESET(),
   DFS(),
   ADD(),
+  LIST(),
   DELETE(),
   COMPILE();
   private static final Set COMMANDS = new HashSet();
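The LIST command addition spans an enum constant in HiveCommand and a new switch arm in CommandProcessorFactory. A toy model of that dispatch (processor names are returned as strings here; ListResourceProcessor itself is added elsewhere in this commit):

    // CommandDispatchSketch.java -- illustrative only, not part of the commit.
    public class CommandDispatchSketch {
      enum Cmd { SET, RESET, DFS, ADD, LIST, DELETE, COMPILE }

      static String getProcessor(Cmd cmd) {
        switch (cmd) {
        case ADD:
          return "AddResourceProcessor";
        case LIST:
          return "ListResourceProcessor"; // the new arm
        case DELETE:
          return "DeleteResourceProcessor";
        default:
          return "Driver";
        }
      }

      public static void main(String[] args) {
        System.out.println(getProcessor(Cmd.LIST)); // ListResourceProcessor
      }
    }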
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java Wed Aug 13 02:28:54 2014
@@ -19,7 +19,6 @@ package org.apache.hadoop.hive.ql.securi

 import java.util.ArrayList;
 import java.util.List;
-import java.util.Set;

 import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
@@ -173,14 +172,15 @@ public class AuthorizationUtils {
         privObj.getPartValues(), privObj.getColumnName());
   }

-  public static HivePrivilegeObject getHivePrivilegeObject(
-      PrivilegeObjectDesc privSubjectDesc, Set columns) throws HiveException {
+  public static HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc)
+      throws HiveException {

     // null means ALL for show grants, GLOBAL for grant/revoke
     HivePrivilegeObjectType objectType = null;
     String[] dbTable;
     List partSpec = null;
+    List columns = null;
     if (privSubjectDesc == null) {
       dbTable = new String[] {null, null};
     } else {
@@ -192,6 +192,7 @@ public class AuthorizationUtils {
       if (privSubjectDesc.getPartSpec() != null) {
         partSpec = new ArrayList(privSubjectDesc.getPartSpec().values());
       }
+      columns = privSubjectDesc.getColumns();
       objectType = getPrivObjectType(privSubjectDesc);
     }
     return new HivePrivilegeObject(objectType, dbTable[0], dbTable[1], partSpec, columns, null);
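The columns argument removed from getHivePrivilegeObject now travels inside PrivilegeObjectDesc, and its type changes from Set to List in the HivePrivilegeObject hunks that follow, presumably so the single-column case can reuse Arrays.asList and keep the columns in statement order. The core of that conversion, runnable on its own:

    // ColumnsAsListSketch.java -- illustrative only, not part of the commit.
    import java.util.Arrays;
    import java.util.List;

    public class ColumnsAsListSketch {
      public static void main(String[] args) {
        String column = "salary";
        // Mirrors the ternary in the patched constructor below.
        List<String> columns = column == null ? null : Arrays.asList(column);
        System.out.println(columns); // [salary]
      }
    }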
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java Wed Aug 13 02:28:54 2014
@@ -19,10 +19,8 @@ package org.apache.hadoop.hive.ql.securi

 import java.util.Arrays;
 import java.util.Collection;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Set;

 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
@@ -91,7 +89,7 @@ public class HivePrivilegeObject impleme
   private final String objectName;
   private final List commandParams;
   private final List partKeys;
-  private Set columns;
+  private final List columns;
   private final HivePrivObjectActionType actionType;

   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName) {
@@ -106,7 +104,7 @@ public class HivePrivilegeObject impleme
   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
       List partKeys, String column) {
     this(type, dbname, objectName, partKeys,
-        column == null ? null : new HashSet(Arrays.asList(column)),
+        column == null ? null : Arrays.asList(column),
         HivePrivObjectActionType.OTHER, null);
   }

@@ -121,12 +119,12 @@ public class HivePrivilegeObject impleme
   }

   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
-      List partKeys, Set columns, List commandParams) {
+      List partKeys, List columns, List commandParams) {
     this(type, dbname, objectName, partKeys, columns, HivePrivObjectActionType.OTHER,
         commandParams);
   }

   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
-      List partKeys, Set columns, HivePrivObjectActionType actionType,
+      List partKeys, List columns, HivePrivObjectActionType actionType,
       List commandParams) {
     this.type = type;
     this.dbname = dbname;
@@ -170,7 +168,7 @@ public class HivePrivilegeObject impleme
    * Column information is not set for DDL operations and for tables being written into
    * @return list of applicable columns
    */
-  public Set getColumns() {
+  public List getColumns() {
     return columns;
   }

@@ -218,9 +216,4 @@ public class HivePrivilegeObject impleme
   private String getDbObjectName(String dbname2, String objectName2) {
     return (dbname == null ? "" : dbname + ".") + objectName;
   }
-
-  public void setColumns(Set columnms) {
-    this.columns = columnms;
-  }
-
 }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java?rev=1617652&r1=1617651&r2=1617652&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java Wed Aug 13 02:28:54 2014
@@ -319,7 +319,7 @@ public class HiveV1Authorizer implements
         privs.addAll(hive.showPrivilegeGrant(HiveObjectType.DATABASE, name, type,
             dbObj.getName(), null, null, null));
       } else {
-        Set columns = privObj.getColumns();
+        List columns = privObj.getColumns();
         if (columns != null && !columns.isEmpty()) {
           // show column level privileges
           for (String columnName : columns) {