From: sseth@apache.org
To: commits@hive.apache.org
Date: Mon, 04 Apr 2016 22:37:45 -0000
Subject: [01/24] hive git commit: HIVE-13318: Cache the result of getTable from metastore (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/llap a7b0ca733 -> 79c1c691e


HIVE-13318: Cache the result of getTable from metastore (Pengcheng Xiong, reviewed by Ashutosh Chauhan)

Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/255069e4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/255069e4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/255069e4

Branch: refs/heads/llap
Commit: 255069e4f1bb1ac874f5a3472ebed1abf26e8187
Parents: 4e9f95a
Author: Pengcheng Xiong
Authored: Wed Mar 30 14:43:44 2016 -0700
Committer: Pengcheng Xiong
Committed: Wed Mar 30 14:43:44 2016 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/parse/ParseContext.java      |  7 +++++
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  | 29 +++++++++++---------
 .../hadoop/hive/ql/parse/TaskCompiler.java      |  2 +-
 3 files changed, 24 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/255069e4/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
index 95c254c..1bccf20 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
@@ -87,6 +87,7 @@ public class ParseContext {
   // reducer
   private Map<String, PrunedPartitionList> prunedPartitions;
   private Map<String, ReadEntity> viewAliasToInput;
+  private Map<String, Table> tabNameToTabObject;
 
   /**
    * The lineage information.
@@ -162,6 +163,7 @@ public class ParseContext {
       Context ctx, HashMap<String, String> idToTableNameMap, int destTableId,
       UnionProcContext uCtx, List<AbstractMapJoinOperator<? extends MapJoinDesc>> listMapJoinOpsNoReducer,
       Map<String, PrunedPartitionList> prunedPartitions,
+      Map<String, Table> tabNameToTabObject,
       HashMap<TableScanOperator, SampleDesc> opToSamplePruner, GlobalLimitCtx globalLimitCtx,
       HashMap<String, SplitSample> nameToSplitSample,
@@ -185,6 +187,7 @@ public class ParseContext {
     this.uCtx = uCtx;
     this.listMapJoinOpsNoReducer = listMapJoinOpsNoReducer;
     this.prunedPartitions = prunedPartitions;
+    this.tabNameToTabObject = tabNameToTabObject;
     this.opToSamplePruner = opToSamplePruner;
     this.nameToSplitSample = nameToSplitSample;
     this.globalLimitCtx = globalLimitCtx;
@@ -577,4 +580,8 @@ public class ParseContext {
   public void setNeedViewColumnAuthorization(boolean needViewColumnAuthorization) {
     this.needViewColumnAuthorization = needViewColumnAuthorization;
   }
+
+  public Map<String, Table> getTabNameToTabObject() {
+    return tabNameToTabObject;
+  }
 }
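
The ParseContext changes above are plumbing: the per-query table cache has to survive the hand-off between compilation phases, which is why the new map is threaded through the constructor and exposed via a getter (TaskCompiler passes it back in the hunk further below). A minimal sketch of that hand-off pattern, using hypothetical stand-in classes (ParseContextSketch, AnalyzerSketch, a bare Table) rather than Hive's real ones:

import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-ins; Hive's real ParseContext carries many more fields.
class Table {}

class ParseContextSketch {
  private final Map<String, Table> tabNameToTabObject;

  ParseContextSketch(Map<String, Table> tabNameToTabObject) {
    this.tabNameToTabObject = tabNameToTabObject;
  }

  // The getter added in the hunk above: later phases can reuse the same cache.
  Map<String, Table> getTabNameToTabObject() {
    return tabNameToTabObject;
  }
}

class AnalyzerSketch {
  private Map<String, Table> tabNameToTabObject = new HashMap<>();

  // Mirrors SemanticAnalyzer.initParseCtx(): adopt the earlier phase's cache
  // instead of starting from an empty map and re-fetching every table.
  void initParseCtx(ParseContextSketch pctx) {
    tabNameToTabObject = pctx.getTabNameToTabObject();
  }

  // Mirrors getParseContext(): hand the same map instance to the next phase.
  ParseContextSketch getParseContext() {
    return new ParseContextSketch(tabNameToTabObject);
  }
}

The point is that getParseContext() and initParseCtx() pass one Map instance around, so a table fetched during one phase is still cached when the next phase asks for it.
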
http://git-wip-us.apache.org/repos/asf/hive/blob/255069e4/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index adee14b..e81d46e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -324,7 +324,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
   protected AnalyzeRewriteContext analyzeRewrite;
 
   // A mapping from a tableName to a table object in metastore.
-  Map<String, Table> tableNameToMetaDataTableObject;
+  Map<String, Table> tabNameToTabObject;
 
   // The tokens we should ignore when we are trying to do table masking.
   private final Set<Integer> ignoredTokens = Sets.newHashSet(HiveParser.TOK_GROUPBY,
@@ -359,6 +359,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     listMapJoinOpsNoReducer = new ArrayList<AbstractMapJoinOperator<? extends MapJoinDesc>>();
     groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
     prunedPartitions = new HashMap<String, PrunedPartitionList>();
+    tabNameToTabObject = new HashMap<String, Table>();
     unparseTranslator = new UnparseTranslator(conf);
     autogenColAliasPrfxLbl = HiveConf.getVar(conf,
         HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL);
@@ -371,7 +372,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     viewAliasToInput = new HashMap<String, ReadEntity>();
     noscan = partialscan = false;
     tableMask = new TableMask(this, conf);
-    tableNameToMetaDataTableObject = new HashMap<>();
+    tabNameToTabObject = new HashMap<>();
   }
 
   @Override
@@ -380,6 +381,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     if(clearPartsCache) {
       prunedPartitions.clear();
     }
+    tabNameToTabObject.clear();
     loadTableWork.clear();
     loadFileWork.clear();
     topOps.clear();
@@ -429,6 +431,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     uCtx = pctx.getUCtx();
     listMapJoinOpsNoReducer = pctx.getListMapJoinOpsNoReducer();
     prunedPartitions = pctx.getPrunedPartitions();
+    tabNameToTabObject = pctx.getTabNameToTabObject();
     fetchTask = pctx.getFetchTask();
     setLineageInfo(pctx.getLineageInfo());
   }
@@ -440,7 +443,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         new HashSet<JoinOperator>(joinContext.keySet()),
         new HashSet<SMBMapJoinOperator>(smbMapJoinContext.keySet()),
         loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
-        listMapJoinOpsNoReducer, prunedPartitions,
+        listMapJoinOpsNoReducer, prunedPartitions, tabNameToTabObject,
         opToSamplePruner, globalLimitCtx, nameToSplitSample,
         inputs, rootTasks, opToPartToSkewedPruner, viewAliasToInput,
         reduceSinkOperatorsAddedByEnforceBucketingSorting, analyzeRewrite, tableDesc, queryProperties, viewProjectToTableSchema);
@@ -1606,7 +1609,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     }
     Table table = null;
     try {
-      table = db.getTable(tableName);
+      table = this.getTableObjectByName(tableName);
     } catch (HiveException ex) {
       throw new SemanticException(ex);
     }
@@ -10344,13 +10347,13 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     }
   }
 
-  private Table getMetaDataTableObjectByName(String tableName) throws HiveException {
-    if (!tableNameToMetaDataTableObject.containsKey(tableName)) {
+  private Table getTableObjectByName(String tableName) throws HiveException {
+    if (!tabNameToTabObject.containsKey(tableName)) {
       Table table = db.getTable(tableName);
-      tableNameToMetaDataTableObject.put(tableName, table);
+      tabNameToTabObject.put(tableName, table);
       return table;
     } else {
-      return tableNameToMetaDataTableObject.get(tableName);
+      return tabNameToTabObject.get(tableName);
     }
   }
 
@@ -10400,7 +10403,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     String replacementText = null;
     Table table = null;
     try {
-      table = getMetaDataTableObjectByName(tabIdName);
+      table = getTableObjectByName(tabIdName);
     } catch (HiveException e) {
       throw new SemanticException("Table " + tabIdName + " is not found.");
     }
@@ -10636,7 +10639,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         new HashSet<JoinOperator>(joinContext.keySet()),
         new HashSet<SMBMapJoinOperator>(smbMapJoinContext.keySet()),
         loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
-        listMapJoinOpsNoReducer, prunedPartitions, opToSamplePruner,
+        listMapJoinOpsNoReducer, prunedPartitions, tabNameToTabObject, opToSamplePruner,
         globalLimitCtx, nameToSplitSample, inputs, rootTasks,
         opToPartToSkewedPruner, viewAliasToInput,
         reduceSinkOperatorsAddedByEnforceBucketingSorting, analyzeRewrite, tableDesc, queryProperties, viewProjectToTableSchema);
@@ -11671,7 +11674,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     Set<String> tableAliases = qb.getTabAliases();
     for (String alias : tableAliases) {
       try {
-        Table table = db.getTable(qb.getTabNameForAlias(alias));
+        Table table = this.getTableObjectByName(qb.getTabNameForAlias(alias));
         if (table.isTemporary()) {
           throw new SemanticException("View definition references temporary table " + alias);
         }
@@ -11874,7 +11877,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
     Table tbl;
     try {
-      tbl = db.getTable(tableName);
+      tbl = this.getTableObjectByName(tableName);
     } catch (InvalidTableException e) {
       throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e);
     }
@@ -11903,7 +11906,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
     Table tbl;
     try {
-      tbl = db.getTable(tableName);
+      tbl = this.getTableObjectByName(tableName);
     } catch (InvalidTableException e) {
       throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e);
     } catch (HiveException e) {
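
The core of the change is getTableObjectByName above: a cache-aside lookup that memoizes db.getTable results for the lifetime of one compilation, so every call site that used to go straight to the metastore now checks the map first. A self-contained sketch of the same pattern, with an invented Metastore interface and stub Table/HiveException types standing in for Hive's real classes:

import java.util.HashMap;
import java.util.Map;

public class TableLookupCache {
  // Invented stand-ins so the sketch compiles on its own.
  static class Table {}
  static class HiveException extends Exception {}
  interface Metastore { Table getTable(String tableName) throws HiveException; }

  private final Map<String, Table> tabNameToTabObject = new HashMap<>();
  private final Metastore db;

  TableLookupCache(Metastore db) {
    this.db = db;
  }

  // Same shape as getTableObjectByName: the first lookup per name pays the
  // metastore round trip, every later lookup is a local map hit.
  Table getTableObjectByName(String tableName) throws HiveException {
    if (!tabNameToTabObject.containsKey(tableName)) {
      Table table = db.getTable(tableName);
      tabNameToTabObject.put(tableName, table);
      return table;
    }
    return tabNameToTabObject.get(tableName);
  }

  // Mirrors init(): the cache is per query, so it is dropped wholesale
  // between compilations rather than invalidated entry by entry.
  void clear() {
    tabNameToTabObject.clear();
  }
}

Note the trade-off this relies on: nothing invalidates an entry mid-query, which is safe here only because the map lives for a single compilation and init() clears it; a longer-lived cache would need invalidation on DDL. Map.computeIfAbsent would be the more idiomatic form today, except that the checked HiveException thrown by the lookup does not fit its lambda signature.
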

http://git-wip-us.apache.org/repos/asf/hive/blob/255069e4/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index 8e64a0b..f7d7a40 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -399,7 +399,7 @@ public abstract class TaskCompiler {
         pCtx.getLoadTableWork(), pCtx.getLoadFileWork(), pCtx.getContext(),
         pCtx.getIdToTableNameMap(), pCtx.getDestTableId(), pCtx.getUCtx(),
         pCtx.getListMapJoinOpsNoReducer(),
-        pCtx.getPrunedPartitions(), pCtx.getOpToSamplePruner(), pCtx.getGlobalLimitCtx(),
+        pCtx.getPrunedPartitions(), pCtx.getTabNameToTabObject(), pCtx.getOpToSamplePruner(), pCtx.getGlobalLimitCtx(),
        pCtx.getNameToSplitSample(), pCtx.getSemanticInputs(), rootTasks,
        pCtx.getOpToPartToSkewedPruner(), pCtx.getViewAliasToInput(),
        pCtx.getReduceSinkOperatorsAddedByEnforceBucketingSorting(),
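
To see the effect the commit is after, here is a toy harness (invented for illustration, not part of the commit) that counts simulated metastore round trips; a query referencing the same table several times, as in a self-join, now costs one getTable call per distinct table name instead of one per reference:

import java.util.HashMap;
import java.util.Map;

public class CacheDemo {
  static class Table {}

  public static void main(String[] args) {
    final int[] rpcCount = {0};
    Map<String, Table> cache = new HashMap<>();

    // Stand-in for db.getTable(): counts how often the "metastore" is hit.
    java.util.function.Function<String, Table> metastore = name -> {
      rpcCount[0]++;
      return new Table();
    };

    // A query touching the same table five times, e.g. a five-way self-join.
    String[] references = {"t1", "t1", "t1", "t1", "t1"};
    for (String name : references) {
      cache.computeIfAbsent(name, metastore);
    }

    // Prints 1: one round trip for five references; uncached it would be 5.
    System.out.println("metastore calls: " + rpcCount[0]);
  }
}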