Mailing-List: contact commits-help@hive.apache.org; run by ezmlm
Precedence: bulk
Reply-To: hive-dev@hive.apache.org
Subject: svn commit: r1569164 [5/5] - in /hive/trunk: common/ common/src/java/org/apache/hadoop/hive/ant/ common/src/java/org/apache/hadoop/hive/conf/ common/src/java/org/apache/hive/common/util/ common/src/test/org/apache/hadoop/hive/conf/ conf/ itests/hive-un...
Date: Tue, 18 Feb 2014 02:18:37 -0000
To: commits@hive.apache.org
From: brock@apache.org
X-Mailer: svnmailer-1.0.9
Message-Id: <20140218021838.F008E2388A5B@eris.apache.org>

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1569164&r1=1569163&r2=1569164&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Tue Feb 18 02:18:36 2014
@@ -19,7 +19,7 @@ package org.apache.hadoop.hive.jdbc;
 
 import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
-import static org.apache.hadoop.hive.ql.processors.SetProcessor.SET_COLUMN_NAME;
+import static org.apache.hive.common.util.SystemVariables.SET_COLUMN_NAME;
 
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1569164&r1=1569163&r2=1569164&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Tue Feb 18 02:18:36 2014
@@ -19,7 +19,7 @@ package org.apache.hive.jdbc;
 
 import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
-import static org.apache.hadoop.hive.ql.processors.SetProcessor.SET_COLUMN_NAME;
+import static org.apache.hive.common.util.SystemVariables.SET_COLUMN_NAME;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
@@ -1855,7 +1855,7 @@ public class TestJdbcDriver2 {
    * @throws Exception
    */
   public void testFetchFirstSetCmds() throws Exception {
-    execFetchFirst("set -v", SetProcessor.SET_COLUMN_NAME, false);
+    execFetchFirst("set -v", SET_COLUMN_NAME, false);
   }
 
   /**

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1569164&r1=1569163&r2=1569164&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Tue Feb 18 02:18:36 2014
@@ -138,6 +138,7 @@ import org.apache.hadoop.hive.ql.plan.Re
 import org.apache.hadoop.hive.ql.plan.RevokeDesc;
 import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowConfDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
@@ -176,7 +177,6 @@ import org.apache.hadoop.hive.shims.Shim
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.thrift.TException;
 import org.stringtemplate.v4.ST;
 
 /**
@@ -399,6 +399,11 @@ public class DDLTask extends Task

     ^(TOK_SHOWDBLOCKS $dbName $isExtended?)
     | KW_SHOW (showOptions=KW_FORMATTED)? (KW_INDEX|KW_INDEXES) KW_ON showStmtIdentifier ((KW_FROM|KW_IN) db_name=identifier)?
     -> ^(TOK_SHOWINDEXES showStmtIdentifier $showOptions? $db_name?)
+    | KW_SHOW KW_CONF StringLiteral
+    -> ^(TOK_SHOWCONF StringLiteral)
     ;
 
 lockStatement

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1569164&r1=1569163&r2=1569164&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Tue Feb 18 02:18:36 2014
@@ -69,6 +69,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_SHOWPARTITIONS, HiveOperation.SHOWPARTITIONS);
     commandType.put(HiveParser.TOK_SHOWLOCKS, HiveOperation.SHOWLOCKS);
     commandType.put(HiveParser.TOK_SHOWDBLOCKS, HiveOperation.SHOWLOCKS);
+    commandType.put(HiveParser.TOK_SHOWCONF, HiveOperation.SHOWCONF);
     commandType.put(HiveParser.TOK_CREATEFUNCTION, HiveOperation.CREATEFUNCTION);
     commandType.put(HiveParser.TOK_DROPFUNCTION, HiveOperation.DROPFUNCTION);
     commandType.put(HiveParser.TOK_CREATEMACRO, HiveOperation.CREATEMACRO);
@@ -193,6 +194,7 @@ public final class SemanticAnalyzerFacto
     case HiveParser.TOK_SHOWINDEXES:
     case HiveParser.TOK_SHOWLOCKS:
     case HiveParser.TOK_SHOWDBLOCKS:
+    case HiveParser.TOK_SHOWCONF:
     case HiveParser.TOK_CREATEINDEX:
     case HiveParser.TOK_DROPINDEX:
     case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java?rev=1569164&r1=1569163&r2=1569164&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java Tue Feb 18 02:18:36 2014
@@ -17,79 +17,48 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.common.util.SystemVariables;
+
+import java.util.Map;
 
-public class VariableSubstitution {
+public class VariableSubstitution extends SystemVariables {
 
   private static final Log l4j = LogFactory.getLog(VariableSubstitution.class);
-  protected static Pattern varPat = Pattern.compile("\\$\\{[^\\}\\$\u0020]+\\}");
 
-  private String getSubstitute(HiveConf conf, String var) {
-    String val = null;
-    try {
-      if (var.startsWith(SetProcessor.SYSTEM_PREFIX)) {
-        val = System.getProperty(var.substring(SetProcessor.SYSTEM_PREFIX.length()));
-      }
-    } catch(SecurityException se) {
-      l4j.warn("Unexpected SecurityException in Configuration", se);
-    }
-    if (val ==null){
-      if (var.startsWith(SetProcessor.ENV_PREFIX)){
-        val = System.getenv(var.substring(SetProcessor.ENV_PREFIX.length()));
-      }
-    }
+  @Override
+  protected String getSubstitute(HiveConf conf, String var) {
+    String val = super.getSubstitute(conf, var);
     if (val == null) {
-      if (var.startsWith(SetProcessor.HIVECONF_PREFIX)){
-        val = conf.get(var.substring(SetProcessor.HIVECONF_PREFIX.length()));
+      if (var.startsWith(HIVECONF_PREFIX)) {
+        val = conf.get(var.substring(HIVECONF_PREFIX.length()));
       }
     }
-    if (val ==null){
-      if(var.startsWith(SetProcessor.HIVEVAR_PREFIX)){
-        val = SessionState.get().getHiveVariables().get(var.substring(SetProcessor.HIVEVAR_PREFIX.length()));
+    if (val == null){
+      Map<String, String> vars = SessionState.get().getHiveVariables();
+      if (var.startsWith(HIVEVAR_PREFIX)) {
+        val = vars.get(var.substring(HIVEVAR_PREFIX.length()));
       } else {
-        val = SessionState.get().getHiveVariables().get(var);
+        val = vars.get(var);
       }
     }
     return val;
   }
 
+  @Override
   public String substitute (HiveConf conf, String expr) {
-
-    if (conf.getBoolVar(ConfVars.HIVEVARIABLESUBSTITUTE)){
-      l4j.debug("Substitution is on: "+expr);
+    if (conf.getBoolVar(ConfVars.HIVEVARIABLESUBSTITUTE)) {
+      l4j.debug("Substitution is on: " + expr);
     } else {
       return expr;
     }
     if (expr == null) {
       return null;
     }
-    Matcher match = varPat.matcher("");
-    String eval = expr;
-    for(int s=0;s

+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      ShowConfDesc showConfDesc) {
+    this(inputs, outputs);
+    this.showConfDesc = showConfDesc;
+  }
+
   public DescDatabaseDesc getDescDatabaseDesc() {
     return descDbDesc;
   }
@@ -1085,4 +1093,12 @@ public class DDLWork implements Serializ
     AlterTableExchangePartition alterTableExchangePartition) {
     this.alterTableExchangePartition = alterTableExchangePartition;
   }
+
+  public ShowConfDesc getShowConfDesc() {
+    return showConfDesc;
+  }
+
+  public void setShowConfDesc(ShowConfDesc showConfDesc) {
+    this.showConfDesc = showConfDesc;
+  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1569164&r1=1569163&r2=1569164&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Tue Feb 18 02:18:36 2014
@@ -66,6 +66,7 @@ public enum HiveOperation {
   SHOWINDEXES("SHOWINDEXES", null, null),
   SHOWPARTITIONS("SHOWPARTITIONS", null, null),
   SHOWLOCKS("SHOWLOCKS", null, null),
+  SHOWCONF("SHOWCONF", null, null),
   CREATEFUNCTION("CREATEFUNCTION", null, null),
   DROPFUNCTION("DROPFUNCTION", null, null),
   CREATEMACRO("CREATEMACRO", null, null),

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java?rev=1569164&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java Tue Feb 18 02:18:36 2014
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.plan;
+
+import org.apache.hadoop.fs.Path;
+
+import java.io.Serializable;
+
+public class ShowConfDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private Path resFile;
+  private String confName;
+
+  private static final String schema = "default,type,desc#string,string,string";
+
+  public String getSchema() {
+    return schema;
+  }
+
+  public ShowConfDesc() {
+  }
+
+  public ShowConfDesc(Path resFile, String confName) {
+    this.resFile = resFile;
+    this.confName = confName;
+  }
+
+  @Explain(displayName = "result file", normalExplain = false)
+  public Path getResFile() {
+    return resFile;
+  }
+
+  public void setResFile(Path resFile) {
+    this.resFile = resFile;
+  }
+
+  @Explain(displayName = "conf name", normalExplain = false)
+  public String getConfName() {
+    return confName;
+  }
+
+  public void setConfName(String confName) {
+    this.confName = confName;
+  }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?rev=1569164&r1=1569163&r2=1569164&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java Tue Feb 18 02:18:36 2014
@@ -22,6 +22,8 @@ import static org.apache.hadoop.hive.ser
 import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
 import static org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.defaultNullString;
 
+import static org.apache.hive.common.util.SystemVariables.*;
+
 import java.util.Map;
 import java.util.Properties;
 import java.util.SortedMap;
@@ -39,12 +41,7 @@ import org.apache.hadoop.hive.ql.session
  */
 public class SetProcessor implements CommandProcessor {
 
-  private static String prefix = "set: ";
-  public static final String ENV_PREFIX = "env:";
-  public static final String SYSTEM_PREFIX = "system:";
-  public static final String HIVECONF_PREFIX = "hiveconf:";
-  public static final String HIVEVAR_PREFIX = "hivevar:";
-  public static final String SET_COLUMN_NAME = "set";
+  private static final String prefix = "set: ";
 
   public static boolean getBoolean(String value) {
     if (value.equals("on") || value.equals("true")) {
@@ -69,7 +66,7 @@ public class SetProcessor implements Com
 
     // Inserting hive variables
     for (String s : ss.getHiveVariables().keySet()) {
-      sortedMap.put(SetProcessor.HIVEVAR_PREFIX + s, ss.getHiveVariables().get(s));
+      sortedMap.put(HIVEVAR_PREFIX + s, ss.getHiveVariables().get(s));
     }
 
     for (Map.Entry entries : sortedMap.entrySet()) {
@@ -108,23 +105,23 @@ public class SetProcessor implements Com
     if (varvalue.contains("\n")){
       ss.err.println("Warning: Value had a \\n character in it.");
     }
-    if (varname.startsWith(SetProcessor.ENV_PREFIX)){
+    if (varname.startsWith(ENV_PREFIX)){
       ss.err.println("env:* variables can not be set.");
       return new CommandProcessorResponse(1);
-    } else if (varname.startsWith(SetProcessor.SYSTEM_PREFIX)){
-      String propName = varname.substring(SetProcessor.SYSTEM_PREFIX.length());
+    } else if (varname.startsWith(SYSTEM_PREFIX)){
+      String propName = varname.substring(SYSTEM_PREFIX.length());
       System.getProperties().setProperty(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue));
       return new CommandProcessorResponse(0);
-    } else if (varname.startsWith(SetProcessor.HIVECONF_PREFIX)){
-      String propName = varname.substring(SetProcessor.HIVECONF_PREFIX.length());
+    } else if (varname.startsWith(HIVECONF_PREFIX)){
+      String propName = varname.substring(HIVECONF_PREFIX.length());
       try {
         setConf(varname, propName, varvalue, false);
         return new CommandProcessorResponse(0);
       } catch (IllegalArgumentException e) {
         return new CommandProcessorResponse(1, e.getMessage(), "42000");
       }
-    } else if (varname.startsWith(SetProcessor.HIVEVAR_PREFIX)) {
-      String propName = varname.substring(SetProcessor.HIVEVAR_PREFIX.length());
+    } else if (varname.startsWith(HIVEVAR_PREFIX)) {
+      String propName = varname.substring(HIVEVAR_PREFIX.length());
       ss.getHiveVariables().put(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue));
       return new CommandProcessorResponse(0);
     } else {
@@ -169,7 +166,7 @@ public class SetProcessor implements Com
 
   private SortedMap propertiesToSortedMap(Properties p){
     SortedMap sortedPropMap = new TreeMap();
-    for (Map.Entry entry :System.getProperties().entrySet() ){
+    for (Map.Entry entry : p.entrySet() ){
       sortedPropMap.put( (String) entry.getKey(), (String) entry.getValue());
     }
     return sortedPropMap;
@@ -188,38 +185,38 @@ public class SetProcessor implements Com
       ss.out.println("silent" + "=" + ss.getIsSilent());
       return createProcessorSuccessResponse();
     }
-    if (varname.startsWith(SetProcessor.SYSTEM_PREFIX)){
-      String propName = varname.substring(SetProcessor.SYSTEM_PREFIX.length());
+    if (varname.startsWith(SYSTEM_PREFIX)) {
+      String propName = varname.substring(SYSTEM_PREFIX.length());
       String result = System.getProperty(propName);
-      if (result != null){
-        ss.out.println(SetProcessor.SYSTEM_PREFIX+propName + "=" + result);
+      if (result != null) {
+        ss.out.println(SYSTEM_PREFIX + propName + "=" + result);
         return createProcessorSuccessResponse();
       } else {
-        ss.out.println( propName + " is undefined as a system property");
+        ss.out.println(propName + " is undefined as a system property");
        return new CommandProcessorResponse(1);
      }
-    } else if (varname.indexOf(SetProcessor.ENV_PREFIX)==0){
+    } else if (varname.indexOf(ENV_PREFIX) == 0) {
       String var = varname.substring(ENV_PREFIX.length());
-      if (System.getenv(var)!=null){
-        ss.out.println(SetProcessor.ENV_PREFIX+var + "=" + System.getenv(var));
+      if (System.getenv(var) != null) {
+        ss.out.println(ENV_PREFIX + var + "=" + System.getenv(var));
        return createProcessorSuccessResponse();
      } else {
        ss.out.println(varname + " is undefined as an environmental variable");
        return new CommandProcessorResponse(1);
      }
-    } else if (varname.indexOf(SetProcessor.HIVECONF_PREFIX)==0) {
-      String var = varname.substring(SetProcessor.HIVECONF_PREFIX.length());
-      if (ss.getConf().get(var)!=null){
-        ss.out.println(SetProcessor.HIVECONF_PREFIX+var + "=" + ss.getConf().get(var));
+    } else if (varname.indexOf(HIVECONF_PREFIX) == 0) {
+      String var = varname.substring(HIVECONF_PREFIX.length());
+      if (ss.getConf().get(var) != null) {
+        ss.out.println(HIVECONF_PREFIX + var + "=" + ss.getConf().get(var));
        return createProcessorSuccessResponse();
      } else {
        ss.out.println(varname + " is undefined as a hive configuration variable");
        return new CommandProcessorResponse(1);
      }
-    } else if (varname.indexOf(SetProcessor.HIVEVAR_PREFIX)==0) {
-      String var = varname.substring(SetProcessor.HIVEVAR_PREFIX.length());
-      if (ss.getHiveVariables().get(var)!=null){
-        ss.out.println(SetProcessor.HIVEVAR_PREFIX+var + "=" + ss.getHiveVariables().get(var));
+    } else if (varname.indexOf(HIVEVAR_PREFIX) == 0) {
+      String var = varname.substring(HIVEVAR_PREFIX.length());
+      if (ss.getHiveVariables().get(var) != null) {
+        ss.out.println(HIVEVAR_PREFIX + var + "=" + ss.getHiveVariables().get(var));
         return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as a hive variable");

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java?rev=1569164&r1=1569163&r2=1569164&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java Tue Feb 18 02:18:36 2014
@@ -69,6 +69,7 @@ public enum HiveOperationType {
   SHOWINDEXES,
   SHOWPARTITIONS,
   SHOWLOCKS,
+  SHOWCONF,
   CREATEFUNCTION,
   DROPFUNCTION,
   CREATEMACRO,

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java?rev=1569164&r1=1569163&r2=1569164&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java Tue Feb 18 02:18:36 2014
@@ -160,6 +160,7 @@ public class Operation2Privilege {
     op2Priv.put(HiveOperationType.SHOWINDEXES, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.SHOWPARTITIONS, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.SHOWLOCKS, new InOutPrivs(null, null));
+    op2Priv.put(HiveOperationType.SHOWCONF, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.CREATEFUNCTION, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.DROPFUNCTION, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.CREATEMACRO, new InOutPrivs(null, null));

Added: hive/trunk/ql/src/test/queries/clientpositive/show_conf.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/show_conf.q?rev=1569164&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/show_conf.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/show_conf.q Tue Feb 18 02:18:36 2014
@@ -0,0 +1,3 @@
+show conf "hive.auto.convert.sortmerge.join.to.mapjoin";
+
+show conf "hive.stats.retries.wait";

Added: hive/trunk/ql/src/test/results/clientpositive/show_conf.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_conf.q.out?rev=1569164&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_conf.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/show_conf.q.out Tue Feb 18 02:18:36 2014
@@ -0,0 +1,10 @@
+PREHOOK: query: show conf "hive.auto.convert.sortmerge.join.to.mapjoin"
+PREHOOK: type: SHOWCONF
+POSTHOOK: query: show conf "hive.auto.convert.sortmerge.join.to.mapjoin"
+POSTHOOK: type: SHOWCONF
+false	BOOLEAN	If hive.auto.convert.sortmerge.join is set to true, and a join was converted to a sort-merge join, this parameter decides whether each table should be tried as a big table, and effectively a map-join should be tried. That would create a conditional task with n+1 children for a n-way join (1 child for each table as the big table), and the backup task will be the sort-merge join. In some cases, a map-join would be faster than a sort-merge join, if there is no advantage of having the output bucketed and sorted. For example, if a very big sorted and bucketed table with few files (say 10 files) are being joined with a very small sorter and bucketed table with few files (10 files), the sort-merge join will only use 10 mappers, and a simple map-only join might be faster if the complete small table can fit in memory, and a map-join can be performed.
+PREHOOK: query: show conf "hive.stats.retries.wait"
+PREHOOK: type: SHOWCONF
+POSTHOOK: query: show conf "hive.stats.retries.wait"
+POSTHOOK: type: SHOWCONF
+3000	INT	The base waiting window (in milliseconds) before the next retry. The actual wait time is calculated by baseWindow * failures baseWindow * (failure 1) * (random number between [0.0,1.0]).