hive-commits mailing list archives

From: br...@apache.org
Subject: svn commit: r1613740 [16/29] - in /hive/branches/spark: ./ beeline/src/java/org/apache/hive/beeline/ beeline/src/main/resources/ beeline/src/test/org/apache/hive/beeline/ bin/ bin/ext/ common/ common/src/java/org/apache/hadoop/hive/ant/ common/src/java...
Date: Sat, 26 Jul 2014 23:46:00 GMT
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java Sat Jul 26 23:45:46 2014
@@ -60,6 +60,8 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.optimizer.ReduceSinkMapJoinProc;
 import org.apache.hadoop.hive.ql.optimizer.SetReducerParallelism;
 import org.apache.hadoop.hive.ql.optimizer.physical.CrossProductCheck;
+import org.apache.hadoop.hive.ql.optimizer.physical.MetadataOnlyOptimizer;
+import org.apache.hadoop.hive.ql.optimizer.physical.NullScanOptimizer;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalContext;
 import org.apache.hadoop.hive.ql.optimizer.physical.Vectorizer;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
@@ -247,15 +249,34 @@ public class TezCompiler extends TaskCom
     PhysicalContext physicalCtx = new PhysicalContext(conf, pCtx, pCtx.getContext(), rootTasks,
        pCtx.getFetchTask());
 
+    if (conf.getBoolVar(HiveConf.ConfVars.HIVENULLSCANOPTIMIZE)) {
+      physicalCtx = new NullScanOptimizer().resolve(physicalCtx);
+    } else {
+      LOG.debug("Skipping null scan query optimization");
+    }
+
+    if (conf.getBoolVar(HiveConf.ConfVars.HIVEMETADATAONLYQUERIES)) {
+      physicalCtx = new MetadataOnlyOptimizer().resolve(physicalCtx);
+    } else {
+      LOG.debug("Skipping metadata only query optimization");
+    }
+
     if (conf.getBoolVar(HiveConf.ConfVars.HIVE_CHECK_CROSS_PRODUCT)) {
       physicalCtx = new CrossProductCheck().resolve(physicalCtx);
+    } else {
+      LOG.debug("Skipping cross product analysis");
     }
 
     if (conf.getBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED)) {
-      (new Vectorizer()).resolve(physicalCtx);
+      physicalCtx = new Vectorizer().resolve(physicalCtx);
+    } else {
+      LOG.debug("Skipping vectorization");
     }
+
     if (!"none".equalsIgnoreCase(conf.getVar(HiveConf.ConfVars.HIVESTAGEIDREARRANGE))) {
-      (new StageIDsRearranger()).resolve(physicalCtx);
+      physicalCtx = new StageIDsRearranger().resolve(physicalCtx);
+    } else {
+      LOG.debug("Skipping stage id rearranger");
     }
     return;
   }
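
For context: each optimizer above implements a resolve(PhysicalContext) pass, and the patch both adds two new passes (null scan, metadata-only) and starts reassigning the returned context for Vectorizer and StageIDsRearranger instead of discarding it. A minimal sketch of that chaining idiom, using simplified stand-in types rather than the real Hive classes:

    // Stand-ins for PhysicalContext / PhysicalPlanResolver; the real Hive
    // types carry the full task plan, these only illustrate the shape.
    class Context { }

    interface Resolver {
      Context resolve(Context ctx);
    }

    class NullScanPass implements Resolver {
      @Override
      public Context resolve(Context ctx) {
        // a real pass would rewrite the plan here; this one passes it through
        return ctx;
      }
    }

    class CompilerSketch {
      static Context optimizeTaskPlan(Context ctx, boolean nullScanEnabled) {
        if (nullScanEnabled) {
          // reassign: resolve() may hand back a replacement context, which is
          // why the patch changes "(new Vectorizer()).resolve(physicalCtx)"
          // to "physicalCtx = new Vectorizer().resolve(physicalCtx)"
          ctx = new NullScanPass().resolve(ctx);
        }
        return ctx;
      }
    }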

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Sat Jul 26 23:45:46 2014
@@ -987,7 +987,7 @@ public final class TypeCheckProcFactory 
         // descendant nodes, DFS traversal ensures that the error only needs to
         // be cleared once. Also, for a case like
         // SELECT concat(value, concat(value))... the logic still works as the
-        // error is only set with the first 'value'; all node pocessors quit
+        // error is only set with the first 'value'; all node processors quit
         // early if the global error is set.
 
         if (isDescendant(nd, ctx.getErrorSrcNode())) {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java Sat Jul 26 23:45:46 2014
@@ -17,79 +17,44 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.conf.SystemVariables;
+
+import java.util.Map;
 
-public class VariableSubstitution {
+public class VariableSubstitution extends SystemVariables {
 
   private static final Log l4j = LogFactory.getLog(VariableSubstitution.class);
-  protected static Pattern varPat = Pattern.compile("\\$\\{[^\\}\\$\u0020]+\\}");
 
-  private String getSubstitute(HiveConf conf, String var) {
-    String val = null;
-    try {
-      if (var.startsWith(SetProcessor.SYSTEM_PREFIX)) {
-        val = System.getProperty(var.substring(SetProcessor.SYSTEM_PREFIX.length()));
-      }
-    } catch(SecurityException se) {
-      l4j.warn("Unexpected SecurityException in Configuration", se);
-    }
-    if (val ==null){
-      if (var.startsWith(SetProcessor.ENV_PREFIX)){
-        val = System.getenv(var.substring(SetProcessor.ENV_PREFIX.length()));
-      }
-    }
-    if (val == null) {
-      if (var.startsWith(SetProcessor.HIVECONF_PREFIX)){
-        val = conf.get(var.substring(SetProcessor.HIVECONF_PREFIX.length()));
-      }
-    }
-    if (val ==null){
-      if(var.startsWith(SetProcessor.HIVEVAR_PREFIX)){
-        val =  SessionState.get().getHiveVariables().get(var.substring(SetProcessor.HIVEVAR_PREFIX.length()));
+  @Override
+  protected String getSubstitute(Configuration conf, String var) {
+    String val = super.getSubstitute(conf, var);
+    if (val == null && SessionState.get() != null) {
+      Map<String,String> vars = SessionState.get().getHiveVariables();
+      if (var.startsWith(HIVEVAR_PREFIX)) {
+        val =  vars.get(var.substring(HIVEVAR_PREFIX.length()));
       } else {
-        val = SessionState.get().getHiveVariables().get(var);
+        val = vars.get(var);
       }
     }
     return val;
   }
 
-  public String substitute (HiveConf conf, String expr) {
-
-    if (conf.getBoolVar(ConfVars.HIVEVARIABLESUBSTITUTE)){
-      l4j.debug("Substitution is on: "+expr);
-    } else {
-      return expr;
-    }
+  public String substitute(HiveConf conf, String expr) {
     if (expr == null) {
-      return null;
+      return expr;
     }
-    Matcher match = varPat.matcher("");
-    String eval = expr;
-    for(int s=0;s<conf.getIntVar(ConfVars.HIVEVARIABLESUBSTITUTEDEPTH); s++) {
-      match.reset(eval);
-      if (!match.find()) {
-        return eval;
-      }
-      String var = match.group();
-      var = var.substring(2, var.length()-1); // remove ${ .. }
-      String val = getSubstitute(conf, var);
-
-      if (val == null) {
-        l4j.debug("Interpolation result: "+eval);
-        return eval; // return literal, no substitution found
-      }
-      // substitute
-      eval = eval.substring(0, match.start())+val+eval.substring(match.end());
+    if (HiveConf.getBoolVar(conf, ConfVars.HIVEVARIABLESUBSTITUTE)) {
+      l4j.debug("Substitution is on: " + expr);
+    } else {
+      return expr;
     }
-    throw new IllegalStateException("Variable substitution depth too large: "
-                                    + conf.getIntVar(ConfVars.HIVEVARIABLESUBSTITUTEDEPTH) + " " + expr);
+    int depth = HiveConf.getIntVar(conf, ConfVars.HIVEVARIABLESUBSTITUTEDEPTH);
+    return substitute(conf, expr, depth);
   }
 }
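
The removed regex loop now lives behind SystemVariables.substitute(conf, expr, depth); the subclass only contributes the hivevar lookup. A hedged sketch of what such depth-limited ${...} expansion looks like, with illustrative names rather than Hive's actual helper:

    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class SubstitutionSketch {
      // same character-class idea as the removed varPat: no '}', '$' or space
      private static final Pattern VAR = Pattern.compile("\\$\\{[^}$ ]+\\}");

      static String substitute(Map<String, String> vars, String expr, int depth) {
        String eval = expr;
        for (int i = 0; i < depth; i++) {
          Matcher m = VAR.matcher(eval);
          if (!m.find()) {
            return eval;                                // fully expanded
          }
          String name = m.group();
          name = name.substring(2, name.length() - 1);  // strip ${ and }
          String val = vars.get(name);
          if (val == null) {
            return eval;          // unknown variable: return the literal text
          }
          eval = eval.substring(0, m.start()) + val + eval.substring(m.end());
        }
        throw new IllegalStateException(
            "Variable substitution depth too large: " + depth + " " + expr);
      }

      public static void main(String[] args) {
        // ${a} -> ${b} -> x, resolved over two iterations
        System.out.println(substitute(Map.of("a", "${b}", "b", "x"), "${a}", 40));
      }
    }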

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java Sat Jul 26 23:45:46 2014
@@ -138,11 +138,16 @@ public class HiveAuthorizationTaskFactor
     List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
     List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
     PrivilegeObjectDesc hiveObj = null;
+    boolean grantOption = false;
     if (ast.getChildCount() > 2) {
       ASTNode astChild = (ASTNode) ast.getChild(2);
       hiveObj = analyzePrivilegeObject(astChild, outputs);
+
+      if (null != ast.getFirstChildWithType(HiveParser.TOK_GRANT_OPTION_FOR)) {
+        grantOption = true;
+      }
     }
-    RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj);
+    RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj, grantOption);
     return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc), conf);
   }
   @Override
@@ -212,7 +217,8 @@ public class HiveAuthorizationTaskFactor
     int rolesStartPos = 1;
     ASTNode wAdminOption = (ASTNode) ast.getChild(1);
     boolean isAdmin = false;
-    if(wAdminOption.getToken().getType() == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION){
+    if((isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_GRANT_WITH_ADMIN_OPTION) ||
+       (!isGrant && wAdminOption.getToken().getType() == HiveParser.TOK_ADMIN_OPTION_FOR)){
       rolesStartPos = 2; //start reading role names from next position
       isAdmin = true;
     }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java Sat Jul 26 23:45:46 2014
@@ -24,22 +24,20 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
@@ -77,11 +75,12 @@ public class CreateTableDesc extends DDL
   List<String> skewedColNames;
   List<List<String>> skewedColValues;
   boolean isStoredAsSubDirectories = false;
+  boolean isTemporary = false;
 
   public CreateTableDesc() {
   }
 
-  public CreateTableDesc(String databaseName, String tableName, boolean isExternal,
+  public CreateTableDesc(String databaseName, String tableName, boolean isExternal, boolean isTemporary,
       List<FieldSchema> cols, List<FieldSchema> partCols,
       List<String> bucketCols, List<Order> sortCols, int numBuckets,
       String fieldDelim, String fieldEscape, String collItemDelim,
@@ -92,7 +91,7 @@ public class CreateTableDesc extends DDL
       Map<String, String> tblProps,
       boolean ifNotExists, List<String> skewedColNames, List<List<String>> skewedColValues) {
 
-    this(tableName, isExternal, cols, partCols,
+    this(tableName, isExternal, isTemporary, cols, partCols,
         bucketCols, sortCols, numBuckets, fieldDelim, fieldEscape,
         collItemDelim, mapKeyDelim, lineDelim, comment, inputFormat,
         outputFormat, location, serName, storageHandler, serdeProps,
@@ -101,7 +100,7 @@ public class CreateTableDesc extends DDL
     this.databaseName = databaseName;
   }
 
-  public CreateTableDesc(String tableName, boolean isExternal,
+  public CreateTableDesc(String tableName, boolean isExternal, boolean isTemporary,
       List<FieldSchema> cols, List<FieldSchema> partCols,
       List<String> bucketCols, List<Order> sortCols, int numBuckets,
       String fieldDelim, String fieldEscape, String collItemDelim,
@@ -113,6 +112,7 @@ public class CreateTableDesc extends DDL
       boolean ifNotExists, List<String> skewedColNames, List<List<String>> skewedColValues) {
     this.tableName = tableName;
     this.isExternal = isExternal;
+    this.isTemporary = isTemporary;
     this.bucketCols = new ArrayList<String>(bucketCols);
     this.sortCols = new ArrayList<Order>(sortCols);
     this.collItemDelim = collItemDelim;
@@ -410,8 +410,7 @@ public class CreateTableDesc extends DDL
 
     if ((this.getCols() == null) || (this.getCols().size() == 0)) {
       // for now make sure that serde exists
-      if (StringUtils.isEmpty(this.getSerName())
-          || conf.getStringCollection(ConfVars.SERDESUSINGMETASTOREFORSCHEMA.varname).contains(this.getSerName())) {
+      if (Table.hasMetastoreBasedSchema(conf, getSerName())) {
         throw new SemanticException(ErrorMsg.INVALID_TBL_DDL_SERDE.getMsg());
       }
       return;
@@ -535,4 +534,19 @@ public class CreateTableDesc extends DDL
     this.nullFormat = nullFormat;
   }
 
+  /**
+   * @return whether the table is temporary
+   */
+  @Explain(displayName = "isTemporary", displayOnlyOnTrue = true)
+  public boolean isTemporary() {
+    return isTemporary;
+  }
+
+  /**
+   * @param isTemporary whether the table is temporary
+   */
+  public void setTemporary(boolean isTemporary) {
+    this.isTemporary = isTemporary;
+  }
+
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java Sat Jul 26 23:45:46 2014
@@ -38,16 +38,18 @@ public class CreateTableLikeDesc extends
   Map<String, String> tblProps;
   boolean ifNotExists;
   String likeTableName;
+  boolean isTemporary = false;
 
   public CreateTableLikeDesc() {
   }
 
-  public CreateTableLikeDesc(String tableName, boolean isExternal,
+  public CreateTableLikeDesc(String tableName, boolean isExternal, boolean isTemporary,
       String defaultInputFormat, String defaultOutputFormat, String location,
       String defaultSerName, Map<String, String> defaultSerdeProps, Map<String, String> tblProps,
       boolean ifNotExists, String likeTableName) {
     this.tableName = tableName;
     this.isExternal = isExternal;
+    this.isTemporary = isTemporary;
     this.defaultInputFormat=defaultInputFormat;
     this.defaultOutputFormat=defaultOutputFormat;
     this.defaultSerName=defaultSerName;
@@ -168,4 +170,20 @@ public class CreateTableLikeDesc extends
   public void setTblProps(Map<String, String> tblProps) {
     this.tblProps = tblProps;
   }
+
+  /**
+   * @return whether the table is temporary
+   */
+  @Explain(displayName = "isTemporary", displayOnlyOnTrue = true)
+  public boolean isTemporary() {
+    return isTemporary;
+  }
+
+  /**
+   * @param isTemporary whether the table is temporary
+   */
+  public void setTemporary(boolean isTemporary) {
+    this.isTemporary = isTemporary;
+  }
+
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Sat Jul 26 23:45:46 2014
@@ -76,6 +76,8 @@ public class DDLWork implements Serializ
   private RevokeDesc revokeDesc;
   private GrantRevokeRoleDDL grantRevokeRoleDDL;
 
+  private ShowConfDesc showConfDesc;
+
   boolean needLock = false;
 
   /**
@@ -139,6 +141,12 @@ public class DDLWork implements Serializ
     this.truncateTblDesc = truncateTblDesc;
   }
 
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      ShowConfDesc showConfDesc) {
+    this(inputs, outputs);
+    this.showConfDesc = showConfDesc;
+  }
+
   public DescDatabaseDesc getDescDatabaseDesc() {
     return descDbDesc;
   }
@@ -1117,4 +1125,12 @@ public class DDLWork implements Serializ
       AlterTableExchangePartition alterTableExchangePartition) {
     this.alterTableExchangePartition = alterTableExchangePartition;
   }
+
+  public ShowConfDesc getShowConfDesc() {
+    return showConfDesc;
+  }
+
+  public void setShowConfDesc(ShowConfDesc showConfDesc) {
+    this.showConfDesc = showConfDesc;
+  }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java Sat Jul 26 23:45:46 2014
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.serde2.typ
  */
 public class ExprNodeConstantDesc extends ExprNodeDesc implements Serializable {
   private static final long serialVersionUID = 1L;
+  final protected transient static char[] hexArray = "0123456789ABCDEF".toCharArray();
   private Object value;
 
   public ExprNodeConstantDesc() {
@@ -83,6 +84,15 @@ public class ExprNodeConstantDesc extend
 
     if (typeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
       return "'" + value.toString() + "'";
+    } else if (typeInfo.getTypeName().equals(serdeConstants.BINARY_TYPE_NAME)) {
+      byte[] bytes = (byte[]) value;
+      char[] hexChars = new char[bytes.length * 2];
+      for (int j = 0; j < bytes.length; j++) {
+        int v = bytes[j] & 0xFF;
+        hexChars[j * 2] = hexArray[v >>> 4];
+        hexChars[j * 2 + 1] = hexArray[v & 0x0F];
+      }
+      return new String(hexChars);
     } else {
       return value.toString();
     }
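
The new binary branch renders each byte as two hex digits using shift-and-mask nibble arithmetic. The same conversion as a standalone snippet (class and method names here are hypothetical; the nibble logic mirrors the patch):

    public class HexSketch {
      private static final char[] HEX = "0123456789ABCDEF".toCharArray();

      static String toHex(byte[] bytes) {
        char[] out = new char[bytes.length * 2];
        for (int j = 0; j < bytes.length; j++) {
          int v = bytes[j] & 0xFF;         // mask off sign extension: 0..255
          out[j * 2] = HEX[v >>> 4];       // high nibble
          out[j * 2 + 1] = HEX[v & 0x0F];  // low nibble
        }
        return new String(out);
      }

      public static void main(String[] args) {
        System.out.println(toHex(new byte[] {0x0F, (byte) 0xA0})); // prints 0FA0
      }
    }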

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java Sat Jul 26 23:45:46 2014
@@ -23,6 +23,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hive.common.util.AnnotationUtils;
 
 /**
  * GroupByDesc.
@@ -228,7 +229,7 @@ public class GroupByDesc extends Abstrac
     for (AggregationDesc ad : aggregators) {
       if (!ad.getDistinct()) {
         GenericUDAFEvaluator udafEval = ad.getGenericUDAFEvaluator();
-        UDFType annot = udafEval.getClass().getAnnotation(UDFType.class);
+        UDFType annot = AnnotationUtils.getAnnotation(udafEval.getClass(), UDFType.class);
         if (annot == null || !annot.distinctLike()) {
           return false;
         }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Sat Jul 26 23:45:46 2014
@@ -67,6 +67,7 @@ public enum HiveOperation {
   SHOWINDEXES("SHOWINDEXES", null, null),
   SHOWPARTITIONS("SHOWPARTITIONS", null, null),
   SHOWLOCKS("SHOWLOCKS", null, null),
+  SHOWCONF("SHOWCONF", null, null),
   CREATEFUNCTION("CREATEFUNCTION", null, null),
   DROPFUNCTION("DROPFUNCTION", null, null),
   CREATEMACRO("CREATEMACRO", null, null),

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java Sat Jul 26 23:45:46 2014
@@ -114,6 +114,8 @@ public class MapWork extends BaseWork {
 
   private boolean useBucketizedHiveInputFormat;
 
+  private boolean useOneNullRowInputFormat;
+
   private Map<String, Map<Integer, String>> scratchColumnVectorTypes = null;
   private Map<String, Map<String, Integer>> scratchColumnMap = null;
   private boolean vectorMode = false;
@@ -390,6 +392,7 @@ public class MapWork extends BaseWork {
   public void setInputformat(String inputformat) {
     this.inputformat = inputformat;
   }
+
   public boolean isUseBucketizedHiveInputFormat() {
     return useBucketizedHiveInputFormat;
   }
@@ -398,6 +401,14 @@ public class MapWork extends BaseWork {
     this.useBucketizedHiveInputFormat = useBucketizedHiveInputFormat;
   }
 
+  public void setUseOneNullRowInputFormat(boolean useOneNullRowInputFormat) {
+    this.useOneNullRowInputFormat = useOneNullRowInputFormat;
+  }
+
+  public boolean isUseOneNullRowInputFormat() {
+    return useOneNullRowInputFormat;
+  }
+
   public QBJoinTree getJoinTree() {
     return joinTree;
   }
@@ -534,4 +545,14 @@ public class MapWork extends BaseWork {
     this.vectorMode = vectorMode;
   }
 
+  public void logPathToAliases() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("LOGGING PATH TO ALIASES");
+      for (Map.Entry<String, ArrayList<String>> entry: pathToAliases.entrySet()) {
+        for (String a: entry.getValue()) {
+          LOG.debug("Path: " + entry.getKey() + ", Alias: " + a);
+        }
+      }
+    }
+  }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java Sat Jul 26 23:45:46 2014
@@ -353,6 +353,7 @@ public class ReduceSinkDesc extends Abst
     return skipTag;
   }
 
+  @Explain(displayName = "auto parallelism", normalExplain = false)
   public final boolean isAutoParallel() {
     return (autoParallel != null) && autoParallel;
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java Sat Jul 26 23:45:46 2014
@@ -31,16 +31,24 @@ public class RevokeDesc extends DDLDesc 
   private List<PrincipalDesc> principals;
 
   private PrivilegeObjectDesc privilegeSubjectDesc;
-  
+
+  private boolean grantOption;
+
   public RevokeDesc(){
   }
 
   public RevokeDesc(List<PrivilegeDesc> privileges,
       List<PrincipalDesc> principals, PrivilegeObjectDesc privilegeSubjectDesc) {
+    this(privileges, principals, privilegeSubjectDesc, false);
+  }
+
+  public RevokeDesc(List<PrivilegeDesc> privileges,
+        List<PrincipalDesc> principals, PrivilegeObjectDesc privilegeSubjectDesc, boolean grantOption) {
     super();
     this.privileges = privileges;
     this.principals = principals;
     this.privilegeSubjectDesc = privilegeSubjectDesc;
+    this.grantOption = grantOption;
   }
 
   public List<PrivilegeDesc> getPrivileges() {
@@ -66,5 +74,13 @@ public class RevokeDesc extends DDLDesc 
   public void setPrivilegeSubjectDesc(PrivilegeObjectDesc privilegeSubjectDesc) {
     this.privilegeSubjectDesc = privilegeSubjectDesc;
   }
+
+  public boolean isGrantOption() {
+    return grantOption;
+  }
+
+  public void setGrantOption(boolean grantOption) {
+    this.grantOption = grantOption;
+  }
   
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/TezEdgeProperty.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/TezEdgeProperty.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/TezEdgeProperty.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/TezEdgeProperty.java Sat Jul 26 23:45:46 2014
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.ql.plan;
 
 import org.apache.hadoop.hive.conf.HiveConf;

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Sat Jul 26 23:45:46 2014
@@ -77,9 +77,7 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFRank.GenericUDAFRankEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
-import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.JobConf;
 
@@ -196,7 +194,7 @@ public final class OpProcFactory {
         return;
       }
       
-      ExprWalkerInfo childInfo = getChildWalkerInfo((Operator<?>) ptfOp, owi);
+      ExprWalkerInfo childInfo = getChildWalkerInfo(ptfOp, owi);
 
       if (childInfo == null) {
         return;
@@ -411,16 +409,18 @@ public final class OpProcFactory {
         Object... nodeOutputs) throws SemanticException {
       LOG.info("Processing for " + nd.getName() + "("
           + ((Operator) nd).getIdentifier() + ")");
+
       OpWalkerInfo owi = (OpWalkerInfo) procCtx;
-      Operator<? extends OperatorDesc> op =
-        (Operator<? extends OperatorDesc>) nd;
-      ExprNodeDesc predicate = (((FilterOperator) nd).getConf()).getPredicate();
-      ExprWalkerInfo ewi = new ExprWalkerInfo();
+      Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>) nd;
+
+      // if this filter is a generated one, predicates need not be extracted
+      ExprWalkerInfo ewi = owi.getPrunedPreds(op);
       // Don't push a sampling predicate since createFilter() always creates filter
       // with isSamplePred = false. Also, the filterop with sampling pred is always
       // a child of TableScan, so there is no need to push this predicate.
-      if (!((FilterOperator)op).getConf().getIsSamplingPred()) {
+      if (ewi == null && !((FilterOperator)op).getConf().getIsSamplingPred()) {
         // get pushdown predicates for this operator's predicate
+        ExprNodeDesc predicate = (((FilterOperator) nd).getConf()).getPredicate();
         ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op, predicate);
         if (!ewi.isDeterministic()) {
           /* predicate is not deterministic */
@@ -964,6 +964,12 @@ public final class OpProcFactory {
       }
       owi.getCandidateFilterOps().clear();
     }
+    // push down current ppd context to newly added filter
+    ExprWalkerInfo walkerInfo = owi.getPrunedPreds(op);
+    if (walkerInfo != null) {
+      walkerInfo.getNonFinalCandidates().clear();
+      owi.putPrunedPreds(output, walkerInfo);
+    }
     return output;
   }
 
@@ -1048,7 +1054,7 @@ public final class OpProcFactory {
     tableScanDesc.setFilterExpr(decomposed.pushedPredicate);
     tableScanDesc.setFilterObject(decomposed.pushedPredicateObject);
 
-    return (ExprNodeGenericFuncDesc)decomposed.residualPredicate;
+    return decomposed.residualPredicate;
   }
 
   public static NodeProcessor getFilterProc() {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java Sat Jul 26 23:45:46 2014
@@ -21,11 +21,14 @@ import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;
 import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;
 import org.apache.hadoop.hive.ql.exec.LimitOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.PTFOperator;
 import org.apache.hadoop.hive.ql.exec.ScriptOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
@@ -77,6 +80,7 @@ import org.apache.hadoop.hive.ql.parse.S
  */
 public class PredicatePushDown implements Transform {
 
+  private static final Log LOG = LogFactory.getLog(PredicatePushDown.class);
   private ParseContext pGraphContext;
 
   @Override
@@ -126,6 +130,7 @@ public class PredicatePushDown implement
     topNodes.addAll(pGraphContext.getTopOps().values());
     ogw.startWalking(topNodes, null);
 
+    LOG.debug("After PPD:\n" + Operator.toString(pctx.getTopOps().values()));
     return pGraphContext;
   }
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java Sat Jul 26 23:45:46 2014
@@ -24,6 +24,7 @@ import org.apache.commons.lang.StringUti
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
@@ -37,9 +38,11 @@ public class AddResourceProcessor implem
       .getName());
   public static final LogHelper console = new LogHelper(LOG);
 
+  @Override
   public void init() {
   }
 
+  @Override
   public CommandProcessorResponse run(String command) {
     SessionState ss = SessionState.get();
     command = new VariableSubstitution().substitute(ss.getConf(),command);
@@ -52,11 +55,19 @@ public class AddResourceProcessor implem
           + "] <value> [<value>]*");
       return new CommandProcessorResponse(1);
     }
+
+    CommandProcessorResponse authErrResp =
+        CommandUtil.authorizeCommand(ss, HiveOperationType.ADD, Arrays.asList(tokens));
+    if(authErrResp != null){
+      // there was an authorization issue
+      return authErrResp;
+    }
+
     try {
       ss.add_resources(t,
           Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length)));
     } catch (Exception e) {
-      return new CommandProcessorResponse(1, e.getMessage(), null);
+      return CommandProcessorResponse.create(e);
     }
     return new CommandProcessorResponse(0);
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java Sat Jul 26 23:45:46 2014
@@ -27,12 +27,12 @@ import org.apache.hadoop.hive.metastore.
  * is not 0.
  */
 public class CommandProcessorResponse {
-  private int responseCode;
-  private String errorMessage;
-  private String SQLState;
-  private Schema resSchema;
+  private final int responseCode;
+  private final String errorMessage;
+  private final String SQLState;
+  private final Schema resSchema;
 
-  private Throwable exception;
+  private final Throwable exception;
 
   public CommandProcessorResponse(int responseCode) {
     this(responseCode, null, null, null, null);
@@ -50,6 +50,18 @@ public class CommandProcessorResponse {
     this(responseCode, errorMessage, SQLState, schema, null);
   }
 
+  /**
+   * Create a CommandProcessorResponse object indicating an error.
+   * Builds a new CommandProcessorResponse with responseCode=1 and takes its
+   * error message from the exception argument.
+   *
+   * @param e the exception the command failed with
+   * @return a CommandProcessorResponse carrying the exception's message
+   */
+  public static CommandProcessorResponse create(Exception e) {
+    return new CommandProcessorResponse(1, e.getMessage(), null);
+  }
+
   public CommandProcessorResponse(int responseCode, String errorMessage, String SQLState,
       Schema schema, Throwable exception) {
     this.responseCode = responseCode;

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java Sat Jul 26 23:45:46 2014
@@ -23,6 +23,7 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.charset.Charset;
+import java.util.Arrays;
 import java.util.StringTokenizer;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -34,6 +35,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
@@ -107,18 +109,27 @@ public class CompileProcessor implements
   @Override
   public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
     SessionState ss = SessionState.get();
-    myId = runCount.getAndIncrement();
     this.command = command;
+
+    CommandProcessorResponse authErrResp =
+        CommandUtil.authorizeCommand(ss, HiveOperationType.COMPILE, Arrays.asList(command));
+    if(authErrResp != null){
+      // there was an authorization issue
+      return authErrResp;
+    }
+
+    myId = runCount.getAndIncrement();
+
     try {
       parse(ss);
     } catch (CompileProcessorException e) {
-      return new CommandProcessorResponse(1, e.getMessage(), null);
+      return CommandProcessorResponse.create(e);
     }
     CommandProcessorResponse result = null;
     try {
       result = compile(ss);
     } catch (CompileProcessorException e) {
-      result = new CommandProcessorResponse(1, e.getMessage(), null);
+      return CommandProcessorResponse.create(e);
     }
     return result;
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java Sat Jul 26 23:45:46 2014
@@ -24,6 +24,7 @@ import org.apache.commons.lang.StringUti
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
@@ -36,9 +37,11 @@ public class DeleteResourceProcessor imp
   public static final Log LOG = LogFactory.getLog(DeleteResourceProcessor.class.getName());
   public static final LogHelper console = new LogHelper(LOG);
 
+  @Override
   public void init() {
   }
 
+  @Override
   public CommandProcessorResponse run(String command) {
     SessionState ss = SessionState.get();
     command = new VariableSubstitution().substitute(ss.getConf(),command);
@@ -52,7 +55,12 @@ public class DeleteResourceProcessor imp
           + "] <value> [<value>]*");
       return new CommandProcessorResponse(1);
     }
-
+    CommandProcessorResponse authErrResp =
+        CommandUtil.authorizeCommand(ss, HiveOperationType.DELETE, Arrays.asList(tokens));
+    if(authErrResp != null){
+      // there was an authorization issue
+      return authErrResp;
+    }
     if (tokens.length >= 2) {
       ss.delete_resources(t, Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length)));
     } else {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java Sat Jul 26 23:45:46 2014
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.processors;
 
 import java.io.PrintStream;
+import java.util.Arrays;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -27,6 +28,7 @@ import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
@@ -53,9 +55,11 @@ public class DfsProcessor implements Com
     dfsSchema.addToFieldSchemas(new FieldSchema(DFS_RESULT_HEADER, "string", ""));
   }
 
+  @Override
   public void init() {
   }
 
+  @Override
   public CommandProcessorResponse run(String command) {
 
 
@@ -64,6 +68,13 @@ public class DfsProcessor implements Com
       command = new VariableSubstitution().substitute(ss.getConf(),command);
 
       String[] tokens = command.split("\\s+");
+      CommandProcessorResponse authErrResp =
+          CommandUtil.authorizeCommand(ss, HiveOperationType.DFS, Arrays.asList(tokens));
+      if(authErrResp != null){
+        // there was an authorization issue
+        return authErrResp;
+      }
+
       PrintStream oldOut = System.out;
 
       if (ss != null && ss.out != null) {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java Sat Jul 26 23:45:46 2014
@@ -18,17 +18,30 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
+import java.util.Arrays;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 public class ResetProcessor implements CommandProcessor {
 
+  @Override
   public void init() {
   }
 
+  @Override
   public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
     SessionState ss = SessionState.get();
+
+    CommandProcessorResponse authErrResp =
+        CommandUtil.authorizeCommand(ss, HiveOperationType.RESET, Arrays.asList(command));
+    if(authErrResp != null){
+      // there was an authorization issue
+      return authErrResp;
+    }
+
     if (ss.getOverriddenConfigurations().isEmpty()) {
       return new CommandProcessorResponse(0);
     }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java Sat Jul 26 23:45:46 2014
@@ -22,6 +22,8 @@ import static org.apache.hadoop.hive.ser
 import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
 import static org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.defaultNullString;
 
+import static org.apache.hadoop.hive.conf.SystemVariables.*;
+
 import java.util.Map;
 import java.util.Properties;
 import java.util.SortedMap;
@@ -39,12 +41,7 @@ import org.apache.hadoop.hive.ql.session
  */
 public class SetProcessor implements CommandProcessor {
 
-  private static String prefix = "set: ";
-  public static final String ENV_PREFIX = "env:";
-  public static final String SYSTEM_PREFIX = "system:";
-  public static final String HIVECONF_PREFIX = "hiveconf:";
-  public static final String HIVEVAR_PREFIX = "hivevar:";
-  public static final String SET_COLUMN_NAME = "set";
+  private static final String prefix = "set: ";
 
   public static boolean getBoolean(String value) {
     if (value.equals("on") || value.equals("true")) {
@@ -69,7 +66,7 @@ public class SetProcessor implements Com
 
     // Inserting hive variables
     for (String s : ss.getHiveVariables().keySet()) {
-      sortedMap.put(SetProcessor.HIVEVAR_PREFIX + s, ss.getHiveVariables().get(s));
+      sortedMap.put(HIVEVAR_PREFIX + s, ss.getHiveVariables().get(s));
     }
 
     for (Map.Entry<String, String> entries : sortedMap.entrySet()) {
@@ -117,17 +114,17 @@ public class SetProcessor implements Com
       ss.err.println("Warning: Value had a \\n character in it.");
     }
     varname = varname.trim();
-    if (varname.startsWith(SetProcessor.ENV_PREFIX)){
+    if (varname.startsWith(ENV_PREFIX)){
       ss.err.println("env:* variables can not be set.");
       return 1;
-    } else if (varname.startsWith(SetProcessor.SYSTEM_PREFIX)){
-      String propName = varname.substring(SetProcessor.SYSTEM_PREFIX.length());
+    } else if (varname.startsWith(SYSTEM_PREFIX)){
+      String propName = varname.substring(SYSTEM_PREFIX.length());
       System.getProperties().setProperty(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue));
-    } else if (varname.startsWith(SetProcessor.HIVECONF_PREFIX)){
-      String propName = varname.substring(SetProcessor.HIVECONF_PREFIX.length());
+    } else if (varname.startsWith(HIVECONF_PREFIX)){
+      String propName = varname.substring(HIVECONF_PREFIX.length());
       setConf(varname, propName, varvalue, false);
-    } else if (varname.startsWith(SetProcessor.HIVEVAR_PREFIX)) {
-      String propName = varname.substring(SetProcessor.HIVEVAR_PREFIX.length());
+    } else if (varname.startsWith(HIVEVAR_PREFIX)) {
+      String propName = varname.substring(HIVEVAR_PREFIX.length());
       ss.getHiveVariables().put(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue));
     } else {
       setConf(varname, varname, varvalue, true);
@@ -169,7 +166,7 @@ public class SetProcessor implements Com
 
   private SortedMap<String,String> propertiesToSortedMap(Properties p){
     SortedMap<String,String> sortedPropMap = new TreeMap<String,String>();
-    for (Map.Entry<Object, Object> entry :System.getProperties().entrySet() ){
+    for (Map.Entry<Object, Object> entry : p.entrySet() ){
       sortedPropMap.put( (String) entry.getKey(), (String) entry.getValue());
     }
     return sortedPropMap;
@@ -188,38 +185,38 @@ public class SetProcessor implements Com
       ss.out.println("silent" + "=" + ss.getIsSilent());
       return createProcessorSuccessResponse();
     }
-    if (varname.startsWith(SetProcessor.SYSTEM_PREFIX)){
-      String propName = varname.substring(SetProcessor.SYSTEM_PREFIX.length());
+    if (varname.startsWith(SYSTEM_PREFIX)) {
+      String propName = varname.substring(SYSTEM_PREFIX.length());
       String result = System.getProperty(propName);
-      if (result != null){
-        ss.out.println(SetProcessor.SYSTEM_PREFIX+propName + "=" + result);
+      if (result != null) {
+        ss.out.println(SYSTEM_PREFIX + propName + "=" + result);
         return createProcessorSuccessResponse();
       } else {
-        ss.out.println( propName + " is undefined as a system property");
+        ss.out.println(propName + " is undefined as a system property");
         return new CommandProcessorResponse(1);
       }
-    } else if (varname.indexOf(SetProcessor.ENV_PREFIX)==0){
+    } else if (varname.indexOf(ENV_PREFIX) == 0) {
       String var = varname.substring(ENV_PREFIX.length());
-      if (System.getenv(var)!=null){
-        ss.out.println(SetProcessor.ENV_PREFIX+var + "=" + System.getenv(var));
+      if (System.getenv(var) != null) {
+        ss.out.println(ENV_PREFIX + var + "=" + System.getenv(var));
         return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as an environmental variable");
         return new CommandProcessorResponse(1);
       }
-    } else if (varname.indexOf(SetProcessor.HIVECONF_PREFIX)==0) {
-      String var = varname.substring(SetProcessor.HIVECONF_PREFIX.length());
-      if (ss.getConf().get(var)!=null){
-        ss.out.println(SetProcessor.HIVECONF_PREFIX+var + "=" + ss.getConf().get(var));
+    } else if (varname.indexOf(HIVECONF_PREFIX) == 0) {
+      String var = varname.substring(HIVECONF_PREFIX.length());
+      if (ss.getConf().get(var) != null) {
+        ss.out.println(HIVECONF_PREFIX + var + "=" + ss.getConf().get(var));
         return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as a hive configuration variable");
         return new CommandProcessorResponse(1);
       }
-    } else if (varname.indexOf(SetProcessor.HIVEVAR_PREFIX)==0) {
-      String var = varname.substring(SetProcessor.HIVEVAR_PREFIX.length());
-      if (ss.getHiveVariables().get(var)!=null){
-        ss.out.println(SetProcessor.HIVEVAR_PREFIX+var + "=" + ss.getHiveVariables().get(var));
+    } else if (varname.indexOf(HIVEVAR_PREFIX) == 0) {
+      String var = varname.substring(HIVEVAR_PREFIX.length());
+      if (ss.getHiveVariables().get(var) != null) {
+        ss.out.println(HIVEVAR_PREFIX + var + "=" + ss.getHiveVariables().get(var));
         return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as a hive variable");

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java Sat Jul 26 23:45:46 2014
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hive.ql.security.authorization;
 
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
 
 import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
@@ -27,13 +29,8 @@ import java.lang.reflect.Proxy;
 
 public class AuthorizationFactory {
 
-  public static HiveAuthorizationProvider create(HiveAuthorizationProvider delegated) {
-    return create(delegated, new DefaultAuthorizationExceptionHandler());
-  }
-
-  public static HiveAuthorizationProvider create(final HiveAuthorizationProvider delegated,
-      final AuthorizationExceptionHandler handler) {
-
+  public static <T> T create(
+      final Object delegated, final Class<T> itface, final AuthorizationExceptionHandler handler) {
     InvocationHandler invocation = new InvocationHandler() {
       public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
         invokeAuth(method, args);
@@ -44,27 +41,38 @@ public class AuthorizationFactory {
         try {
           method.invoke(delegated, args);
         } catch (InvocationTargetException e) {
-          if (e.getTargetException() instanceof AuthorizationException) {
-            handler.exception((AuthorizationException) e.getTargetException());
+          if (e.getTargetException() instanceof AuthorizationException ||
+              e.getTargetException() instanceof HiveAuthzPluginException ||
+              e.getTargetException() instanceof HiveAccessControlException) {
+            handler.exception((Exception) e.getTargetException());
           }
         }
       }
     };
 
-    return (HiveAuthorizationProvider)Proxy.newProxyInstance(
-        AuthorizationFactory.class.getClassLoader(),
-        new Class[] {HiveAuthorizationProvider.class},
-        invocation);
+    return (T) Proxy.newProxyInstance(
+        AuthorizationFactory.class.getClassLoader(), new Class[]{itface}, invocation);
   }
 
   public static interface AuthorizationExceptionHandler {
-    void exception(AuthorizationException exception) throws AuthorizationException;
+    void exception(Exception exception)
+        throws AuthorizationException, HiveAuthzPluginException, HiveAccessControlException;
   }
 
   public static class DefaultAuthorizationExceptionHandler
       implements AuthorizationExceptionHandler {
-    public void exception(AuthorizationException exception) {
-      throw exception;
+    public void exception(Exception exception) throws
+        AuthorizationException, HiveAuthzPluginException, HiveAccessControlException {
+      if (exception instanceof AuthorizationException) {
+        throw (AuthorizationException) exception;
+      }
+      if (exception instanceof HiveAuthzPluginException) {
+        throw (HiveAuthzPluginException) exception;
+      }
+      if (exception instanceof HiveAccessControlException) {
+        throw (HiveAccessControlException) exception;
+      }
+      throw new RuntimeException(exception);
     }
   }
 }
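
The generified create(...) returns a JDK dynamic proxy that forwards every call to the delegate and routes unwrapped exceptions through a handler. A reduced sketch of that Proxy.newProxyInstance pattern (interface and method names are illustrative, not the Hive API):

    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Proxy;

    public class ProxySketch {

      interface ExceptionHandler {
        void handle(Throwable t) throws Exception;      // may rethrow or swallow
      }

      @SuppressWarnings("unchecked")
      static <T> T wrap(Object delegate, Class<T> itface, ExceptionHandler handler) {
        InvocationHandler invocation = (proxy, method, args) -> {
          try {
            return method.invoke(delegate, args);
          } catch (InvocationTargetException e) {
            // unwrap the reflective wrapper, then apply the handler's policy
            handler.handle(e.getTargetException());
            return null;
          }
        };
        return (T) Proxy.newProxyInstance(
            ProxySketch.class.getClassLoader(), new Class[] {itface}, invocation);
      }

      public static void main(String[] args) {
        Runnable noisy = () -> { throw new IllegalStateException("denied"); };
        Runnable guarded = wrap(noisy, Runnable.class, t -> System.err.println(t));
        guarded.run();                                  // prints the exception
      }
    }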

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java Sat Jul 26 23:45:46 2014
@@ -18,22 +18,32 @@
 package org.apache.hadoop.hive.ql.security.authorization;
 
 import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+import java.util.ArrayList;
+import java.util.List;
 
 /**
  * Utility code shared by hive internal code and sql standard authorization plugin implementation
@@ -48,13 +58,19 @@ public class AuthorizationUtils {
    * @throws HiveException
    */
   public static HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException {
+    if (type == null) {
+      return null;
+    }
     switch(type){
     case USER:
       return HivePrincipalType.USER;
     case ROLE:
       return HivePrincipalType.ROLE;
     case GROUP:
-      throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
+      if (SessionState.get().getAuthorizationMode() == SessionState.AuthorizationMode.V2) {
+        throw new HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
+      }
+      return HivePrincipalType.GROUP;
     default:
       //should not happen as we take care of all existing types
       throw new AssertionError("Unsupported authorization type specified");
@@ -68,6 +84,9 @@ public class AuthorizationUtils {
    * @return
    */
   public static HivePrivilegeObjectType getHivePrivilegeObjectType(Type type) {
+    if (type == null) {
+      return null;
+    }
     switch(type){
     case DATABASE:
       return HivePrivilegeObjectType.DATABASE;
@@ -80,11 +99,102 @@ public class AuthorizationUtils {
     case PARTITION:
     case DUMMYPARTITION: //need to determine if a different type is needed for dummy partitions
       return HivePrivilegeObjectType.PARTITION;
+    case FUNCTION:
+      return HivePrivilegeObjectType.FUNCTION;
     default:
       return null;
     }
   }
 
+  public static HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc privSubjectDesc) {
+    if (privSubjectDesc.getObject() == null) {
+      return null;
+    }
+    return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW :
+        HivePrivilegeObjectType.DATABASE;
+  }
+
+  public static List<HivePrivilege> getHivePrivileges(List<PrivilegeDesc> privileges) {
+    List<HivePrivilege> hivePrivileges = new ArrayList<HivePrivilege>();
+    for(PrivilegeDesc privilege : privileges){
+      Privilege priv = privilege.getPrivilege();
+      hivePrivileges.add(
+          new HivePrivilege(priv.toString(), privilege.getColumns(), priv.getScopeList()));
+    }
+    return hivePrivileges;
+  }
+
+  public static List<HivePrincipal> getHivePrincipals(List<PrincipalDesc> principals)
+      throws HiveException {
+
+    ArrayList<HivePrincipal> hivePrincipals = new ArrayList<HivePrincipal>();
+    for(PrincipalDesc principal : principals){
+      hivePrincipals.add(getHivePrincipal(principal));
+    }
+    return hivePrincipals;
+  }
+
+  public static HivePrincipal getHivePrincipal(PrincipalDesc principal) throws HiveException {
+    if (principal == null) {
+      return null;
+    }
+    return getHivePrincipal(principal.getName(), principal.getType());
+  }
+
+  public static HivePrincipal getHivePrincipal(String name, PrincipalType type) throws HiveException {
+    return new HivePrincipal(name, AuthorizationUtils.getHivePrincipalType(type));
+  }
+
+  public static List<HivePrivilegeInfo> getPrivilegeInfos(List<HiveObjectPrivilege> privs)
+      throws HiveException {
+    List<HivePrivilegeInfo> hivePrivs = new ArrayList<HivePrivilegeInfo>();
+    for (HiveObjectPrivilege priv : privs) {
+      PrivilegeGrantInfo grantorInfo = priv.getGrantInfo();
+      HiveObjectRef privObject = priv.getHiveObject();
+      HivePrincipal hivePrincipal =
+          getHivePrincipal(priv.getPrincipalName(), priv.getPrincipalType());
+      HivePrincipal grantor =
+          getHivePrincipal(grantorInfo.getGrantor(), grantorInfo.getGrantorType());
+      HivePrivilegeObject object = getHiveObjectRef(privObject);
+      HivePrivilege privilege = new HivePrivilege(grantorInfo.getPrivilege(), null);
+      hivePrivs.add(new HivePrivilegeInfo(hivePrincipal, privilege, object, grantor,
+          grantorInfo.isGrantOption(), grantorInfo.getCreateTime()));
+    }
+    return hivePrivs;
+  }
+
+  public static HivePrivilegeObject getHiveObjectRef(HiveObjectRef privObj) throws HiveException {
+    if (privObj == null) {
+      return null;
+    }
+    HivePrivilegeObjectType objType = getHiveObjType(privObj.getObjectType());
+    return new HivePrivilegeObject(objType, privObj.getDbName(), privObj.getObjectName(),
+        privObj.getPartValues(), privObj.getColumnName());
+  }
+
+  public static HivePrivilegeObject getHivePrivilegeObject(
+      PrivilegeObjectDesc privSubjectDesc, List<String> columns) throws HiveException {
+
+    // null means ALL for show grants, GLOBAL for grant/revoke
+    HivePrivilegeObjectType objectType = null;
+
+    String[] dbTable;
+    List<String> partSpec = null;
+    if (privSubjectDesc == null) {
+      dbTable = new String[] {null, null};
+    } else {
+      if (privSubjectDesc.getTable()) {
+        dbTable = Utilities.getDbTableName(privSubjectDesc.getObject());
+      } else {
+        dbTable = new String[] {privSubjectDesc.getObject(), null};
+      }
+      if (privSubjectDesc.getPartSpec() != null) {
+        partSpec = new ArrayList<String>(privSubjectDesc.getPartSpec().values());
+      }
+      objectType = getPrivObjectType(privSubjectDesc);
+    }
+    return new HivePrivilegeObject(objectType, dbTable[0], dbTable[1], partSpec, columns, null);
+  }
 
   /**
    * Convert authorization plugin principal type to thrift principal type
@@ -99,6 +209,8 @@ public class AuthorizationUtils {
     switch(type){
     case USER:
       return PrincipalType.USER;
+    case GROUP:
+      return PrincipalType.GROUP;
     case ROLE:
       return PrincipalType.ROLE;
     default:
@@ -106,7 +218,6 @@ public class AuthorizationUtils {
     }
   }
 
-
   /**
    * Get thrift privilege grant info
    * @param privilege
@@ -134,21 +245,48 @@ public class AuthorizationUtils {
       return null;
     }
     switch(type){
+    case GLOBAL:
+      return HiveObjectType.GLOBAL;
     case DATABASE:
       return HiveObjectType.DATABASE;
     case TABLE_OR_VIEW:
       return HiveObjectType.TABLE;
     case PARTITION:
       return HiveObjectType.PARTITION;
-    case LOCAL_URI:
-    case DFS_URI:
-      throw new HiveException("Unsupported type " + type);
+    case COLUMN:
+      return HiveObjectType.COLUMN;
     default:
-      //should not happen as we have accounted for all types
-      throw new AssertionError("Unsupported type " + type);
+      throw new HiveException("Unsupported type " + type);
     }
   }
 
+  // V1 to V2 conversion.
+  private static HivePrivilegeObjectType getHiveObjType(HiveObjectType type) throws HiveException {
+    if (type == null) {
+      return null;
+    }
+    switch(type){
+      case GLOBAL:
+        if (SessionState.get().getAuthorizationMode() == SessionState.AuthorizationMode.V2) {
+          throw new HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_GLOBAL);
+        }
+        return HivePrivilegeObjectType.GLOBAL;
+      case DATABASE:
+        return HivePrivilegeObjectType.DATABASE;
+      case TABLE:
+        return HivePrivilegeObjectType.TABLE_OR_VIEW;
+      case PARTITION:
+        return HivePrivilegeObjectType.PARTITION;
+      case COLUMN:
+        if (SessionState.get().getAuthorizationMode() == SessionState.AuthorizationMode.V2) {
+          throw new HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_COLUMN);
+        }
+        return HivePrivilegeObjectType.COLUMN;
+      default:
+        //should not happen as we have accounted for all types
+        throw new AssertionError("Unsupported type " + type);
+    }
+  }
 
   /**
    * Convert thrift HiveObjectRef to plugin HivePrivilegeObject
@@ -161,7 +299,7 @@ public class AuthorizationUtils {
       return null;
     }
     HiveObjectType objType = getThriftHiveObjType(privObj.getType());
-    return new HiveObjectRef(objType, privObj.getDbname(), privObj.getTableViewURI(), null, null);
+    return new HiveObjectRef(objType, privObj.getDbname(), privObj.getObjectName(), null, null);
   }
 
   public static HivePrivObjectActionType getActionType(Entity privObject) {

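These helpers centralize the V1 (Thrift metastore) to V2 (plugin) model conversion,
and they are now null-tolerant: a null PrincipalType, Entity type or HiveObjectRef
maps to null instead of throwing. A short sketch of converting stored grants,
assuming the caller already holds the Thrift records:

    import java.util.List;
    import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;

    public class GrantConversionSketch {
      // Each Thrift record's principal, grantor, object, grant option and
      // create time are mapped into one plugin-side HivePrivilegeInfo.
      static List<HivePrivilegeInfo> toPluginModel(List<HiveObjectPrivilege> privs)
          throws HiveException {
        return AuthorizationUtils.getPrivilegeInfos(privs);
      }
    }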
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java Sat Jul 26 23:45:46 2014
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
+import java.util.ArrayList;
 import java.util.EnumSet;
+import java.util.List;
 
 /**
  * Privilege defines a privilege in Hive. Each privilege has a name and scope associated with it.
@@ -65,6 +67,17 @@ public class Privilege {
         && supportedScopeSet.contains(PrivilegeScope.TABLE_LEVEL_SCOPE);
   }
 
+  public List<String> getScopeList() {
+    if (supportedScopeSet == null) {
+      return null;
+    }
+    List<String> scopes = new ArrayList<String>();
+    for (PrivilegeScope scope : supportedScopeSet) {
+      scopes.add(scope.name());
+    }
+    return scopes;
+  }
+
   @Override
   public String toString() {
     return this.getPriv().toString();

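getScopeList() exposes the supported scopes as plain strings so they can cross the
plugin API boundary (they feed the new three-argument HivePrivilege constructor
later in this commit). A small sketch:

    import java.util.List;
    import org.apache.hadoop.hive.ql.security.authorization.Privilege;

    public class ScopeListSketch {
      // Prints the enum names of the privilege's supported scopes, or null
      // when the privilege has no scope set attached.
      static void printScopes(Privilege priv) {
        List<String> scopes = priv.getScopeList();
        System.out.println(priv + " -> " + scopes);
      }
    }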
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java Sat Jul 26 23:45:46 2014
@@ -37,10 +37,11 @@ public interface HiveAuthorizationValida
    * @param hiveOpType
    * @param inputHObjs
    * @param outputHObjs
+   * @param context
    * @throws HiveAuthzPluginException
    * @throws HiveAccessControlException
    */
   void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
-      List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException, HiveAccessControlException;
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException;
 
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java Sat Jul 26 23:45:46 2014
@@ -146,11 +146,12 @@ public interface HiveAuthorizer {
    * @param hiveOpType
    * @param inputsHObjs
    * @param outputHObjs
+   * @param context
    * @throws HiveAuthzPluginException
    * @throws HiveAccessControlException
    */
   void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputsHObjs,
-      List<HivePrivilegeObject> outputHObjs)
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
       throws HiveAuthzPluginException, HiveAccessControlException;
 
   /**

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java Sat Jul 26 23:45:46 2014
@@ -80,8 +80,9 @@ public class HiveAuthorizerImpl implemen
 
   @Override
   public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
-      List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException, HiveAccessControlException {
-    authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs);
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, context);
   }
 
   @Override

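The three changes above move in lock step: HiveAuthorizationValidator and
HiveAuthorizer both gain a HiveAuthzContext parameter, and the default authorizer
forwards it untouched. A stub implementation sketch, assuming checkPrivileges is
the only method the validator interface declares (any other members are elided):

    import java.util.List;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.*;

    public class PermissiveValidatorSketch implements HiveAuthorizationValidator {
      @Override
      public void checkPrivileges(HiveOperationType hiveOpType,
          List<HivePrivilegeObject> inputHObjs,
          List<HivePrivilegeObject> outputHObjs,
          HiveAuthzContext context)
          throws HiveAuthzPluginException, HiveAccessControlException {
        // A real validator would consult the context before deciding;
        // this stub allows every operation.
      }
    }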
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java Sat Jul 26 23:45:46 2014
@@ -69,6 +69,7 @@ public enum HiveOperationType {
   SHOWINDEXES,
   SHOWPARTITIONS,
   SHOWLOCKS,
+  SHOWCONF,
   CREATEFUNCTION,
   DROPFUNCTION,
   CREATEMACRO,
@@ -113,7 +114,12 @@ public enum HiveOperationType {
   ALTERVIEW_RENAME,
   ALTERTABLE_COMPACT,
   SHOW_COMPACTIONS,
-  SHOW_TRANSACTIONS
-
+  SHOW_TRANSACTIONS,
+  SET,
+  RESET,
+  DFS,
+  ADD,
+  DELETE,
+  COMPILE
 
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java Sat Jul 26 23:45:46 2014
@@ -25,10 +25,19 @@ import org.apache.hadoop.hive.common.cla
  */
 @LimitedPrivate(value = { "" })
 @Evolving
-public class HivePrincipal {
+public class HivePrincipal implements Comparable<HivePrincipal> {
+
+  @Override
+  public int compareTo(HivePrincipal o) {
+    int compare = name.compareTo(o.name);
+    if (compare == 0) {
+      compare = type.compareTo(o.type);
+    }
+    return compare;
+  }
 
   public enum HivePrincipalType{
-    USER, ROLE, UNKNOWN
+    USER, GROUP, ROLE, UNKNOWN
   }
 
   @Override

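Making HivePrincipal comparable gives callers a deterministic ordering, by name
first and then by the enum order of the type (USER, GROUP, ROLE, UNKNOWN). A quick
sketch:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;

    public class SortPrincipalsSketch {
      public static void main(String[] args) {
        List<HivePrincipal> ps = new ArrayList<HivePrincipal>();
        ps.add(new HivePrincipal("admin", HivePrincipalType.ROLE));
        ps.add(new HivePrincipal("admin", HivePrincipalType.USER));
        ps.add(new HivePrincipal("alice", HivePrincipalType.USER));
        Collections.sort(ps);
        // resulting order: (admin, USER), (admin, ROLE), (alice, USER)
      }
    }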
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java Sat Jul 26 23:45:46 2014
@@ -22,13 +22,14 @@ import java.util.Locale;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.hive.ql.security.authorization.PrivilegeScope;
 
 /**
  * Represents the hive privilege being granted/revoked
  */
 @LimitedPrivate(value = { "" })
 @Evolving
-public class HivePrivilege {
+public class HivePrivilege implements Comparable<HivePrivilege> {
   @Override
   public String toString() {
     return "Privilege [name=" + name + ", columns=" + columns + "]";
@@ -36,10 +37,16 @@ public class HivePrivilege {
 
   private final String name;
   private final List<String> columns;
+  private final List<String> supportedScope;
 
-  public HivePrivilege(String name, List<String> columns){
+  public HivePrivilege(String name, List<String> columns) {
+    this(name, columns, null);
+  }
+
+  public HivePrivilege(String name, List<String> columns, List<String> supportedScope) {
     this.name = name.toUpperCase(Locale.US);
     this.columns = columns;
+    this.supportedScope = supportedScope;
   }
 
   public String getName() {
@@ -50,6 +57,10 @@ public class HivePrivilege {
     return columns;
   }
 
+  public List<String> getSupportedScope() {
+    return supportedScope;
+  }
+
   @Override
   public int hashCode() {
     final int prime = 31;
@@ -82,5 +93,27 @@ public class HivePrivilege {
   }
 
 
+  public boolean supportsScope(PrivilegeScope scope) {
+    return supportedScope != null && supportedScope.contains(scope.name());
+  }
 
+  public int compareTo(HivePrivilege privilege) {
+    int compare = columns != null ?
+        (privilege.columns != null ? compare(columns, privilege.columns) : 1) :
+        (privilege.columns != null ? -1 : 0);
+    if (compare == 0) {
+      compare = name.compareTo(privilege.name);
+    }
+    return compare;
+  }
+
+  private int compare(List<String> o1, List<String> o2) {
+    for (int i = 0; i < Math.min(o1.size(), o2.size()); i++) {
+      int compare = o1.get(i).compareTo(o2.get(i));
+      if (compare != 0) {
+        return compare;
+      }
+    }
+    return o1.size() > o2.size() ? 1 : (o1.size() < o2.size() ? -1 : 0);
+  }
 }

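The added scope list and comparator can be exercised directly. TABLE_LEVEL_SCOPE is
the only PrivilegeScope constant referenced in this commit, so the sketch sticks
to it:

    import java.util.Arrays;
    import org.apache.hadoop.hive.ql.security.authorization.PrivilegeScope;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;

    public class PrivilegeScopeSketch {
      public static void main(String[] args) {
        // The name is upper-cased internally; supportsScope() matches the
        // scope strings against the enum name.
        HivePrivilege select = new HivePrivilege(
            "select", null, Arrays.asList("TABLE_LEVEL_SCOPE"));
        System.out.println(select.getName());  // SELECT
        System.out.println(select.supportsScope(PrivilegeScope.TABLE_LEVEL_SCOPE));  // true
      }
    }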
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java Sat Jul 26 23:45:46 2014
@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
 
@@ -25,51 +29,99 @@ import org.apache.hadoop.hive.common.cla
  */
 @LimitedPrivate(value = { "" })
 @Unstable
-public class HivePrivilegeObject {
+public class HivePrivilegeObject implements Comparable<HivePrivilegeObject> {
 
   @Override
-  public String toString() {
-    String name = null;
-    switch (type) {
-    case DATABASE:
-      name = dbname;
-      break;
-    case TABLE_OR_VIEW:
-      name = (dbname == null ? "" : dbname + ".") + tableviewname;
-      break;
-    case LOCAL_URI:
-    case DFS_URI:
-      name = tableviewname;
-      break;
-    case PARTITION:
-      break;
+  public int compareTo(HivePrivilegeObject o) {
+    int compare = type.compareTo(o.type);
+    if (compare == 0) {
+      // dbname may be null (e.g. for COMMAND_PARAMS objects), so compare null-safely
+      compare = dbname != null ?
+          (o.dbname != null ? dbname.compareTo(o.dbname) : 1) :
+          (o.dbname != null ? -1 : 0);
+    }
+    if (compare == 0) {
+      compare = objectName != null ?
+          (o.objectName != null ? objectName.compareTo(o.objectName) : 1) :
+          (o.objectName != null ? -1 : 0);
+    }
+    if (compare == 0) {
+      compare = partKeys != null ?
+          (o.partKeys != null ? compare(partKeys, o.partKeys) : 1) :
+          (o.partKeys != null ? -1 : 0);
     }
-    return "Object [type=" + type + ", name=" + name + "]";
+    if (compare == 0) {
+      compare = columns != null ?
+          (o.columns != null ? compare(columns, o.columns) : 1) :
+          (o.columns != null ? -1 : 0);
+    }
+    return compare;
+  }
 
+  private int compare(List<String> o1, List<String> o2) {
+    for (int i = 0; i < Math.min(o1.size(), o2.size()); i++) {
+      int compare = o1.get(i).compareTo(o2.get(i));
+      if (compare != 0) {
+        return compare;
+      }
+    }
+    return o1.size() > o2.size() ? 1 : (o1.size() < o2.size() ? -1 : 0);
   }
 
   public enum HivePrivilegeObjectType {
-    DATABASE, TABLE_OR_VIEW, PARTITION, LOCAL_URI, DFS_URI
-  };
-
+    GLOBAL, DATABASE, TABLE_OR_VIEW, PARTITION, COLUMN, LOCAL_URI, DFS_URI, COMMAND_PARAMS, FUNCTION
+  };
   public enum HivePrivObjectActionType {
     OTHER, INSERT, INSERT_OVERWRITE
   };
+
   private final HivePrivilegeObjectType type;
   private final String dbname;
-  private final String tableviewname;
+  private final String objectName;
+  private final List<String> commandParams;
+  private final List<String> partKeys;
+  private final List<String> columns;
   private final HivePrivObjectActionType actionType;
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI){
-    this(type, dbname, tableViewURI, HivePrivObjectActionType.OTHER);
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName) {
+    this(type, dbname, objectName, HivePrivObjectActionType.OTHER);
+  }
+
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
+      HivePrivObjectActionType actionType) {
+    this(type, dbname, objectName, null, null, actionType, null);
+  }
+
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
+      List<String> partKeys, String column) {
+    this(type, dbname, objectName, partKeys,
+        column == null ? null : new ArrayList<String>(Arrays.asList(column)),
+        HivePrivObjectActionType.OTHER, null);
+
+  }
+
+  /**
+   * Create a HivePrivilegeObject of type {@link HivePrivilegeObjectType#COMMAND_PARAMS}
+   * @param cmdParams the command parameters being authorized
+   * @return the privilege object wrapping the given parameters
+   */
+  public static HivePrivilegeObject createHivePrivilegeObject(List<String> cmdParams) {
+    return new HivePrivilegeObject(HivePrivilegeObjectType.COMMAND_PARAMS, null, null, null, null,
+        cmdParams);
+  }
+
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
+    List<String> partKeys, List<String> columns, List<String> commandParams) {
+    this(type, dbname, objectName, partKeys, columns, HivePrivObjectActionType.OTHER, commandParams);
   }
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI,
-      HivePrivObjectActionType actionType) {
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
+      List<String> partKeys, List<String> columns, HivePrivObjectActionType actionType,
+      List<String> commandParams) {
     this.type = type;
     this.dbname = dbname;
-    this.tableviewname = tableViewURI;
+    this.objectName = objectName;
+    this.partKeys = partKeys;
+    this.columns = columns;
     this.actionType = actionType;
+    this.commandParams = commandParams;
   }
 
   public HivePrivilegeObjectType getType() {
@@ -80,11 +132,72 @@ public class HivePrivilegeObject {
     return dbname;
   }
 
-  public String getTableViewURI() {
-    return tableviewname;
+  /**
+   * @return name of table/view/uri/function name
+   */
+  public String getObjectName() {
+    return objectName;
   }
 
   public HivePrivObjectActionType getActionType() {
     return actionType;
   }
+
+  public List<String> getCommandParams() {
+    return commandParams;
+  }
+
+  public List<String> getPartKeys() {
+    return partKeys;
+  }
+
+  public List<String> getColumns() {
+    return columns;
+  }
+
+  @Override
+  public String toString() {
+    String name = null;
+    switch (type) {
+    case DATABASE:
+      name = dbname;
+      break;
+    case TABLE_OR_VIEW:
+    case PARTITION:
+      name = getDbObjectName(dbname, objectName);
+      if (partKeys != null) {
+        name += partKeys.toString();
+      }
+      break;
+    case FUNCTION:
+      name = getDbObjectName(dbname, objectName);
+      break;
+    case COLUMN:
+    case LOCAL_URI:
+    case DFS_URI:
+      name = objectName;
+      break;
+    case COMMAND_PARAMS:
+      name = commandParams.toString();
+      break;
+    }
+
+    // get the string representing the action type if it is a non-default action type
+    String actionTypeStr = "";
+    if (actionType != null) {
+      switch (actionType) {
+      case INSERT:
+      case INSERT_OVERWRITE:
+        actionTypeStr = ", action=" + actionType;
+      default:
+      }
+    }
+
+    return "Object [type=" + type + ", name=" + name + actionTypeStr + "]";
+  }
+
+  private String getDbObjectName(String dbname, String objectName) {
+    return (dbname == null ? "" : dbname + ".") + objectName;
+  }
+
 }

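The reworked class separates the object name from the old table/view/URI catch-all
and additionally carries partition keys, columns and command parameters. A sketch of
the two construction paths and the resulting toString() output:

    import java.util.Arrays;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

    public class PrivObjectSketch {
      public static void main(String[] args) {
        // A table object carrying column information and a non-default action:
        HivePrivilegeObject tbl = new HivePrivilegeObject(
            HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "t1",
            null, Arrays.asList("c1", "c2"),
            HivePrivObjectActionType.INSERT, null);
        System.out.println(tbl);
        // Object [type=TABLE_OR_VIEW, name=default.t1, action=INSERT]

        // Command parameters use the dedicated factory and object type:
        HivePrivilegeObject cmd = HivePrivilegeObject
            .createHivePrivilegeObject(Arrays.asList("add", "jar"));
        System.out.println(cmd);
        // Object [type=COMMAND_PARAMS, name=[add, jar]]
      }
    }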
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java Sat Jul 26 23:45:46 2014
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
@@ -45,12 +47,14 @@ public class GrantPrivAuthUtils {
     RequiredPrivileges reqPrivs = getGrantRequiredPrivileges(hivePrivileges);
 
     // check if this user has necessary privileges (reqPrivs) on this object
-    checkRequiredPrivileges(reqPrivs, hivePrivObject, metastoreClient, userName, curRoles, isAdmin);
+    checkRequiredPrivileges(reqPrivs, hivePrivObject, metastoreClient, userName, curRoles, isAdmin,
+        HiveOperationType.GRANT_PRIVILEGE);
   }
 
   private static void checkRequiredPrivileges(
       RequiredPrivileges reqPrivileges, HivePrivilegeObject hivePrivObject,
-      IMetaStoreClient metastoreClient, String userName, List<String> curRoles, boolean isAdmin)
+      IMetaStoreClient metastoreClient, String userName, List<String> curRoles, boolean isAdmin,
+      HiveOperationType opType)
           throws HiveAuthzPluginException, HiveAccessControlException {
 
     // keep track of the principals on which privileges have been checked for
@@ -61,9 +65,11 @@ public class GrantPrivAuthUtils {
         metastoreClient, userName, hivePrivObject, curRoles, isAdmin);
 
     // check if required privileges is subset of available privileges
+    List<String> deniedMessages = new ArrayList<String>();
     Collection<SQLPrivTypeGrant> missingPrivs = reqPrivileges.findMissingPrivs(availPrivs);
-    SQLAuthorizationUtils.assertNoMissingPrivilege(missingPrivs, new HivePrincipal(userName,
-        HivePrincipalType.USER), hivePrivObject);
+    SQLAuthorizationUtils.addMissingPrivMsg(missingPrivs, hivePrivObject, deniedMessages);
+    SQLAuthorizationUtils.assertNoDeniedPermissions(new HivePrincipal(userName,
+        HivePrincipalType.USER), opType, deniedMessages);
   }
 
   private static RequiredPrivileges getGrantRequiredPrivileges(List<HivePrivilege> hivePrivileges)

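The check now funnels misses through addMissingPrivMsg() and raises one combined
error via assertNoDeniedPermissions() instead of failing on the first missing
privilege. A condensed sketch of the pattern, assuming the available privileges are
computed as in the method above, that findMissingPrivs() takes a RequiredPrivileges
argument, and that the sqlstd types are visible to the caller:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.*;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.*;

    public class DeniedMessagesSketch {
      static void check(RequiredPrivileges required, RequiredPrivileges available,
          HivePrivilegeObject obj, String userName, HiveOperationType opType)
          throws HiveAuthzPluginException, HiveAccessControlException {
        List<String> deniedMessages = new ArrayList<String>();
        Collection<SQLPrivTypeGrant> missing = required.findMissingPrivs(available);
        // Accumulate one message per denied object...
        SQLAuthorizationUtils.addMissingPrivMsg(missing, obj, deniedMessages);
        // ...then throw once, naming the principal, operation and all denials.
        SQLAuthorizationUtils.assertNoDeniedPermissions(
            new HivePrincipal(userName, HivePrincipalType.USER), opType, deniedMessages);
      }
    }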
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java Sat Jul 26 23:45:46 2014
@@ -46,7 +46,7 @@ public class Operation2Privilege {
 
 
     private PrivRequirement(SQLPrivTypeGrant[] privs, IOType ioType) {
-      this(privs, ioType, (HivePrivObjectActionType) null);
+      this(privs, ioType, null);
     }
 
     private PrivRequirement(SQLPrivTypeGrant[] privs, IOType ioType,
@@ -291,6 +291,8 @@ public class Operation2Privilege {
 (null, null));
     op2Priv.put(HiveOperationType.SHOW_TRANSACTIONS, PrivRequirement.newIOPrivRequirement
 (null, null));
+    op2Priv.put(HiveOperationType.SHOWCONF, PrivRequirement.newIOPrivRequirement
+(null, null));
 
     op2Priv.put(HiveOperationType.LOCKTABLE, PrivRequirement.newIOPrivRequirement
 (null, null));
@@ -308,6 +310,19 @@ public class Operation2Privilege {
 (null, ADMIN_PRIV_AR));
     op2Priv.put(HiveOperationType.DESCDATABASE, PrivRequirement.newIOPrivRequirement
 (null, null));
+    op2Priv.put(HiveOperationType.DFS, PrivRequirement.newIOPrivRequirement
+(ADMIN_PRIV_AR, ADMIN_PRIV_AR));
+    op2Priv.put(HiveOperationType.RESET, PrivRequirement.newIOPrivRequirement
+(null, null));
+    op2Priv.put(HiveOperationType.COMPILE, PrivRequirement.newIOPrivRequirement
+(ADMIN_PRIV_AR, ADMIN_PRIV_AR));
+    op2Priv.put(HiveOperationType.ADD, PrivRequirement.newIOPrivRequirement
+(ADMIN_PRIV_AR, ADMIN_PRIV_AR));
+    op2Priv.put(HiveOperationType.DELETE, PrivRequirement.newIOPrivRequirement
+(ADMIN_PRIV_AR, ADMIN_PRIV_AR));
+    // set command is currently not authorized through the API
+    op2Priv.put(HiveOperationType.SET, PrivRequirement.newIOPrivRequirement
+(null, null));
 
     // The following actions are authorized through SQLStdHiveAccessController,
     // and it is not using this privilege mapping, but it might make sense to move it here
@@ -330,11 +345,10 @@ public class Operation2Privilege {
     op2Priv.put(HiveOperationType.SHOW_ROLES, PrivRequirement.newIOPrivRequirement
 (null, null));
     op2Priv.put(HiveOperationType.SHOW_ROLE_GRANT, PrivRequirement.newIOPrivRequirement
-(null,
-        null));
+(null, null));
     op2Priv.put(HiveOperationType.SHOW_ROLE_PRINCIPALS, PrivRequirement.newIOPrivRequirement
-(null,
-        null));
+(null, null));
+
 
 
   }

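All of these registrations follow one pattern: the first argument of
newIOPrivRequirement() is the privilege set required on inputs, the second on
outputs, and null means no requirement on that side. So DFS, COMPILE, ADD and
DELETE now demand admin privilege on both sides, while SET, RESET and SHOWCONF
require nothing. Inside the same static mapping block, a hypothetical new
admin-only operation (name illustrative) would be registered as:

    // Hypothetical entry: admin privilege required on both inputs and
    // outputs, mirroring DFS and COMPILE above.
    op2Priv.put(HiveOperationType.SOME_ADMIN_OP,
        PrivRequirement.newIOPrivRequirement(ADMIN_PRIV_AR, ADMIN_PRIV_AR));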

