hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hashut...@apache.org
Subject svn commit: r1568352 [1/2] - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/ java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/hooks/ java/org/apache/hadoop/hive/ql/metadata/ java/org/apache/hadoop/hive/ql/optimizer/ java/org/a...
Date Fri, 14 Feb 2014 16:57:55 GMT
Author: hashutosh
Date: Fri Feb 14 16:57:53 2014
New Revision: 1568352

URL: http://svn.apache.org/r1568352
Log:
HIVE-6250 : sql std auth - view authorization should not check the underlying table. More tests and fixes. (Thejas Nair via Ashutosh Chauhan)

Added:
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
    hive/trunk/ql/src/test/queries/clientnegative/authorization_addpartition.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_createview.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_ctas.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_droppartition.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_insert_noinspriv.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_insert_noselectpriv.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_not_owner_alter_tab_rename.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_not_owner_alter_tab_serdeprop.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_not_owner_drop_tab.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_not_owner_drop_view.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_priv_current_role_neg.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_select.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_select_view.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_set_role_neg1.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_set_role_neg2.q
    hive/trunk/ql/src/test/queries/clientnegative/authorization_truncate.q
    hive/trunk/ql/src/test/queries/clientpositive/authorization_admin_almighty1.q
    hive/trunk/ql/src/test/queries/clientpositive/authorization_owner_actions.q
    hive/trunk/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q
    hive/trunk/ql/src/test/results/clientnegative/authorization_addpartition.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_createview.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_ctas.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_droppartition.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_insert_noinspriv.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_insert_noselectpriv.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_rename.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_serdeprop.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_not_owner_drop_tab.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_not_owner_drop_view.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_select.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_select_view.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_truncate.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_admin_almighty1.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_owner_actions.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_view_sqlstd.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RequiredPrivileges.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLPrivTypeGrant.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hive/trunk/ql/src/test/queries/clientpositive/authorization_revoke_table_priv.q
    hive/trunk/ql/src/test/results/clientnegative/authorization_create_role_no_admin.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_drop_role_no_admin.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_show_roles_no_admin.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_revoke_table_priv.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Fri Feb 14 16:57:53 2014
@@ -55,7 +55,6 @@ import org.apache.hadoop.hive.ql.exec.Ta
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.hooks.Entity;
-import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
 import org.apache.hadoop.hive.ql.hooks.Hook;
 import org.apache.hadoop.hive.ql.hooks.HookContext;
@@ -728,24 +727,43 @@ public class Driver implements CommandPr
       HivePrivilegeObjectType privObjType =
           AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
 
-      //support for authorization on partitions or uri needs to be added
-      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType,
-          getDataBaseName(privObject),
-              privObject.getTable() == null ? null : privObject.getTable().getTableName());
+      if(privObject instanceof ReadEntity && !((ReadEntity)privObject).isDirect()){
+        // In case of views, the underlying views or tables are not direct dependencies
+        // and are not used for authorization checks.
+        // This ReadEntity represents one of the underlying tables/views, so skip it.
+        // See the description of the isDirect field in ReadEntity
+        continue;
+      }
+
+      //support for authorization on partitions needs to be added
+      String dbname = null;
+      String tableURI = null;
+      switch(privObject.getType()){
+      case DATABASE:
+        dbname = privObject.getDatabase() == null ? null : privObject.getDatabase().getName();
+        break;
+      case TABLE:
+        dbname = privObject.getTable() == null ? null : privObject.getTable().getDbName();
+        tableURI = privObject.getTable() == null ? null : privObject.getTable().getTableName();
+        break;
+      case DFS_DIR:
+      case LOCAL_DIR:
+        tableURI = privObject.getD();
+        break;
+      case DUMMYPARTITION:
+      case PARTITION:
+        // not currently handled
+        break;
+        default:
+          throw new AssertionError("Unexpected object type");
+      }
+
+      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, tableURI);
       hivePrivobjs.add(hPrivObject);
     }
     return hivePrivobjs;
   }
 
-
-  private String getDataBaseName(Entity privObject) {
-    if(privObject.getType() == Type.DATABASE){
-      return privObject.getDatabase() == null ? null : privObject.getDatabase().getName();
-    } else {
-      return privObject.getTable() == null ? null : privObject.getTable().getDbName();
-    }
-  }
-
   private HiveOperationType getHiveOperationType(HiveOperation op) {
     return HiveOperationType.valueOf(op.name());
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Fri Feb 14 16:57:53 2014
@@ -658,7 +658,7 @@ public class DDLTask extends Task<DDLWor
         //only grantInfo is used
         HiveObjectPrivilege thriftObjectPriv = new HiveObjectPrivilege(new HiveObjectRef(
           AuthorizationUtils.getThriftHiveObjType(privObj.getType()),privObj.getDbname(),
-          privObj.getTableviewname(),null,null), principal.getName(),
+          privObj.getTableViewURI(),null,null), principal.getName(),
           AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo);
         privList.add(thriftObjectPriv);
       }
@@ -873,9 +873,7 @@ public class DDLTask extends Task<DDLWor
   }
 
   private HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc privSubjectDesc) {
-    //TODO: This needs to change to support view once view grant/revoke is supported as
-    // part of HIVE-6181
-    return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE : HivePrivilegeObjectType.DATABASE;
+    return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW : HivePrivilegeObjectType.DATABASE;
   }
 
   private List<HivePrivilege> getHivePrivileges(List<PrivilegeDesc> privileges) {
@@ -3632,12 +3630,7 @@ public class DDLTask extends Task<DDLWor
    */
   private boolean updateModifiedParameters(Map<String, String> params, HiveConf conf) throws HiveException {
     String user = null;
-    try {
-      user = conf.getUser();
-    } catch (IOException e) {
-      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to get current user");
-    }
-
+    user = SessionState.getUserFromAuthenticator();
     params.put("last_modified_by", user);
     params.put("last_modified_time", Long.toString(System.currentTimeMillis() / 1000));
     return true;
@@ -4137,11 +4130,7 @@ public class DDLTask extends Task<DDLWor
   }
 
   private int setGenericTableAttributes(Table tbl) throws HiveException {
-    try {
-      tbl.setOwner(conf.getUser());
-    } catch (IOException e) {
-      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to get current user");
-    }
+    tbl.setOwner(SessionState.getUserFromAuthenticator());
     // set create time
     tbl.setCreateTime((int) (System.currentTimeMillis() / 1000));
     return 0;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java Fri Feb 14 16:57:53 2014
@@ -35,10 +35,16 @@ public class ReadEntity extends Entity i
   // Consider a query like: select * from V, where the view V is defined as:
   // select * from T
   // The inputs will contain V and T (parent: V)
+  // T will be marked as an indirect entity using isDirect flag.
+  // This will help in distinguishing from the case where T is a direct dependency
+  // For example in the case of "select * from V join T ..." T would be a direct dependency
+  private boolean isDirect = true;
 
   // For views, the entities can be nested - by default, entities are at the top level
   private final Set<ReadEntity> parents = new HashSet<ReadEntity>();
 
+
+
   /**
    * For serialization only.
    */
@@ -74,6 +80,11 @@ public class ReadEntity extends Entity i
     initParent(parent);
   }
 
+  public ReadEntity(Table t, ReadEntity parent, boolean isDirect) {
+    this(t, parent);
+    this.isDirect = isDirect;
+  }
+
   /**
    * Constructor given a partition.
    *
@@ -89,6 +100,12 @@ public class ReadEntity extends Entity i
     initParent(parent);
   }
 
+  public ReadEntity(Partition p, ReadEntity parent, boolean isDirect) {
+    this(p, parent);
+    this.isDirect = isDirect;
+  }
+
+
   public Set<ReadEntity> getParents() {
     return parents;
   }
@@ -109,4 +126,14 @@ public class ReadEntity extends Entity i
       return false;
     }
   }
+
+  public boolean isDirect() {
+    return isDirect;
+  }
+
+  public void setDirect(boolean isDirect) {
+    this.isDirect = isDirect;
+  }
+
+
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Fri Feb 14 16:57:53 2014
@@ -2449,11 +2449,7 @@ private void constructOneLBLocationMap(F
   }
 
   private String getUserName() {
-    SessionState ss = SessionState.get();
-    if (ss != null && ss.getAuthenticator() != null) {
-      return ss.getAuthenticator().getUserName();
-    }
-    return null;
+    return SessionState.getUserFromAuthenticator();
   }
 
   private List<String> getGroupNames() {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Fri Feb 14 16:57:53 2014
@@ -44,8 +44,8 @@ import org.apache.hadoop.hive.ql.exec.De
 import org.apache.hadoop.hive.ql.exec.DependencyCollectionTask;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
-import org.apache.hadoop.hive.ql.exec.MoveTask;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.MoveTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -102,7 +102,6 @@ import org.apache.hadoop.hive.ql.plan.Ta
 import org.apache.hadoop.hive.ql.plan.TezWork;
 import org.apache.hadoop.hive.ql.stats.StatsFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.mapred.InputFormat;
 
 /**
  * General utility common functions for the Processor to convert operator into
@@ -562,14 +561,18 @@ public final class GenMapRedUtils {
 
     // The table should also be considered a part of inputs, even if the table is a
     // partitioned table and whether any partition is selected or not
+
+    //This read entity is a direct read entity and not an indirect read (that is when
+    // this is being read because it is a dependency of a view).
+    boolean isDirectRead = (parentViewInfo == null);
     PlanUtils.addInput(inputs,
-        new ReadEntity(parseCtx.getTopToTable().get(topOp), parentViewInfo));
+        new ReadEntity(parseCtx.getTopToTable().get(topOp), parentViewInfo, isDirectRead));
 
     for (Partition part : parts) {
       if (part.getTable().isPartitioned()) {
-        PlanUtils.addInput(inputs, new ReadEntity(part, parentViewInfo));
+        PlanUtils.addInput(inputs, new ReadEntity(part, parentViewInfo, isDirectRead));
       } else {
-        PlanUtils.addInput(inputs, new ReadEntity(part.getTable(), parentViewInfo));
+        PlanUtils.addInput(inputs, new ReadEntity(part.getTable(), parentViewInfo, isDirectRead));
       }
 
       // Later the properties have to come from the partition as opposed
@@ -1236,7 +1239,7 @@ public final class GenMapRedUtils {
       // Check if InputFormatClass is valid
       String inputFormatClass = conf.getVar(ConfVars.HIVEMERGEINPUTFORMATBLOCKLEVEL);
       try {
-        Class c = (Class<? extends InputFormat>) Class.forName(inputFormatClass);
+        Class c = Class.forName(inputFormatClass);
 
         LOG.info("RCFile format- Using block level merge");
         cplan = GenMapRedUtils.createRCFileMergeTask(fsInputDesc, finalName,
@@ -1633,7 +1636,7 @@ public final class GenMapRedUtils {
           // merge for a map-only job
           // or for a map-reduce job
           if (currTask.getWork() instanceof TezWork) {
-            return hconf.getBoolVar(ConfVars.HIVEMERGEMAPFILES) || 
+            return hconf.getBoolVar(ConfVars.HIVEMERGEMAPFILES) ||
                 hconf.getBoolVar(ConfVars.HIVEMERGEMAPREDFILES);
           } else if (currTask.getWork() instanceof MapredWork) {
             ReduceWork reduceWork = ((MapredWork) currTask.getWork()).getReduceWork();
@@ -1680,9 +1683,9 @@ public final class GenMapRedUtils {
       Context baseCtx = parseCtx.getContext();
   	  // if we are on viewfs we don't want to use /tmp as tmp dir since rename from /tmp/..
       // to final location /user/hive/warehouse/ will fail later, so instead pick tmp dir
-      // on same namespace as tbl dir. 
-      Path tmpDir = dest.toUri().getScheme().equals("viewfs") ? 
-        baseCtx.getExtTmpPathRelTo(dest.toUri()) : 
+      // on same namespace as tbl dir.
+      Path tmpDir = dest.toUri().getScheme().equals("viewfs") ?
+        baseCtx.getExtTmpPathRelTo(dest.toUri()) :
         baseCtx.getExternalTmpPath(dest.toUri());
 
       FileSinkDesc fileSinkDesc = fsOp.getConf();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java Fri Feb 14 16:57:53 2014
@@ -124,11 +124,7 @@ public class HiveAuthorizationTaskFactor
       }
     }
 
-    String userName = null;
-    if (SessionState.get() != null
-        && SessionState.get().getAuthenticator() != null) {
-      userName = SessionState.get().getAuthenticator().getUserName();
-    }
+    String userName = SessionState.getUserFromAuthenticator();
 
     GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc,
         principalDesc, userName, PrincipalType.USER, grantOption);
@@ -247,11 +243,7 @@ public class HiveAuthorizationTaskFactor
       roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
     }
 
-    String roleOwnerName = "";
-    if (SessionState.get() != null
-        && SessionState.get().getAuthenticator() != null) {
-      roleOwnerName = SessionState.get().getAuthenticator().getUserName();
-    }
+    String roleOwnerName = SessionState.getUserFromAuthenticator();
 
     //until change is made to use the admin option. Default to false with V2 authorization
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Fri Feb 14 16:57:53 2014
@@ -895,12 +895,15 @@ public final class PlanUtils {
   // is already present, make sure the parents are added.
   // Consider the query:
   // select * from (select * from V2 union all select * from V3) subq;
-  // where both V2 and V3 depend on V1
+  // where both V2 and V3 depend on V1 (eg V2 : select * from V1, V3: select * from V1),
   // addInput would be called twice for V1 (one with parent V2 and the other with parent V3).
   // When addInput is called for the first time for V1, V1 (parent V2) is added to inputs.
   // When addInput is called for the second time for V1, the input V1 from inputs is picked up,
  // and its parents are enhanced to include V2 and V3
-  // The inputs will contain: (V2, no parent), (V3, no parent), (v1, parents(V2, v3))
+  // The inputs will contain: (V2, no parent), (V3, no parent), (V1, parents(V2, V3))
+  //
+  // If the ReadEntity is already present and another ReadEntity with same name is
+  // added, then the isDirect flag is updated to be the OR of values of both.
   public static ReadEntity addInput(Set<ReadEntity> inputs, ReadEntity newInput) {
     // If the input is already present, make sure the new parent is added to the input.
     if (inputs.contains(newInput)) {
@@ -908,6 +911,7 @@ public final class PlanUtils {
         if (input.equals(newInput)) {
           if ((newInput.getParents() != null) && (!newInput.getParents().isEmpty())) {
             input.getParents().addAll(newInput.getParents());
+            input.setDirect(input.isDirect() || newInput.isDirect());
           }
           return input;
         }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java Fri Feb 14 16:57:53 2014
@@ -36,6 +36,11 @@ public interface HiveAuthenticationProvi
 
   public void destroy() throws HiveException;
 
+  /**
+   * This function is meant to be used only for hive internal implementations of this interface.
+   * SessionState is not a public interface.
+   * @param ss SessionState that created this instance
+   */
   public void setSessionState(SessionState ss);
 
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java Fri Feb 14 16:57:53 2014
@@ -68,10 +68,11 @@ public class AuthorizationUtils {
     case DATABASE:
       return HivePrivilegeObjectType.DATABASE;
     case TABLE:
-      return HivePrivilegeObjectType.TABLE;
+      return HivePrivilegeObjectType.TABLE_OR_VIEW;
     case LOCAL_DIR:
+      return HivePrivilegeObjectType.LOCAL_URI;
     case DFS_DIR:
-      return HivePrivilegeObjectType.URI;
+      return HivePrivilegeObjectType.DFS_URI;
     case PARTITION:
     case DUMMYPARTITION: //need to determine if a different type is needed for dummy partitions
       return HivePrivilegeObjectType.PARTITION;
@@ -127,12 +128,12 @@ public class AuthorizationUtils {
     switch(type){
     case DATABASE:
       return HiveObjectType.DATABASE;
-    case TABLE:
+    case TABLE_OR_VIEW:
       return HiveObjectType.TABLE;
     case PARTITION:
       return HiveObjectType.PARTITION;
-    case URI:
-    case VIEW:
+    case LOCAL_URI:
+    case DFS_URI:
       throw new HiveException("Unsupported type " + type);
     default:
       //should not happen as we have accounted for all types
@@ -149,7 +150,7 @@ public class AuthorizationUtils {
    */
   public static HiveObjectRef getThriftHiveObjectRef(HivePrivilegeObject privObj) throws HiveException {
     HiveObjectType objType = getThriftHiveObjType(privObj.getType());
-    return new HiveObjectRef(objType, privObj.getDbname(), privObj.getTableviewname(), null, null);
+    return new HiveObjectRef(objType, privObj.getDbname(), privObj.getTableViewURI(), null, null);
   }
 
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java Fri Feb 14 16:57:53 2014
@@ -22,6 +22,11 @@ import org.apache.hadoop.hive.common.cla
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 /**
  * Factory for getting current valid instance of IMetaStoreClient
+ * Metastore client cannot be cached in authorization interface as that
+ * can get invalidated between the calls with the logic in Hive class.
+ * The standard way of getting metastore client object is through Hive.get().getMSC().
+ * But Hive class is not a public interface, so this factory helps in hiding Hive
+ * class from the authorization interface users.
  */
 @LimitedPrivate(value = { "" })
 @Evolving

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java Fri Feb 14 16:57:53 2014
@@ -29,19 +29,34 @@ public class HivePrivilegeObject {
 
   @Override
   public String toString() {
-    return "Hive Object [type=" + type + ", dbname=" + dbname + ", table/viewname="
-        + tableviewname + "]";
+    String name = null;
+    switch (type) {
+    case DATABASE:
+      name = dbname;
+      break;
+    case TABLE_OR_VIEW:
+      name = (dbname == null ? "" : dbname + ".") + tableviewname;
+      break;
+    case LOCAL_URI:
+    case DFS_URI:
+      name = tableviewname;
+      break;
+    case PARTITION:
+      break;
+    }
+    return "Object [type=" + type + ", name=" + name + "]";
+
   }
 
-  public enum HivePrivilegeObjectType { DATABASE, TABLE, VIEW, PARTITION, URI};
+  public enum HivePrivilegeObjectType { DATABASE, TABLE_OR_VIEW, PARTITION, LOCAL_URI, DFS_URI};
   private final HivePrivilegeObjectType type;
   private final String dbname;
   private final String tableviewname;
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableviewname){
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI){
     this.type = type;
     this.dbname = dbname;
-    this.tableviewname = tableviewname;
+    this.tableviewname = tableViewURI;
   }
 
   public HivePrivilegeObjectType getType() {
@@ -52,7 +67,7 @@ public class HivePrivilegeObject {
     return dbname;
   }
 
-  public String getTableviewname() {
+  public String getTableViewURI() {
     return tableviewname;
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java Fri Feb 14 16:57:53 2014
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRole;
 
 /**
  * Utility class to authorize grant/revoke privileges
@@ -35,7 +36,7 @@ public class GrantPrivAuthUtils {
 
   static void authorize(List<HivePrincipal> hivePrincipals, List<HivePrivilege> hivePrivileges,
       HivePrivilegeObject hivePrivObject, boolean grantOption, IMetaStoreClient metastoreClient,
-      String userName)
+      String userName, List<HiveRole> curRoles, boolean isAdmin)
           throws HiveAuthzPluginException, HiveAccessControlException {
 
     // check if this user has grant privileges for this privileges on this
@@ -44,23 +45,13 @@ public class GrantPrivAuthUtils {
     // map priv being granted to required privileges
     RequiredPrivileges reqPrivs = getGrantRequiredPrivileges(hivePrivileges);
 
-    // api for checking required privileges for a user
-    checkRequiredPrivileges(hivePrincipals, reqPrivs, hivePrivObject, metastoreClient, userName);
+    // check if this user has necessary privileges (reqPrivs) on this object
+    checkRequiredPrivileges(reqPrivs, hivePrivObject, metastoreClient, userName, curRoles, isAdmin);
   }
 
-  private static void checkRequiredPrivileges(List<HivePrincipal> hivePrincipals,
-      RequiredPrivileges reqPrivs, HivePrivilegeObject hivePrivObject,
-      IMetaStoreClient metastoreClient, String userName)
-          throws HiveAuthzPluginException, HiveAccessControlException {
-
-  for (HivePrincipal hivePrincipal : hivePrincipals) {
-      checkRequiredPrivileges(hivePrincipal, reqPrivs, hivePrivObject, metastoreClient, userName);
-    }
-  }
-
-  private static void checkRequiredPrivileges(HivePrincipal hivePrincipal,
+  private static void checkRequiredPrivileges(
       RequiredPrivileges reqPrivileges, HivePrivilegeObject hivePrivObject,
-      IMetaStoreClient metastoreClient, String userName)
+      IMetaStoreClient metastoreClient, String userName, List<HiveRole> curRoles, boolean isAdmin)
           throws HiveAuthzPluginException, HiveAccessControlException {
 
     // keep track of the principals on which privileges have been checked for
@@ -68,7 +59,7 @@ public class GrantPrivAuthUtils {
 
     // get privileges for this user and its roles on this object
     RequiredPrivileges availPrivs = SQLAuthorizationUtils.getPrivilegesFromMetaStore(
-        metastoreClient, userName, hivePrivObject);
+        metastoreClient, userName, hivePrivObject, curRoles, isAdmin);
 
     // check if required privileges is subset of available privileges
     Collection<SQLPrivTypeGrant> missingPrivs = reqPrivileges.findMissingPrivs(availPrivs);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java Fri Feb 14 16:57:53 2014
@@ -52,22 +52,23 @@ public class Operation2Privilege {
   private static SQLPrivTypeGrant[] SEL_NOGRANT_AR = arr(SQLPrivTypeGrant.SELECT_NOGRANT);
   private static SQLPrivTypeGrant[] SEL_GRANT_AR = arr(SQLPrivTypeGrant.SELECT_WGRANT);
   private static SQLPrivTypeGrant[] ADMIN_PRIV_AR = arr(SQLPrivTypeGrant.ADMIN_PRIV);
+  private static SQLPrivTypeGrant[] INS_NOGRANT_AR = arr(SQLPrivTypeGrant.INSERT_NOGRANT);
+  private static SQLPrivTypeGrant[] DEL_NOGRANT_AR = arr(SQLPrivTypeGrant.DELETE_NOGRANT);
+
 
   static {
     op2Priv = new HashMap<HiveOperationType, InOutPrivs>();
 
     op2Priv.put(HiveOperationType.EXPLAIN, new InOutPrivs(SEL_NOGRANT_AR,
         SEL_NOGRANT_AR)); //??
-    op2Priv.put(HiveOperationType.LOAD, new InOutPrivs(ADMIN_PRIV_AR, null));
-    // select with grant for exporting contents
-    op2Priv.put(HiveOperationType.EXPORT, new InOutPrivs(SEL_GRANT_AR, null));
-
-    op2Priv.put(HiveOperationType.IMPORT, new InOutPrivs(ADMIN_PRIV_AR, null));
 
     op2Priv.put(HiveOperationType.CREATEDATABASE, new InOutPrivs(ADMIN_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.DROPDATABASE, new InOutPrivs(ADMIN_PRIV_AR, null));
+
+    op2Priv.put(HiveOperationType.DROPDATABASE, new InOutPrivs(OWNER_PRIV_AR, null));
     //this should be database usage privilege once it is supported
     op2Priv.put(HiveOperationType.SWITCHDATABASE, new InOutPrivs(null, null));
+
+    // lock operations not controlled for now
     op2Priv.put(HiveOperationType.LOCKDB, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.UNLOCKDB, new InOutPrivs(null, null));
 
@@ -78,49 +79,83 @@ public class Operation2Privilege {
     //meta store check command - require admin priv
     op2Priv.put(HiveOperationType.MSCK, new InOutPrivs(ADMIN_PRIV_AR, null));
 
+
     //alter table commands require table ownership
-    op2Priv.put(HiveOperationType.ALTERTABLE_ADDCOLS, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_REPLACECOLS, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_RENAMECOL, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_RENAMEPART, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_RENAME, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_DROPPARTS, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_ADDPARTS, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_TOUCH, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_ARCHIVE, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_UNARCHIVE, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_SERIALIZER, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_PARTCOLTYPE, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERPARTITION_SERIALIZER, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_SERDEPROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERPARTITION_SERDEPROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_CLUSTER_SORT, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_BUCKETNUM, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERPARTITION_BUCKETNUM, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_PROTECTMODE, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERPARTITION_PROTECTMODE, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_FILEFORMAT, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERPARTITION_FILEFORMAT, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_LOCATION, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERPARTITION_LOCATION, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_MERGEFILES, new InOutPrivs(null, null));
-    op2Priv.put(HiveOperationType.ALTERPARTITION_MERGEFILES, new InOutPrivs(null, null));
-    op2Priv.put(HiveOperationType.ALTERTABLE_SKEWED, new InOutPrivs(null, null));
-    op2Priv.put(HiveOperationType.ALTERTBLPART_SKEWED_LOCATION, new InOutPrivs(null, null));
+    // There should not be an output object, but just in case the table is incorrectly added
+    // to output instead of input, adding the owner requirement on output will catch that as well
+    op2Priv.put(HiveOperationType.ALTERTABLE_ADDCOLS, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_REPLACECOLS, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_RENAMECOL, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_RENAMEPART, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_RENAME, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_TOUCH, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_ARCHIVE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_UNARCHIVE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_SERIALIZER, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_PARTCOLTYPE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERPARTITION_SERIALIZER, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_SERDEPROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERPARTITION_SERDEPROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_CLUSTER_SORT, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_BUCKETNUM, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERPARTITION_BUCKETNUM, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_PROTECTMODE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERPARTITION_PROTECTMODE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_FILEFORMAT, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERPARTITION_FILEFORMAT, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_LOCATION, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERPARTITION_LOCATION, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_MERGEFILES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERPARTITION_MERGEFILES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_SKEWED, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTBLPART_SKEWED_LOCATION, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.TRUNCATETABLE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+
+    //table ownership for create/drop/alter index
+    op2Priv.put(HiveOperationType.CREATEINDEX, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.DROPINDEX, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERINDEX_REBUILD, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERINDEX_PROPS, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+
+    // require view ownership for alter/drop view
+    op2Priv.put(HiveOperationType.ALTERVIEW_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.DROPVIEW_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERVIEW_RENAME, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.DROPVIEW, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR));
 
     op2Priv.put(HiveOperationType.ANALYZE_TABLE, new InOutPrivs(arr(SQLPrivTypeGrant.SELECT_NOGRANT, SQLPrivTypeGrant.INSERT_NOGRANT), null));
     op2Priv.put(HiveOperationType.SHOWDATABASES, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.SHOWTABLES, new InOutPrivs(null, null));
 
+    // operations that require insert/delete privileges
+    op2Priv.put(HiveOperationType.ALTERTABLE_DROPPARTS, new InOutPrivs(DEL_NOGRANT_AR, null));
+    op2Priv.put(HiveOperationType.ALTERTABLE_ADDPARTS, new InOutPrivs(INS_NOGRANT_AR, null));
+
+    // select with grant for exporting contents
+    op2Priv.put(HiveOperationType.EXPORT, new InOutPrivs(SEL_GRANT_AR, null));
+    op2Priv.put(HiveOperationType.IMPORT, new InOutPrivs(INS_NOGRANT_AR, null));
+
+    // operations require select priv
     op2Priv.put(HiveOperationType.SHOWCOLUMNS, new InOutPrivs(SEL_NOGRANT_AR, null));
     op2Priv.put(HiveOperationType.SHOW_TABLESTATUS, new InOutPrivs(SEL_NOGRANT_AR, null));
     op2Priv.put(HiveOperationType.SHOW_TBLPROPERTIES, new InOutPrivs(SEL_NOGRANT_AR, null));
+    op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, new InOutPrivs(SEL_NOGRANT_AR, OWNER_PRIV_AR));
 
-    //show create table is more sensitive information, includes table properties etc
+    // QUERY, LOAD ops can contain an insert & overwrite, so require insert+delete privileges on output
+    op2Priv.put(HiveOperationType.QUERY, new InOutPrivs(SEL_NOGRANT_AR,
+        arr(SQLPrivTypeGrant.INSERT_NOGRANT, SQLPrivTypeGrant.DELETE_NOGRANT)));
+    op2Priv.put(HiveOperationType.LOAD, new InOutPrivs(SEL_NOGRANT_AR,
+        arr(SQLPrivTypeGrant.INSERT_NOGRANT, SQLPrivTypeGrant.DELETE_NOGRANT)));
+
+    // show create table is more sensitive information, includes table properties etc
     // for now require select WITH GRANT
     op2Priv.put(HiveOperationType.SHOW_CREATETABLE, new InOutPrivs(SEL_GRANT_AR, null));
 
+    // for now allow only create-view with 'select with grant'
+    // the owner will also have select with grant privileges on new view
+    op2Priv.put(HiveOperationType.CREATEVIEW, new InOutPrivs(SEL_GRANT_AR, null));
+
     op2Priv.put(HiveOperationType.SHOWFUNCTIONS, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.SHOWINDEXES, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.SHOWPARTITIONS, new InOutPrivs(null, null));
@@ -129,21 +164,6 @@ public class Operation2Privilege {
     op2Priv.put(HiveOperationType.DROPFUNCTION, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.CREATEMACRO, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.DROPMACRO, new InOutPrivs(null, null));
-    op2Priv.put(HiveOperationType.CREATEVIEW, new InOutPrivs(SEL_GRANT_AR, null));
-
-    // require view ownership
-    op2Priv.put(HiveOperationType.DROPVIEW, new InOutPrivs(OWNER_PRIV_AR, null));
-
-    //table ownership for create/drop/alter index
-    op2Priv.put(HiveOperationType.CREATEINDEX, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.DROPINDEX, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERINDEX_REBUILD, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERINDEX_PROPS, new InOutPrivs(OWNER_PRIV_AR, null));
-
-    // require view ownership for alter/drop view
-    op2Priv.put(HiveOperationType.ALTERVIEW_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.DROPVIEW_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null));
-    op2Priv.put(HiveOperationType.ALTERVIEW_RENAME, new InOutPrivs(OWNER_PRIV_AR, null));
 
     op2Priv.put(HiveOperationType.LOCKTABLE, new InOutPrivs(null, null));
     op2Priv.put(HiveOperationType.UNLOCKTABLE, new InOutPrivs(null, null));
@@ -151,13 +171,7 @@ public class Operation2Privilege {
     // require db ownership
     op2Priv.put(HiveOperationType.CREATETABLE, new InOutPrivs(OWNER_PRIV_AR, null));
 
-    // require table ownership
-    op2Priv.put(HiveOperationType.TRUNCATETABLE, new InOutPrivs(OWNER_PRIV_AR, null));
-
-    op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, new InOutPrivs(OWNER_PRIV_AR, SEL_NOGRANT_AR));
-    op2Priv.put(HiveOperationType.QUERY, new InOutPrivs(SEL_NOGRANT_AR, null));
-
-    op2Priv.put(HiveOperationType.ALTERDATABASE, new InOutPrivs(ADMIN_PRIV_AR, null));
+    op2Priv.put(HiveOperationType.ALTERDATABASE, new InOutPrivs(OWNER_PRIV_AR, null));
     op2Priv.put(HiveOperationType.DESCDATABASE, new InOutPrivs(null, null));
 
     // The following actions are authorized through SQLStdHiveAccessController,

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RequiredPrivileges.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RequiredPrivileges.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RequiredPrivileges.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RequiredPrivileges.java Fri Feb 14 16:57:53 2014
@@ -28,6 +28,8 @@ import org.apache.hadoop.hive.ql.securit
 /**
  * Captures privilege sets, and can be used to compare required and available privileges
  * to find missing privileges (if any).
+ * ADMIN_PRIV is considered a special privilege, if the user has that, then no privilege is
+ * missing.
  */
 public class RequiredPrivileges {
 
@@ -56,6 +58,12 @@ public class RequiredPrivileges {
    */
   public Collection<SQLPrivTypeGrant> findMissingPrivs(RequiredPrivileges availPrivs) {
     MissingPrivilegeCapturer missingPrivCapturer = new MissingPrivilegeCapturer();
+
+    if(availPrivs.privilegeGrantSet.contains(SQLPrivTypeGrant.ADMIN_PRIV)){
+      //you are an admin! You have all privileges, no missing privileges
+      return missingPrivCapturer.getMissingPrivileges();
+    }
+    // check the mere mortals!
     for (SQLPrivTypeGrant requiredPriv : privilegeGrantSet) {
       if (!availPrivs.privilegeGrantSet.contains(requiredPriv)) {
         missingPrivCapturer.addMissingPrivilege(requiredPriv);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java Fri Feb 14 16:57:53 2014
@@ -21,6 +21,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
@@ -45,6 +46,7 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRole;
 import org.apache.thrift.TException;
 
 public class SQLAuthorizationUtils {
@@ -121,7 +123,7 @@ public class SQLAuthorizationUtils {
     case DATABASE:
       return HivePrivilegeObjectType.DATABASE;
     case TABLE:
-      return HivePrivilegeObjectType.TABLE;
+      return HivePrivilegeObjectType.TABLE_OR_VIEW;
     case COLUMN:
     case GLOBAL:
     case PARTITION:
@@ -151,16 +153,22 @@ public class SQLAuthorizationUtils {
 
   /**
    * Get the privileges this user(userName argument) has on the object
-   * (hivePrivObject argument)
+   * (hivePrivObject argument) If isAdmin is true, adds an admin privilege as
+   * well.
    *
    * @param metastoreClient
    * @param userName
    * @param hivePrivObject
+   * @param curRoles
+   *          current active roles for user
+   * @param isAdmin
+   *          if user can run as admin user
    * @return
    * @throws HiveAuthzPluginException
    */
   static RequiredPrivileges getPrivilegesFromMetaStore(IMetaStoreClient metastoreClient,
-      String userName, HivePrivilegeObject hivePrivObject) throws HiveAuthzPluginException {
+      String userName, HivePrivilegeObject hivePrivObject, List<HiveRole> curRoles, boolean isAdmin)
+          throws HiveAuthzPluginException {
 
     // get privileges for this user and its role on this object
     PrincipalPrivilegeSet thrifPrivs = null;
@@ -175,6 +183,8 @@ public class SQLAuthorizationUtils {
       throwGetPrivErr(e, hivePrivObject, userName);
     }
 
+    filterPrivsByCurrentRoles(thrifPrivs, curRoles);
+
     // convert to RequiredPrivileges
     RequiredPrivileges privs = getRequiredPrivsFromThrift(thrifPrivs);
 
@@ -182,11 +192,42 @@ public class SQLAuthorizationUtils {
     if (isOwner(metastoreClient, userName, hivePrivObject)) {
       privs.addPrivilege(SQLPrivTypeGrant.OWNER_PRIV);
     }
+    if (isAdmin) {
+      privs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
+    }
 
     return privs;
   }
 
   /**
+   * Remove any role privileges that don't belong to the roles in curRoles
+   * @param thriftPrivs
+   * @param curRoles
+   * @return
+   */
+  private static void filterPrivsByCurrentRoles(PrincipalPrivilegeSet thriftPrivs,
+      List<HiveRole> curRoles) {
+    // check if there are privileges to be filtered
+    if(thriftPrivs == null || thriftPrivs.getRolePrivileges() == null
+        || thriftPrivs.getRolePrivilegesSize() == 0
+        ){
+      // no privileges to filter
+      return;
+    }
+
+    // add the privs for roles in curRoles to new role-to-priv map
+    Map<String, List<PrivilegeGrantInfo>> filteredRolePrivs = new HashMap<String, List<PrivilegeGrantInfo>>();
+    for(HiveRole role : curRoles){
+      String roleName = role.getRoleName();
+      List<PrivilegeGrantInfo> privs = thriftPrivs.getRolePrivileges().get(roleName);
+      if(privs != null){
+        filteredRolePrivs.put(roleName, privs);
+      }
+    }
+    thriftPrivs.setRolePrivileges(filteredRolePrivs);
+  }
+
+  /**
    * Check if user is owner of the given object
    *
    * @param metastoreClient
@@ -200,10 +241,10 @@ public class SQLAuthorizationUtils {
   private static boolean isOwner(IMetaStoreClient metastoreClient, String userName,
       HivePrivilegeObject hivePrivObject) throws HiveAuthzPluginException {
     //for now, check only table
-    if(hivePrivObject.getType() == HivePrivilegeObjectType.TABLE){
+    if(hivePrivObject.getType() == HivePrivilegeObjectType.TABLE_OR_VIEW){
       Table thriftTableObj = null;
       try {
-        thriftTableObj = metastoreClient.getTable(hivePrivObject.getDbname(), hivePrivObject.getTableviewname());
+        thriftTableObj = metastoreClient.getTable(hivePrivObject.getDbname(), hivePrivObject.getTableViewURI());
       } catch (MetaException e) {
         throwGetTableErr(e, hivePrivObject);
       } catch (NoSuchObjectException e) {
@@ -224,7 +265,8 @@ public class SQLAuthorizationUtils {
 
   private static void throwGetPrivErr(Exception e, HivePrivilegeObject hivePrivObject,
       String userName) throws HiveAuthzPluginException {
-    String msg = "Error getting privileges on " + hivePrivObject + " for " + userName;
+    String msg = "Error getting privileges on " + hivePrivObject + " for " + userName + ": "
+      + e.getMessage();
     throw new HiveAuthzPluginException(msg, e);
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLPrivTypeGrant.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLPrivTypeGrant.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLPrivTypeGrant.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLPrivTypeGrant.java Fri Feb 14 16:57:53 2014
@@ -29,8 +29,8 @@ public enum SQLPrivTypeGrant {
   UPDATE_WGRANT(SQLPrivilegeType.UPDATE, true),
   DELETE_NOGRANT(SQLPrivilegeType.DELETE, false),
   DELETE_WGRANT(SQLPrivilegeType.DELETE, true),
-  OWNER_PRIV("Object ownership"),
-  ADMIN_PRIV("Admin privilege"); // This one can be used to deny permission for performing the operation
+  OWNER_PRIV("OBJECT OWNERSHIP"),
+  ADMIN_PRIV("ADMIN PRIVILEGE"); // This one can be used to deny permission for performing the operation
 
   private final SQLPrivilegeType privType;
   private final boolean withGrant;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java Fri Feb 14 16:57:53 2014
@@ -42,7 +42,6 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
@@ -61,6 +60,8 @@ public class SQLStdHiveAccessController 
   private String currentUserName;
   private List<HiveRole> currentRoles;
   private HiveRole adminRole;
+  private final String ADMIN_ONLY_MSG = "User has to belong to ADMIN role and "
+      + "have it as current role, for this action.";
 
   SQLStdHiveAccessController(HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf,
       HiveAuthenticationProvider authenticator) throws HiveAuthzPluginException {
@@ -102,7 +103,7 @@ public class SQLStdHiveAccessController 
       return currentRoles;
     } catch (Exception e) {
         throw new HiveAuthzPluginException("Failed to retrieve roles for "+
-            currentUserName, e);
+            currentUserName + ": " + e.getMessage(), e);
     }
   }
 
@@ -112,15 +113,12 @@ public class SQLStdHiveAccessController 
       HivePrincipal grantorPrincipal, boolean grantOption)
           throws HiveAuthzPluginException, HiveAccessControlException {
 
-    // expand ALL privileges, if any
-    hivePrivileges = expandAllPrivileges(hivePrivileges);
-
-    SQLAuthorizationUtils.validatePrivileges(hivePrivileges);
+    hivePrivileges = expandAndValidatePrivileges(hivePrivileges);
 
     IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
     // authorize the grant
     GrantPrivAuthUtils.authorize(hivePrincipals, hivePrivileges, hivePrivObject, grantOption,
-        metastoreClient, authenticator.getUserName());
+        metastoreClient, authenticator.getUserName(), getCurrentRoles(), isUserAdmin());
 
     // grant
     PrivilegeBag privBag = getThriftPrivilegesBag(hivePrincipals, hivePrivileges, hivePrivObject,
@@ -128,10 +126,18 @@ public class SQLStdHiveAccessController 
     try {
       metastoreClient.grant_privileges(privBag);
     } catch (Exception e) {
-      throw new HiveAuthzPluginException("Error granting privileges", e);
+      throw new HiveAuthzPluginException("Error granting privileges: " + e.getMessage(), e);
     }
   }
 
+  private List<HivePrivilege> expandAndValidatePrivileges(List<HivePrivilege> hivePrivileges)
+      throws HiveAuthzPluginException {
+    // expand ALL privileges, if any
+    hivePrivileges = expandAllPrivileges(hivePrivileges);
+    SQLAuthorizationUtils.validatePrivileges(hivePrivileges);
+    return hivePrivileges;
+  }
+
   private List<HivePrivilege> expandAllPrivileges(List<HivePrivilege> hivePrivileges) {
     Set<HivePrivilege> hivePrivSet = new HashSet<HivePrivilege>();
     for (HivePrivilege hivePrivilege : hivePrivileges) {
@@ -196,7 +202,8 @@ public class SQLStdHiveAccessController 
       List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
       HivePrincipal grantorPrincipal, boolean grantOption)
           throws HiveAuthzPluginException, HiveAccessControlException {
-    SQLAuthorizationUtils.validatePrivileges(hivePrivileges);
+
+    hivePrivileges = expandAndValidatePrivileges(hivePrivileges);
 
     IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
     // authorize the revoke, and get the set of privileges to be revoked
@@ -221,9 +228,9 @@ public class SQLStdHiveAccessController 
   public void createRole(String roleName, HivePrincipal adminGrantor)
       throws HiveAuthzPluginException, HiveAccessControlException {
     // only user belonging to admin role can create new roles.
-    if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) {
+    if (!isUserAdmin()) {
       throw new HiveAccessControlException("Current user : " + currentUserName+ " is not"
-      + " allowed to add roles. Only users belonging to admin role can add new roles.");
+      + " allowed to add roles. " + ADMIN_ONLY_MSG);
     }
     try {
       String grantorName = adminGrantor == null ? null : adminGrantor.getName();
@@ -237,9 +244,9 @@ public class SQLStdHiveAccessController 
   @Override
   public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
     // only user belonging to admin role can drop existing role
-    if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) {
+    if (!isUserAdmin()) {
       throw new HiveAccessControlException("Current user : " + currentUserName+ " is not"
-      + " allowed to drop role. Only users belonging to admin role can drop roles.");
+      + " allowed to drop role. " + ADMIN_ONLY_MSG);
     }
     try {
       metastoreClientFactory.getHiveMetastoreClient().drop_role(roleName);
@@ -260,7 +267,7 @@ public class SQLStdHiveAccessController 
       return hiveRoles;
     } catch (Exception e) {
       throw new HiveAuthzPluginException("Error listing roles for user "
-          + hivePrincipal.getName(), e);
+          + hivePrincipal.getName() + ": " + e.getMessage(), e);
     }
   }
 
@@ -268,9 +275,9 @@ public class SQLStdHiveAccessController 
   public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roleNames,
     boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
     HiveAccessControlException {
-    if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) {
+    if (!isUserAdmin()) {
       throw new HiveAccessControlException("Current user : " + currentUserName+ " is not"
-        + " allowed to grant role. Currently only users belonging to admin role can grant roles.");
+        + " allowed to grant role. Currently " + ADMIN_ONLY_MSG);
     }
     for (HivePrincipal hivePrincipal : hivePrincipals) {
       for (String roleName : roleNames) {
@@ -300,9 +307,9 @@ public class SQLStdHiveAccessController 
       throw new HiveAuthzPluginException("Revoking only the admin privileges on "
         + "role is not currently supported");
     }
-    if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) {
+    if (!isUserAdmin()) {
       throw new HiveAccessControlException("Current user : " + currentUserName+ " is not"
-          + " allowed to revoke role. Currently only users belonging to admin role can revoke roles.");
+          + " allowed to revoke role. " + ADMIN_ONLY_MSG);
     }
     for (HivePrincipal hivePrincipal : hivePrincipals) {
       for (String roleName : roleNames) {
@@ -312,7 +319,7 @@ public class SQLStdHiveAccessController 
               AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType()));
         } catch (Exception e) {
           String msg = "Error revoking roles for " + hivePrincipal.getName() + " to role "
-              + roleName;
+              + roleName + ": " + e.getMessage();
           throw new HiveAuthzPluginException(msg, e);
         }
       }
@@ -322,9 +329,9 @@ public class SQLStdHiveAccessController 
   @Override
   public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException {
     // only user belonging to admin role can list role
-    if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) {
+    if (!isUserAdmin()) {
       throw new HiveAccessControlException("Current user : " + currentUserName+ " is not"
-        + " allowed to list roles. Only users belonging to admin role can list roles.");
+        + " allowed to list roles. " + ADMIN_ONLY_MSG);
     }
     try {
       return metastoreClientFactory.getHiveMetastoreClient().listRoleNames();
@@ -372,7 +379,7 @@ public class SQLStdHiveAccessController 
       return resPrivInfos;
 
     } catch (Exception e) {
-      throw new HiveAuthzPluginException("Error showing privileges", e);
+      throw new HiveAuthzPluginException("Error showing privileges: "+ e.getMessage(), e);
     }
 
   }
@@ -383,7 +390,7 @@ public class SQLStdHiveAccessController 
     case DATABASE:
       return HivePrivilegeObjectType.DATABASE;
     case TABLE:
-      return HivePrivilegeObjectType.TABLE;
+      return HivePrivilegeObjectType.TABLE_OR_VIEW;
     case COLUMN:
     case GLOBAL:
     case PARTITION:
@@ -429,11 +436,10 @@ public class SQLStdHiveAccessController 
   }
 
   /**
-   * @param principal
    * @return true only if current role of user is Admin
    * @throws HiveAuthzPluginException
    */
-  private boolean isUserAdmin(HivePrincipal principal) throws HiveAuthzPluginException {
+  boolean isUserAdmin() throws HiveAuthzPluginException {
     List<HiveRole> roles;
     try {
       roles = getCurrentRoles();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java Fri Feb 14 16:57:53 2014
@@ -20,34 +20,51 @@ package org.apache.hadoop.hive.ql.securi
 import java.util.Collection;
 import java.util.List;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 
 public class SQLStdHiveAuthorizationValidator implements HiveAuthorizationValidator {
 
   private final HiveMetastoreClientFactory metastoreClientFactory;
   private final HiveConf conf;
   private final HiveAuthenticationProvider authenticator;
+  private final SQLStdHiveAccessController privController;
+  public static final Log LOG = LogFactory.getLog(HiveMetaStore.class);
 
   public SQLStdHiveAuthorizationValidator(HiveMetastoreClientFactory metastoreClientFactory,
-      HiveConf conf, HiveAuthenticationProvider authenticator) {
+      HiveConf conf, HiveAuthenticationProvider authenticator,
+      SQLStdHiveAccessController privController) {
+
     this.metastoreClientFactory = metastoreClientFactory;
     this.conf = conf;
     this.authenticator = authenticator;
+    this.privController = privController;
   }
 
   @Override
   public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
       List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException, HiveAccessControlException {
+
+    if(LOG.isDebugEnabled()){
+      String msg = "Checking privileges for operation " + hiveOpType + " by user "
+        +  authenticator.getUserName() + " on " + " input objects " + inputHObjs
+        + " and output objects " + outputHObjs;
+      LOG.debug(msg);
+    }
+
     String userName = authenticator.getUserName();
     IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
 
@@ -69,13 +86,23 @@ public class SQLStdHiveAuthorizationVali
 
     // check if this user has these privileges on the objects
     for (HivePrivilegeObject hObj : hObjs) {
-      // get the privileges that this user has on the object
-      RequiredPrivileges availPrivs = SQLAuthorizationUtils.getPrivilegesFromMetaStore(
-          metastoreClient, userName, hObj);
-      Collection<SQLPrivTypeGrant> missingPriv = requiredInpPrivs
-          .findMissingPrivs(availPrivs);
-      SQLAuthorizationUtils.assertNoMissingPrivilege(missingPriv, new HivePrincipal(userName,
-          HivePrincipalType.USER), hObj);
+      if (hObj.getType() == HivePrivilegeObjectType.LOCAL_URI) {
+
+      } else if (hObj.getType() == HivePrivilegeObjectType.DFS_URI) {
+
+      } else if (hObj.getType() == HivePrivilegeObjectType.PARTITION) {
+        // sql std authorization is managing privileges at the table/view levels only
+        // ignore partitions
+      } else {
+        // get the privileges that this user has on the object
+        RequiredPrivileges availPrivs = SQLAuthorizationUtils.getPrivilegesFromMetaStore(
+            metastoreClient, userName, hObj, privController.getCurrentRoles(),
+            privController.isUserAdmin());
+        Collection<SQLPrivTypeGrant> missingPriv = requiredInpPrivs
+            .findMissingPrivs(availPrivs);
+        SQLAuthorizationUtils.assertNoMissingPrivilege(missingPriv, new HivePrincipal(userName,
+            HivePrincipalType.USER), hObj);
+      }
     }
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java Fri Feb 14 16:57:53 2014
@@ -31,10 +31,12 @@ public class SQLStdHiveAuthorizerFactory
   @Override
   public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
       HiveConf conf, HiveAuthenticationProvider authenticator) throws HiveAuthzPluginException {
-
+    SQLStdHiveAccessController privilegeManager =
+        new SQLStdHiveAccessController(metastoreClientFactory, conf, authenticator);
     return new HiveAuthorizerImpl(
-        new SQLStdHiveAccessController(metastoreClientFactory, conf, authenticator),
-        new SQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator)
+        privilegeManager,
+        new SQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator,
+            privilegeManager)
         );
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java Fri Feb 14 16:57:53 2014
@@ -35,6 +35,10 @@ public class CreateTableAutomaticGrant {
   private Map<String, List<PrivilegeGrantInfo>> groupGrants;
   private Map<String, List<PrivilegeGrantInfo>> roleGrants;
 
+  // the owner can change, also owner might appear in user grants as well
+  // so keep owner privileges separate from userGrants
+  private List<PrivilegeGrantInfo> ownerGrant;
+
   public static CreateTableAutomaticGrant create(HiveConf conf)
       throws HiveException {
     CreateTableAutomaticGrant grants = new CreateTableAutomaticGrant();
@@ -44,20 +48,10 @@ public class CreateTableAutomaticGrant {
         HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_GROUP_GRANTS));
     grants.roleGrants = getGrantMap(HiveConf.getVar(conf,
         HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_ROLE_GRANTS));
-    
-    String grantor = null;
-    if (SessionState.get() != null
-        && SessionState.get().getAuthenticator() != null) {
-      grantor = SessionState.get().getAuthenticator().getUserName();
-      List<PrivilegeGrantInfo> ownerGrant = getGrantorInfoList(HiveConf.getVar(conf,
-          HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS));
-      if(ownerGrant != null) {
-        if (grants.userGrants == null) {
-          grants.userGrants = new HashMap<String, List<PrivilegeGrantInfo>>();
-        }
-        grants.userGrants.put(grantor, ownerGrant);
-      }
-    }
+
+    grants.ownerGrant = getGrantorInfoList(HiveConf.getVar(conf,
+        HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS));
+
     return grants;
   }
 
@@ -94,13 +88,11 @@ public class CreateTableAutomaticGrant {
     if (privList == null || privList.trim().equals("")) {
       return null;
     }
-    checkPrivilege(privList);
+    validatePrivilege(privList);
     String[] grantArray = privList.split(",");
     List<PrivilegeGrantInfo> grantInfoList = new ArrayList<PrivilegeGrantInfo>();
-    String grantor = null;
-    if (SessionState.get().getAuthenticator() != null) {
-      grantor = SessionState.get().getAuthenticator().getUserName();  
-    }
+    String grantor = SessionState.getUserFromAuthenticator();
+
     for (String grant : grantArray) {
       grantInfoList.add(new PrivilegeGrantInfo(grant, -1, grantor,
           PrincipalType.USER, true));
@@ -108,7 +100,7 @@ public class CreateTableAutomaticGrant {
     return grantInfoList;
   }
 
-  private static void checkPrivilege(String ownerGrantsInConfig)
+  private static void validatePrivilege(String ownerGrantsInConfig)
       throws HiveException {
     String[] ownerGrantArray = ownerGrantsInConfig.split(",");
     // verify the config
@@ -121,7 +113,15 @@ public class CreateTableAutomaticGrant {
   }
 
   public Map<String, List<PrivilegeGrantInfo>> getUserGrants() {
-    return userGrants;
+    Map<String, List<PrivilegeGrantInfo>> curUserGrants = new HashMap<String, List<PrivilegeGrantInfo>>();
+    String owner = SessionState.getUserFromAuthenticator();
+    if (owner != null && ownerGrant != null) {
+      curUserGrants.put(owner, ownerGrant);
+    }
+    if (userGrants != null) {
+      curUserGrants.putAll(userGrants);
+    }
+    return curUserGrants;
   }
 
   public Map<String, List<PrivilegeGrantInfo>> getGroupGrants() {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1568352&r1=1568351&r2=1568352&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Fri Feb 14 16:57:53 2014
@@ -558,6 +558,19 @@ public class SessionState {
     }
   }
 
+  /**
+   *
+   * @return username from current SessionState authenticator. username will be
+   *         null if there is no current SessionState object or authenticator is
+   *         null.
+   */
+  public static String getUserFromAuthenticator() {
+    if (SessionState.get() != null && SessionState.get().getAuthenticator() != null) {
+      return SessionState.get().getAuthenticator().getUserName();
+    }
+    return null;
+  }
+
   public static boolean registerJar(String newJar) {
     LogHelper console = getConsole();
     try {

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java?rev=1568352&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java Fri Feb 14 16:57:53 2014
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.plan;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test if ReadEntity isDirect flag is set correctly to indicate if
+ * the entity represents a direct or indirect dependency. See documentation
+ * of flag in ReadEntity.
+ */
+public class TestReadEntityDirect {
+
+  @BeforeClass
+  public static void onetimeSetup() throws CommandNeedRetryException {
+    Driver driver = createDriver();
+    int ret = driver.run("create table t1(i int)").getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+    ret = driver.run("create view v1 as select * from t1").getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+  }
+
+  @Before
+  public void setup() {
+    CheckInputReadEntityDirect.readEntities = null;
+  }
+
+  /**
+   * No views in the query so it should be a direct entity
+   *
+   * @throws ParseException
+   */
+  @Test
+  public void testSelectEntityDirect() throws ParseException {
+    Driver driver = createDriver();
+    int ret = driver.compile("select * from t1");
+    assertEquals("Checking command success", 0, ret);
+    assertEquals(1, CheckInputReadEntityDirect.readEntities.size());
+    assertTrue("isDirect", CheckInputReadEntityDirect.readEntities.iterator().next().isDirect());
+  }
+
+  /**
+   * Underlying table of view should be marked as indirect
+   *
+   * @throws ParseException
+   */
+  @Test
+  public void testSelectEntityInDirect() throws ParseException {
+    Driver driver = createDriver();
+    int ret = driver.compile("select * from v1");
+    assertEquals("Checking command success", 0, ret);
+    assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
+    for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
+      if (readEntity.getName().equals("default@t1")) {
+        assertFalse("not direct", readEntity.isDirect());
+      } else if (readEntity.getName().equals("default@v1")) {
+        assertTrue("direct", readEntity.isDirect());
+      } else {
+        fail("unexpected entity name " + readEntity.getName());
+      }
+    }
+  }
+
+  /**
+   * Underlying table of view should be marked as direct, as it is also accessed
+   * directly in the join query
+   *
+   * @throws ParseException
+   */
+  @Test
+  public void testSelectEntityViewDirectJoin() throws ParseException {
+    Driver driver = createDriver();
+    int ret = driver.compile("select * from v1 join t1 on (v1.i = t1.i)");
+    assertEquals("Checking command success", 0, ret);
+    assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
+    for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
+      if (readEntity.getName().equals("default@t1")) {
+        assertTrue("direct", readEntity.isDirect());
+      } else if (readEntity.getName().equals("default@v1")) {
+        assertTrue("direct", readEntity.isDirect());
+      } else {
+        fail("unexpected entity name " + readEntity.getName());
+      }
+    }
+  }
+
+  /**
+   * Underlying table of view should be marked as direct, as it is also accessed
+   * directly in the union-all query
+   *
+   * @throws ParseException
+   */
+  @Test
+  public void testSelectEntityViewDirectUnion() throws ParseException {
+    Driver driver = createDriver();
+    int ret = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1");
+    assertEquals("Checking command success", 0, ret);
+    assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
+    for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
+      if (readEntity.getName().equals("default@t1")) {
+        assertTrue("direct", readEntity.isDirect());
+      } else if (readEntity.getName().equals("default@v1")) {
+        assertTrue("direct", readEntity.isDirect());
+      } else {
+        fail("unexpected entity name " + readEntity.getName());
+      }
+    }
+  }
+
+  /**
+   * Underlying table of view should be marked as indirect. Query with join of views and aliases
+   *
+   * @throws ParseException
+   */
+  @Test
+  public void testSelectEntityInDirectJoinAlias() throws ParseException {
+    Driver driver = createDriver();
+    int ret = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)");
+    assertEquals("Checking command success", 0, ret);
+    assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
+    for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) {
+      if (readEntity.getName().equals("default@t1")) {
+        assertFalse("not direct", readEntity.isDirect());
+      } else if (readEntity.getName().equals("default@v1")) {
+        assertTrue("direct", readEntity.isDirect());
+      } else {
+        fail("unexpected entity name " + readEntity.getName());
+      }
+    }
+  }
+
+  /**
+   * Create driver with the test hook set in config
+   */
+  private static Driver createDriver() {
+    HiveConf conf = new HiveConf(Driver.class);
+    conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK,
+        CheckInputReadEntityDirect.class.getName());
+    HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    SessionState.start(conf);
+    Driver driver = new Driver(conf);
+    driver.init();
+    return driver;
+  }
+
+  /**
+   * Hook used in the test to capture the set of ReadEntities
+   */
+  public static class CheckInputReadEntityDirect extends AbstractSemanticAnalyzerHook {
+    public static Set<ReadEntity> readEntities;
+
+    @Override
+    public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+        List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+      readEntities = context.getInputs();
+    }
+
+  }
+
+}

Added: hive/trunk/ql/src/test/queries/clientnegative/authorization_addpartition.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/authorization_addpartition.q?rev=1568352&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/authorization_addpartition.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/authorization_addpartition.q Fri Feb 14 16:57:53 2014
@@ -0,0 +1,8 @@
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+-- check add partition without insert privilege
+create table tpart(i int, j int) partitioned by (k string);         
+set user.name=user1;
+alter table tpart add partition (k = 'abc') location 'file:${system:test.tmp.dir}/temp' ;

Added: hive/trunk/ql/src/test/queries/clientnegative/authorization_createview.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/authorization_createview.q?rev=1568352&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/authorization_createview.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/authorization_createview.q Fri Feb 14 16:57:53 2014
@@ -0,0 +1,10 @@
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+-- check create view without select privileges
+create table t1(i int);
+set user.name=user1;
+create view v1 as select * from t1;
+
+

Added: hive/trunk/ql/src/test/queries/clientnegative/authorization_ctas.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/authorization_ctas.q?rev=1568352&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/authorization_ctas.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/authorization_ctas.q Fri Feb 14 16:57:53 2014
@@ -0,0 +1,10 @@
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+-- check query without select privilege fails
+create table t1(i int);
+
+set user.name=user1;
+create table t2 as select * from t1;
+

Added: hive/trunk/ql/src/test/queries/clientnegative/authorization_droppartition.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/authorization_droppartition.q?rev=1568352&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/authorization_droppartition.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/authorization_droppartition.q Fri Feb 14 16:57:53 2014
@@ -0,0 +1,9 @@
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+-- check drop partition without delete privilege
+create table tpart(i int, j int) partitioned by (k string);
+alter table tpart add partition (k = 'abc') location 'file:${system:test.tmp.dir}/temp' ;
+set user.name=user1;
+alter table tpart drop partition (k = 'abc');

Added: hive/trunk/ql/src/test/queries/clientnegative/authorization_insert_noinspriv.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/authorization_insert_noinspriv.q?rev=1568352&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/authorization_insert_noinspriv.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/authorization_insert_noinspriv.q Fri Feb 14 16:57:53 2014
@@ -0,0 +1,11 @@
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+-- check insert without select priv
+create table t1(i int);
+
+set user.name=user1;
+create table user2tab(i int);
+insert into table t1 select * from user2tab;
+



Mime
View raw message