hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From the...@apache.org
Subject svn commit: r1612646 [1/2] - in /hive/trunk: itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/ itests/hive-unit/src/test/java/org/apache/hive/jdbc/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src/java/org/apache/hadoop/hive/ql/ ql...
Date Tue, 22 Jul 2014 19:00:50 GMT
Author: thejas
Date: Tue Jul 22 19:00:49 2014
New Revision: 1612646

URL: http://svn.apache.org/r1612646
Log:
HIVE-7026 : Support newly added role related APIs for v1 authorizer (Navis via Thejas Nair)

Added:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
    hive/trunk/ql/src/test/queries/clientpositive/authorization_show_role_principals_v1.q
    hive/trunk/ql/src/test/results/clientpositive/authorization_show_role_principals_v1.q.out
Removed:
    hive/trunk/ql/src/test/queries/clientnegative/authorization_show_role_principals_v1.q
    hive/trunk/ql/src/test/results/clientnegative/authorization_show_role_principals_v1.q.out
Modified:
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
    hive/trunk/ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_fail_1.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_fail_7.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_public_create.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_public_drop.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_role_cycles1.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_role_cycles2.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_role_grant.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_role_grant2.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_role_grant_nosuchrole.q.out
    hive/trunk/ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_1.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_2.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_3.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_4.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_5.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_6.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_9.q.out
    hive/trunk/ql/src/test/results/clientpositive/keyword_1.q.out
    hive/trunk/ql/src/test/results/clientpositive/show_roles.q.out

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Tue Jul 22 19:00:49 2014
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.jdbc;
 
-import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
 import static org.apache.hadoop.hive.conf.SystemVariables.SET_COLUMN_NAME;
+import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
 
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
@@ -1158,7 +1158,7 @@ public class TestJdbcDriver extends Test
     assertEquals("", res.getString(4));     // column
     assertEquals("hive_test_user", res.getString(5));
     assertEquals("USER", res.getString(6));
-    assertEquals("Select", res.getString(7));
+    assertEquals("SELECT", res.getString(7));
     assertEquals(false, res.getBoolean(8)); // grant option
     assertEquals(-1, res.getLong(9));
     assertNotNull(res.getString(10));       // grantor

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Tue Jul 22 19:00:49 2014
@@ -18,8 +18,8 @@
 
 package org.apache.hive.jdbc;
 
-import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
 import static org.apache.hadoop.hive.conf.SystemVariables.SET_COLUMN_NAME;
+import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
@@ -53,7 +53,6 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.processors.DfsProcessor;
-import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hive.common.util.HiveVersionInfo;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.cli.operation.ClassicTableTypeMapping;
@@ -1997,7 +1996,7 @@ public class TestJdbcDriver2 {
     assertEquals("", res.getString(4));     // column
     assertEquals("hive_test_user", res.getString(5));
     assertEquals("USER", res.getString(6));
-    assertEquals("Select", res.getString(7));
+    assertEquals("SELECT", res.getString(7));
     assertEquals(false, res.getBoolean(8)); // grant option
     assertEquals(-1, res.getLong(9));
     assertNotNull(res.getString(10));       // grantor

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java Tue Jul 22 19:00:49 2014
@@ -89,8 +89,7 @@ public class Warehouse {
     try {
       Class<? extends MetaStoreFS> handlerClass = (Class<? extends MetaStoreFS>) Class
           .forName(handlerClassStr, true, JavaUtils.getClassLoader());
-      MetaStoreFS handler = (MetaStoreFS) ReflectionUtils.newInstance(
-          handlerClass, conf);
+      MetaStoreFS handler = ReflectionUtils.newInstance(handlerClass, conf);
       return handler;
     } catch (ClassNotFoundException e) {
       throw new MetaException("Error in loading MetaStoreFS handler."
@@ -563,4 +562,12 @@ public class Warehouse {
     return values;
   }
 
+  public static Map<String, String> makeSpecFromValues(List<FieldSchema> partCols,
+      List<String> values) {
+    Map<String, String> spec = new LinkedHashMap<String, String>();
+    for (int i = 0; i < values.size(); i++) {
+      spec.put(partCols.get(i).getName(), values.get(i));
+    }
+    return spec;
+  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java Tue Jul 22 19:00:49 2014
@@ -371,10 +371,14 @@ public enum ErrorMsg {
   INVALID_DIR(10252, "{0} is not a directory", true),
   NO_VALID_LOCATIONS(10253, "Could not find any valid location to place the jars. " +
       "Please update hive.jar.directory or hive.user.install.directory with a valid location", false),
-  UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP(10254,
+  UNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP(10254,
       "Principal type GROUP is not supported in this authorization setting", "28000"),
   INVALID_TABLE_NAME(10255, "Invalid table name {0}", true),
   INSERT_INTO_IMMUTABLE_TABLE(10256, "Inserting into a non-empty immutable table is not allowed"),
+  UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_GLOBAL(10257,
+      "Resource type GLOBAL is not supported in this authorization setting", "28000"),
+  UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_COLUMN(10258,
+      "Resource type COLUMN is not supported in this authorization setting", "28000"),
 
   TXNMGR_NOT_SPECIFIED(10260, "Transaction manager not specified correctly, " +
       "set hive.txn.manager"),

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Tue Jul 22 19:00:49 2014
@@ -66,17 +66,12 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
-import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
-import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
-import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
-import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
@@ -169,15 +164,13 @@ import org.apache.hadoop.hive.ql.plan.Un
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
-import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveV1Authorizer;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -493,14 +486,21 @@ public class DDLTask extends Task<DDLWor
         return exchangeTablePartition(db, alterTableExchangePartition);
       }
     } catch (Throwable e) {
-      setException(e);
-      LOG.error(stringifyException(e));
+      failed(e);
       return 1;
     }
     assert false;
     return 0;
   }
 
+  private void failed(Throwable e) {
+    while (e.getCause() != null && e.getClass() == RuntimeException.class) {
+      e = e.getCause();
+    }
+    setException(e);
+    LOG.error(stringifyException(e));
+  }
+
   private int showConf(Hive db, ShowConfDesc showConf) throws Exception {
     ConfVars conf = HiveConf.getConfVars(showConf.getConfName());
     if (conf == null) {
@@ -564,187 +564,46 @@ public class DDLTask extends Task<DDLWor
     return ret;
   }
 
-  private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL)
-      throws HiveException {
-    try {
-      boolean grantRole = grantOrRevokeRoleDDL.getGrant();
-      List<PrincipalDesc> principals = grantOrRevokeRoleDDL.getPrincipalDesc();
-      List<String> roles = grantOrRevokeRoleDDL.getRoles();
-
-      if(SessionState.get().isAuthorizationModeV2()){
-        return grantOrRevokeRoleV2(grantOrRevokeRoleDDL);
-      }
-
-      for (PrincipalDesc principal : principals) {
-        String userName = principal.getName();
-        for (String roleName : roles) {
-          if (grantRole) {
-            db.grantRole(roleName, userName, principal.getType(),
-                grantOrRevokeRoleDDL.getGrantor(), grantOrRevokeRoleDDL
-                .getGrantorType(), grantOrRevokeRoleDDL.isGrantOption());
-          } else {
-            db.revokeRole(roleName, userName, principal.getType(),
-                grantOrRevokeRoleDDL.isGrantOption());
-          }
-        }
-      }
-    } catch (Exception e) {
-      throw new HiveException(e);
+  private HiveAuthorizer getSessionAuthorizer() {
+    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    if (authorizer == null) {
+      authorizer = new HiveV1Authorizer(conf, db);
     }
-    return 0;
+    return authorizer;
   }
 
-  private int grantOrRevokeRoleV2(GrantRevokeRoleDDL grantOrRevokeRoleDDL) throws HiveException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+  private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL)
+      throws HiveException {
+    HiveAuthorizer authorizer = getSessionAuthorizer();
     //convert to the types needed for plugin api
     HivePrincipal grantorPrinc = null;
     if(grantOrRevokeRoleDDL.getGrantor() != null){
       grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(),
-          getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
+          AuthorizationUtils.getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
     }
-    List<HivePrincipal> hivePrincipals = getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
+    List<HivePrincipal> principals =
+        AuthorizationUtils.getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
     List<String> roles = grantOrRevokeRoleDDL.getRoles();
 
-    if(grantOrRevokeRoleDDL.getGrant()){
-      authorizer.grantRole(hivePrincipals, roles,
-          grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc);
-    }
-    else{
-      authorizer.revokeRole(hivePrincipals, roles,
-          grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc);
+    boolean grantOption = grantOrRevokeRoleDDL.isGrantOption();
+    if (grantOrRevokeRoleDDL.getGrant()) {
+      authorizer.grantRole(principals, roles, grantOption, grantorPrinc);
+    } else {
+      authorizer.revokeRole(principals, roles, grantOption, grantorPrinc);
     }
     return 0;
   }
 
   private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
 
-    if(SessionState.get().isAuthorizationModeV2()){
-      return showGrantsV2(showGrantDesc);
-    }
-
-    PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc();
-    PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj();
-    String principalName = principalDesc == null ? null : principalDesc.getName();
-    PrincipalType type = principalDesc == null ? null : principalDesc.getType();
-    List<HiveObjectPrivilege> privs = new ArrayList<HiveObjectPrivilege>();
-    try {
-      if (hiveObjectDesc == null) {
-        privs.addAll(db.showPrivilegeGrant(HiveObjectType.GLOBAL, principalName, type,
-            null, null, null, null));
-      } else if (hiveObjectDesc != null && hiveObjectDesc.getObject() == null) {
-        privs.addAll(db.showPrivilegeGrant(null, principalName, type, null, null, null, null));
-      } else {
-        String obj = hiveObjectDesc.getObject();
-        boolean notFound = true;
-        String dbName = null;
-        String tableName = null;
-        Table tableObj = null;
-        Database dbObj = null;
-
-        if (hiveObjectDesc.getTable()) {
-          String[] dbTab = splitTableName(obj);
-          dbName = dbTab[0];
-          tableName = dbTab[1];
-          dbObj = db.getDatabase(dbName);
-          tableObj = db.getTable(dbName, tableName);
-          notFound = (dbObj == null || tableObj == null);
-        } else {
-          dbName = hiveObjectDesc.getObject();
-          dbObj = db.getDatabase(dbName);
-          notFound = (dbObj == null);
-        }
-        if (notFound) {
-          throw new HiveException(obj + " can not be found");
-        }
-
-        String partName = null;
-        List<String> partValues = null;
-        if (hiveObjectDesc.getPartSpec() != null) {
-          partName = Warehouse
-              .makePartName(hiveObjectDesc.getPartSpec(), false);
-          partValues = Warehouse.getPartValuesFromPartName(partName);
-        }
-
-        if (!hiveObjectDesc.getTable()) {
-          // show database level privileges
-          privs.addAll(db.showPrivilegeGrant(HiveObjectType.DATABASE,
-              principalName, type, dbName, null, null, null));
-        } else {
-          if (showGrantDesc.getColumns() != null) {
-            // show column level privileges
-            for (String columnName : showGrantDesc.getColumns()) {
-              privs.addAll(db.showPrivilegeGrant(
-                  HiveObjectType.COLUMN, principalName,
-                  type, dbName, tableName, partValues,
-                  columnName));
-            }
-          } else if (hiveObjectDesc.getPartSpec() != null) {
-            // show partition level privileges
-            privs.addAll(db.showPrivilegeGrant(
-                HiveObjectType.PARTITION, principalName, type,
-                dbName, tableName, partValues, null));
-          } else {
-            // show table level privileges
-            privs.addAll(db.showPrivilegeGrant(
-                HiveObjectType.TABLE, principalName, type,
-                dbName, tableName, null, null));
-          }
-        }
-      }
-      boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-      writeToFile(writeGrantInfo(privs, testMode), showGrantDesc.getResFile());
-    } catch (FileNotFoundException e) {
-      LOG.info("show table status: " + stringifyException(e));
-      return 1;
-    } catch (IOException e) {
-      LOG.info("show table status: " + stringifyException(e));
-      return 1;
-    } catch (Exception e) {
-      e.printStackTrace();
-      throw new HiveException(e);
-    }
-    return 0;
-  }
-
-  private static String[] splitTableName(String fullName) {
-    String[] dbTab = fullName.split("\\.");
-    String[] result = new String[2];
-    if (dbTab.length == 2) {
-      result[0] = dbTab[0];
-      result[1] = dbTab[1];
-    } else {
-      result[0] = SessionState.get().getCurrentDatabase();
-      result[1] = fullName;
-    }
-    return result;
-  }
-
-  private int showGrantsV2(ShowGrantDesc showGrantDesc) throws HiveException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    HiveAuthorizer authorizer = getSessionAuthorizer();
     try {
       List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
-          getHivePrincipal(showGrantDesc.getPrincipalDesc()),
-          getHivePrivilegeObject(showGrantDesc.getHiveObj())
-          );
-      List<HiveObjectPrivilege> privList = new ArrayList<HiveObjectPrivilege>();
-      for(HivePrivilegeInfo privInfo : privInfos){
-        HivePrincipal principal = privInfo.getPrincipal();
-        HivePrivilegeObject privObj = privInfo.getObject();
-        HivePrivilege priv = privInfo.getPrivilege();
-
-        PrivilegeGrantInfo grantInfo =
-            AuthorizationUtils.getThriftPrivilegeGrantInfo(priv, privInfo.getGrantorPrincipal(),
-                privInfo.isGrantOption(), privInfo.getGrantTime());
-
-        //only grantInfo is used
-        HiveObjectPrivilege thriftObjectPriv = new HiveObjectPrivilege(new HiveObjectRef(
-            AuthorizationUtils.getThriftHiveObjType(privObj.getType()),privObj.getDbname(),
-            privObj.getTableViewURI(),null,null), principal.getName(),
-            AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo);
-        privList.add(thriftObjectPriv);
-      }
+          AuthorizationUtils.getHivePrincipal(showGrantDesc.getPrincipalDesc()),
+          AuthorizationUtils.getHivePrivilegeObject(showGrantDesc.getHiveObj(),
+              showGrantDesc.getColumns()));
       boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-      writeToFile(writeGrantInfo(privList, testMode), showGrantDesc.getResFile());
+      writeToFile(writeGrantInfo(privInfos, testMode), showGrantDesc.getResFile());
     } catch (IOException e) {
       throw new HiveException("Error in show grant statement", e);
     }
@@ -756,156 +615,15 @@ public class DDLTask extends Task<DDLWor
       String grantor, PrincipalType grantorType, boolean grantOption, boolean isGrant)
           throws HiveException {
 
-    if(SessionState.get().isAuthorizationModeV2()){
-      return grantOrRevokePrivilegesV2(principals, privileges, privSubjectDesc, grantor,
-          grantorType, grantOption, isGrant);
-    }
-
-    if (privileges == null || privileges.size() == 0) {
-      console.printError("No privilege found.");
-      return 1;
-    }
-
-    String dbName = null;
-    String tableName = null;
-    Table tableObj = null;
-    Database dbObj = null;
-
-    try {
-
-      if (privSubjectDesc != null) {
-        if (privSubjectDesc.getPartSpec() != null && isGrant) {
-          throw new HiveException("Grant does not support partition level.");
-        }
-        String obj = privSubjectDesc.getObject();
-
-        //get the db, table objects
-        if (privSubjectDesc.getTable()) {
-          String[] dbTable = Utilities.getDbTableName(obj);
-          dbName = dbTable[0];
-          tableName = dbTable[1];
-
-          dbObj = db.getDatabase(dbName);
-          if (dbObj == null) {
-            throwNotFound("Database", dbName);
-          }
-          tableObj = db.getTable(dbName, tableName);
-          if (tableObj == null) {
-            throwNotFound("Table", obj);
-          }
-        } else {
-          dbName = privSubjectDesc.getObject();
-          dbObj = db.getDatabase(dbName);
-          if (dbObj == null) {
-            throwNotFound("Database", dbName);
-          }
-        }
-      }
-
-      PrivilegeBag privBag = new PrivilegeBag();
-      if (privSubjectDesc == null) {
-        for (int idx = 0; idx < privileges.size(); idx++) {
-          Privilege priv = privileges.get(idx).getPrivilege();
-          if (privileges.get(idx).getColumns() != null
-              && privileges.get(idx).getColumns().size() > 0) {
-            throw new HiveException(
-                "For user-level privileges, column sets should be null. columns="
-                    + privileges.get(idx).getColumns().toString());
-          }
-
-          privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(
-              HiveObjectType.GLOBAL, null, null, null, null), null, null,
-              new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType,
-                  grantOption)));
-        }
-      } else {
-        org.apache.hadoop.hive.metastore.api.Partition partObj = null;
-        List<String> partValues = null;
-        if (tableObj != null) {
-          if ((!tableObj.isPartitioned())
-              && privSubjectDesc.getPartSpec() != null) {
-            throw new HiveException(
-                "Table is not partitioned, but partition name is present: partSpec="
-                    + privSubjectDesc.getPartSpec().toString());
-          }
-
-          if (privSubjectDesc.getPartSpec() != null) {
-            partObj = db.getPartition(tableObj, privSubjectDesc.getPartSpec(),
-                false).getTPartition();
-            partValues = partObj.getValues();
-          }
-        }
-
-        for (PrivilegeDesc privDesc : privileges) {
-          List<String> columns = privDesc.getColumns();
-          Privilege priv = privDesc.getPrivilege();
-          if (columns != null && columns.size() > 0) {
-            if (!priv.supportColumnLevel()) {
-              throw new HiveException(priv.toString()
-                  + " does not support column level.");
-            }
-            if (privSubjectDesc == null || tableName == null) {
-              throw new HiveException(
-                  "For user-level/database-level privileges, column sets should be null. columns="
-                      + columns);
-            }
-            for (int i = 0; i < columns.size(); i++) {
-              privBag.addToPrivileges(new HiveObjectPrivilege(
-                  new HiveObjectRef(HiveObjectType.COLUMN, dbName, tableName,
-                      partValues, columns.get(i)), null, null,  new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-            }
-          } else {
-            if (privSubjectDesc.getTable()) {
-              if (privSubjectDesc.getPartSpec() != null) {
-                privBag.addToPrivileges(new HiveObjectPrivilege(
-                    new HiveObjectRef(HiveObjectType.PARTITION, dbName,
-                        tableName, partValues, null), null, null,  new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-              } else {
-                privBag
-                .addToPrivileges(new HiveObjectPrivilege(
-                    new HiveObjectRef(HiveObjectType.TABLE, dbName,
-                        tableName, null, null), null, null, new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-              }
-            } else {
-              privBag.addToPrivileges(new HiveObjectPrivilege(
-                  new HiveObjectRef(HiveObjectType.DATABASE, dbName, null,
-                      null, null), null, null, new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-            }
-          }
-        }
-      }
-
-      for (PrincipalDesc principal : principals) {
-        for (int i = 0; i < privBag.getPrivileges().size(); i++) {
-          HiveObjectPrivilege objPrivs = privBag.getPrivileges().get(i);
-          objPrivs.setPrincipalName(principal.getName());
-          objPrivs.setPrincipalType(principal.getType());
-        }
-        if (isGrant) {
-          db.grantPrivileges(privBag);
-        } else {
-          db.revokePrivileges(privBag, grantOption);
-        }
-        
-      }
-    } catch (Exception e) {
-      console.printError("Error: " + e.getMessage());
-      return 1;
-    }
-
-    return 0;
-  }
-
-  private int grantOrRevokePrivilegesV2(List<PrincipalDesc> principals,
-      List<PrivilegeDesc> privileges, PrivilegeObjectDesc privSubjectDesc, String grantor,
-      PrincipalType grantorType, boolean grantOption, boolean isGrant) throws HiveException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    HiveAuthorizer authorizer = getSessionAuthorizer();
 
     //Convert to object types used by the authorization plugin interface
-    List<HivePrincipal> hivePrincipals = getHivePrincipals(principals);
-    List<HivePrivilege> hivePrivileges = getHivePrivileges(privileges);
-    HivePrivilegeObject hivePrivObject = getHivePrivilegeObject(privSubjectDesc);
-    HivePrincipal grantorPrincipal = new HivePrincipal(grantor, getHivePrincipalType(grantorType));
+    List<HivePrincipal> hivePrincipals = AuthorizationUtils.getHivePrincipals(principals);
+    List<HivePrivilege> hivePrivileges = AuthorizationUtils.getHivePrivileges(privileges);
+    HivePrivilegeObject hivePrivObject = AuthorizationUtils.getHivePrivilegeObject(privSubjectDesc, null);
+
+    HivePrincipal grantorPrincipal = new HivePrincipal(
+        grantor, AuthorizationUtils.getHivePrincipalType(grantorType));
 
     if(isGrant){
       authorizer.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
@@ -918,123 +636,8 @@ public class DDLTask extends Task<DDLWor
     return 0;
   }
 
-  private HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc)
-      throws HiveException {
-    if(privSubjectDesc == null){
-      return new HivePrivilegeObject(null, null, null);
-    }
-    String [] dbTable = Utilities.getDbTableName(privSubjectDesc.getObject());
-    return new HivePrivilegeObject(getPrivObjectType(privSubjectDesc), dbTable[0], dbTable[1]);
-  }
-
-  private HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException {
-    if(type == null){
-      return null;
-    }
-
-    switch(type){
-    case USER:
-      return HivePrincipalType.USER;
-    case ROLE:
-      return HivePrincipalType.ROLE;
-    case GROUP:
-      throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
-    default:
-      //should not happen as we take care of all existing types
-      throw new AssertionError("Unsupported authorization type specified");
-    }
-  }
-
-  private HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc privSubjectDesc) {
-    if (privSubjectDesc.getObject() == null) {
-      return null;
-    }
-    return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW : HivePrivilegeObjectType.DATABASE;
-  }
-
-  private List<HivePrivilege> getHivePrivileges(List<PrivilegeDesc> privileges) {
-    List<HivePrivilege> hivePrivileges = new ArrayList<HivePrivilege>();
-    for(PrivilegeDesc privilege : privileges){
-      hivePrivileges.add(
-          new HivePrivilege(privilege.getPrivilege().toString(), privilege.getColumns()));
-    }
-    return hivePrivileges;
-  }
-
-  private List<HivePrincipal> getHivePrincipals(List<PrincipalDesc> principals) throws HiveException {
-    ArrayList<HivePrincipal> hivePrincipals = new ArrayList<HivePrincipal>();
-    for(PrincipalDesc principal : principals){
-      hivePrincipals.add(getHivePrincipal(principal));
-    }
-    return hivePrincipals;
-  }
-
-  private HivePrincipal getHivePrincipal(PrincipalDesc principal) throws HiveException {
-    if (principal == null) {
-      return null;
-    }
-    return new HivePrincipal(principal.getName(),
-        AuthorizationUtils.getHivePrincipalType(principal.getType()));
-  }
-
-  private void throwNotFound(String objType, String objName) throws HiveException {
-    throw new HiveException(objType + " " + objName + " not found");
-  }
-
-  private int roleDDL(RoleDDLDesc roleDDLDesc) throws HiveException, IOException {
-    if(SessionState.get().isAuthorizationModeV2()){
-      return roleDDLV2(roleDDLDesc);
-    }
-
-    DataOutputStream outStream = null;
-    RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
-    try {
-      if (operation.equals(RoleDDLDesc.RoleOperation.CREATE_ROLE)) {
-        db.createRole(roleDDLDesc.getName(), roleDDLDesc.getRoleOwnerName());
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.DROP_ROLE)) {
-        db.dropRole(roleDDLDesc.getName());
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT)) {
-        boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-        List<RolePrincipalGrant> roleGrants = db.getRoleGrantInfoForPrincipal(roleDDLDesc.getName(), roleDDLDesc.getPrincipalType());
-        writeToFile(writeRoleGrantsInfo(roleGrants, testMode), roleDDLDesc.getResFile());
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLES)) {
-        List<String> roleNames = db.getAllRoleNames();
-        //sort the list to get sorted (deterministic) output (for ease of testing)
-        Collections.sort(roleNames);
-        Path resFile = new Path(roleDDLDesc.getResFile());
-        FileSystem fs = resFile.getFileSystem(conf);
-        outStream = fs.create(resFile);
-        for (String roleName : roleNames) {
-          outStream.writeBytes(roleName);
-          outStream.write(terminator);
-        }
-        outStream.close();
-        outStream = null;
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLE_PRINCIPALS)) {
-        throw new HiveException("Show role principals is not currently supported in "
-            + "authorization mode V1");
-      }
-      else {
-        throw new HiveException("Unkown role operation "
-            + operation.getOperationName());
-      }
-    } catch (HiveException e) {
-      console.printError("Error in role operation "
-          + operation.getOperationName() + " on role name "
-          + roleDDLDesc.getName() + ", error message " + e.getMessage());
-      return 1;
-    } catch (IOException e) {
-      LOG.info("role ddl exception: " + stringifyException(e));
-      return 1;
-    } finally {
-      IOUtils.closeStream(outStream);
-    }
-
-    return 0;
-  }
-
-  private int roleDDLV2(RoleDDLDesc roleDDLDesc) throws HiveException, IOException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+  private int roleDDL(RoleDDLDesc roleDDLDesc) throws Exception {
+    HiveAuthorizer authorizer = getSessionAuthorizer();
     RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
     //call the appropriate hive authorizer function
     switch(operation){
@@ -1047,7 +650,7 @@ public class DDLTask extends Task<DDLWor
     case SHOW_ROLE_GRANT:
       boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
       List<HiveRoleGrant> roles = authorizer.getRoleGrantInfoForPrincipal(
-          new HivePrincipal(roleDDLDesc.getName(), getHivePrincipalType(roleDDLDesc.getPrincipalType())));
+          AuthorizationUtils.getHivePrincipal(roleDDLDesc.getName(), roleDDLDesc.getPrincipalType()));
       writeToFile(writeRolesGrantedInfo(roles, testMode), roleDDLDesc.getResFile());
       break;
     case SHOW_ROLES:
@@ -2802,7 +2405,7 @@ public class DDLTask extends Task<DDLWor
       LOG.warn("show function: " + stringifyException(e));
       return 1;
     } catch (Exception e) {
-      throw new HiveException(e.toString());
+      throw new HiveException(e.toString(), e);
     } finally {
       IOUtils.closeStream(outStream);
     }
@@ -3491,7 +3094,7 @@ public class DDLTask extends Task<DDLWor
           // when column name is specified in describe table DDL, colPath will
           // will be table_name.column_name
           String colName = colPath.split("\\.")[1];
-          String[] dbTab = splitTableName(tableName);
+          String[] dbTab = Utilities.getDbTableName(tableName);
           List<String> colNames = new ArrayList<String>();
           colNames.add(colName.toLowerCase());
           if (null == part) {
@@ -3541,28 +3144,41 @@ public class DDLTask extends Task<DDLWor
     }
   }
 
-  static String writeGrantInfo(List<HiveObjectPrivilege> privileges, boolean testMode) {
+  static String writeGrantInfo(List<HivePrivilegeInfo> privileges, boolean testMode) {
     if (privileges == null || privileges.isEmpty()) {
       return "";
     }
     StringBuilder builder = new StringBuilder();
     //sort the list to get sorted (deterministic) output (for ease of testing)
-    Collections.sort(privileges);
-
-    for (HiveObjectPrivilege privilege : privileges) {
-      HiveObjectRef resource = privilege.getHiveObject();
-      PrivilegeGrantInfo grantInfo = privilege.getGrantInfo();
-
-      appendNonNull(builder, resource.getDbName(), true);
-      appendNonNull(builder, resource.getObjectName());
-      appendNonNull(builder, resource.getPartValues());
-      appendNonNull(builder, resource.getColumnName());
-      appendNonNull(builder, privilege.getPrincipalName());
-      appendNonNull(builder, privilege.getPrincipalType());
-      appendNonNull(builder, grantInfo.getPrivilege());
-      appendNonNull(builder, grantInfo.isGrantOption());
-      appendNonNull(builder, testMode ? -1 : grantInfo.getCreateTime() * 1000L);
-      appendNonNull(builder, grantInfo.getGrantor());
+    Collections.sort(privileges, new Comparator<HivePrivilegeInfo>() {
+      @Override
+      public int compare(HivePrivilegeInfo o1, HivePrivilegeInfo o2) {
+        int compare = o1.getObject().compareTo(o2.getObject());
+        if (compare == 0) {
+          compare = o1.getPrincipal().compareTo(o2.getPrincipal());
+        }
+        if (compare == 0) {
+          compare = o1.getPrivilege().compareTo(o2.getPrivilege());
+        }
+        return compare;
+      }
+    });
+
+    for (HivePrivilegeInfo privilege : privileges) {
+      HivePrincipal principal = privilege.getPrincipal();
+      HivePrivilegeObject resource = privilege.getObject();
+      HivePrincipal grantor = privilege.getGrantorPrincipal();
+
+      appendNonNull(builder, resource.getDbname(), true);
+      appendNonNull(builder, resource.getTableViewURI());
+      appendNonNull(builder, resource.getPartKeys());
+      appendNonNull(builder, resource.getColumns());
+      appendNonNull(builder, principal.getName());
+      appendNonNull(builder, principal.getType());
+      appendNonNull(builder, privilege.getPrivilege().getName());
+      appendNonNull(builder, privilege.isGrantOption());
+      appendNonNull(builder, testMode ? -1 : privilege.getGrantTime() * 1000L);
+      appendNonNull(builder, grantor.getName());
     }
     return builder.toString();
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java Tue Jul 22 19:00:49 2014
@@ -12,10 +12,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.conf.HiveConf;;
-import org.apache.hadoop.hive.metastore.HiveMetaHook;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java Tue Jul 22 19:00:49 2014
@@ -18,22 +18,32 @@
 package org.apache.hadoop.hive.ql.security.authorization;
 
 import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+import java.util.ArrayList;
+import java.util.List;
 
 /**
  * Utility code shared by hive internal code and sql standard authorization plugin implementation
@@ -48,13 +58,19 @@ public class AuthorizationUtils {
    * @throws HiveException
    */
   public static HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException {
+    if (type == null) {
+      return null;
+    }
     switch(type){
     case USER:
       return HivePrincipalType.USER;
     case ROLE:
       return HivePrincipalType.ROLE;
     case GROUP:
-      throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
+      if (SessionState.get().getAuthorizationMode() == SessionState.AuthorizationMode.V2) {
+        throw new HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
+      }
+      return HivePrincipalType.GROUP;
     default:
       //should not happen as we take care of all existing types
       throw new AssertionError("Unsupported authorization type specified");
@@ -68,6 +84,9 @@ public class AuthorizationUtils {
    * @return
    */
   public static HivePrivilegeObjectType getHivePrivilegeObjectType(Type type) {
+    if (type == null){
+      return null;
+    }
     switch(type){
     case DATABASE:
       return HivePrivilegeObjectType.DATABASE;
@@ -85,6 +104,95 @@ public class AuthorizationUtils {
     }
   }
 
+  public static HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc privSubjectDesc) {
+    if (privSubjectDesc.getObject() == null) {
+      return null;
+    }
+    return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW :
+        HivePrivilegeObjectType.DATABASE;
+  }
+
+  public static List<HivePrivilege> getHivePrivileges(List<PrivilegeDesc> privileges) {
+    List<HivePrivilege> hivePrivileges = new ArrayList<HivePrivilege>();
+    for(PrivilegeDesc privilege : privileges){
+      Privilege priv = privilege.getPrivilege();
+      hivePrivileges.add(
+          new HivePrivilege(priv.toString(), privilege.getColumns(), priv.getScopeList()));
+    }
+    return hivePrivileges;
+  }
+
+  public static List<HivePrincipal> getHivePrincipals(List<PrincipalDesc> principals)
+      throws HiveException {
+
+    ArrayList<HivePrincipal> hivePrincipals = new ArrayList<HivePrincipal>();
+    for(PrincipalDesc principal : principals){
+      hivePrincipals.add(getHivePrincipal(principal));
+    }
+    return hivePrincipals;
+  }
+
+  public static HivePrincipal getHivePrincipal(PrincipalDesc principal) throws HiveException {
+    if (principal == null) {
+      return null;
+    }
+    return getHivePrincipal(principal.getName(), principal.getType());
+  }
+
+  public static HivePrincipal getHivePrincipal(String name, PrincipalType type) throws HiveException {
+    return new HivePrincipal(name, AuthorizationUtils.getHivePrincipalType(type));
+  }
+
+  public static List<HivePrivilegeInfo> getPrivilegeInfos(List<HiveObjectPrivilege> privs)
+      throws HiveException {
+    List<HivePrivilegeInfo> hivePrivs = new ArrayList<HivePrivilegeInfo>();
+    for (HiveObjectPrivilege priv : privs) {
+      PrivilegeGrantInfo grantorInfo = priv.getGrantInfo();
+      HiveObjectRef privObject = priv.getHiveObject();
+      HivePrincipal hivePrincipal =
+          getHivePrincipal(priv.getPrincipalName(), priv.getPrincipalType());
+      HivePrincipal grantor =
+          getHivePrincipal(grantorInfo.getGrantor(), grantorInfo.getGrantorType());
+      HivePrivilegeObject object = getHiveObjectRef(privObject);
+      HivePrivilege privilege = new HivePrivilege(grantorInfo.getPrivilege(), null);
+      hivePrivs.add(new HivePrivilegeInfo(hivePrincipal, privilege, object, grantor,
+          grantorInfo.isGrantOption(), grantorInfo.getCreateTime()));
+    }
+    return hivePrivs;
+  }
+
+  public static HivePrivilegeObject getHiveObjectRef(HiveObjectRef privObj) throws HiveException {
+    if (privObj == null) {
+      return null;
+    }
+    HivePrivilegeObjectType objType = getHiveObjType(privObj.getObjectType());
+    return new HivePrivilegeObject(objType, privObj.getDbName(), privObj.getObjectName(),
+        privObj.getPartValues(), privObj.getColumnName());
+  }
+
+  public static HivePrivilegeObject getHivePrivilegeObject(
+      PrivilegeObjectDesc privSubjectDesc, List<String> columns) throws HiveException {
+
+    // null means ALL for show grants, GLOBAL for grant/revoke
+    HivePrivilegeObjectType objectType = null;
+
+    String[] dbTable;
+    List<String> partSpec = null;
+    if (privSubjectDesc == null) {
+      dbTable = new String[] {null, null};
+    } else {
+      if (privSubjectDesc.getTable()) {
+        dbTable = Utilities.getDbTableName(privSubjectDesc.getObject());
+      } else {
+        dbTable = new String[] {privSubjectDesc.getObject(), null};
+      }
+      if (privSubjectDesc.getPartSpec() != null) {
+        partSpec = new ArrayList<String>(privSubjectDesc.getPartSpec().values());
+      }
+      objectType = getPrivObjectType(privSubjectDesc);
+    }
+    return new HivePrivilegeObject(objectType, dbTable[0], dbTable[1], partSpec, columns, null);
+  }
 
   /**
    * Convert authorization plugin principal type to thrift principal type
@@ -99,6 +207,8 @@ public class AuthorizationUtils {
     switch(type){
     case USER:
       return PrincipalType.USER;
+    case GROUP:
+      return PrincipalType.GROUP;
     case ROLE:
       return PrincipalType.ROLE;
     default:
@@ -106,7 +216,6 @@ public class AuthorizationUtils {
     }
   }
 
-
   /**
    * Get thrift privilege grant info
    * @param privilege
@@ -134,12 +243,16 @@ public class AuthorizationUtils {
       return null;
     }
     switch(type){
+    case GLOBAL:
+      return HiveObjectType.GLOBAL;
     case DATABASE:
       return HiveObjectType.DATABASE;
     case TABLE_OR_VIEW:
       return HiveObjectType.TABLE;
     case PARTITION:
       return HiveObjectType.PARTITION;
+    case COLUMN:
+      return HiveObjectType.COLUMN;
     case LOCAL_URI:
     case DFS_URI:
       throw new HiveException("Unsupported type " + type);
@@ -149,6 +262,33 @@ public class AuthorizationUtils {
     }
   }
 
+  // V1 to V2 conversion.
+  private static HivePrivilegeObjectType getHiveObjType(HiveObjectType type) throws HiveException {
+    if (type == null) {
+      return null;
+    }
+    switch(type){
+      case GLOBAL:
+        if (SessionState.get().getAuthorizationMode() == SessionState.AuthorizationMode.V2) {
+          throw new HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_GLOBAL);
+        }
+        return HivePrivilegeObjectType.GLOBAL;
+      case DATABASE:
+        return HivePrivilegeObjectType.DATABASE;
+      case TABLE:
+        return HivePrivilegeObjectType.TABLE_OR_VIEW;
+      case PARTITION:
+        return HivePrivilegeObjectType.PARTITION;
+      case COLUMN:
+        if (SessionState.get().getAuthorizationMode() == SessionState.AuthorizationMode.V2) {
+          throw new HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_COLUMN);
+        }
+        return HivePrivilegeObjectType.COLUMN;
+      default:
+        //should not happen as we have accounted for all types
+        throw new AssertionError("Unsupported type " + type);
+    }
+  }
 
   /**
    * Convert thrift HiveObjectRef to plugin HivePrivilegeObject

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java Tue Jul 22 19:00:49 2014
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
+import java.util.ArrayList;
 import java.util.EnumSet;
+import java.util.List;
 
 /**
  * Privilege defines a privilege in Hive. Each privilege has a name and scope associated with it.
@@ -65,6 +67,17 @@ public class Privilege {
         && supportedScopeSet.contains(PrivilegeScope.TABLE_LEVEL_SCOPE);
   }
 
+  public List<String> getScopeList() {
+    if (supportedScopeSet == null) {
+      return null;
+    }
+    List<String> scopes = new ArrayList<String>();
+    for (PrivilegeScope scope : supportedScopeSet) {
+      scopes.add(scope.name());
+    }
+    return scopes;
+  }
+
   @Override
   public String toString() {
     return this.getPriv().toString();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java Tue Jul 22 19:00:49 2014
@@ -25,10 +25,19 @@ import org.apache.hadoop.hive.common.cla
  */
 @LimitedPrivate(value = { "" })
 @Evolving
-public class HivePrincipal {
+public class HivePrincipal implements Comparable<HivePrincipal> {
+
+  @Override
+  public int compareTo(HivePrincipal o) {
+    int compare = name.compareTo(o.name);
+    if (compare == 0) {
+      compare = type.compareTo(o.type);
+    }
+    return compare;
+  }
 
   public enum HivePrincipalType{
-    USER, ROLE, UNKNOWN
+    USER, GROUP, ROLE, UNKNOWN
   }
 
   @Override

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java Tue Jul 22 19:00:49 2014
@@ -22,13 +22,14 @@ import java.util.Locale;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.hive.ql.security.authorization.PrivilegeScope;
 
 /**
  * Represents the hive privilege being granted/revoked
  */
 @LimitedPrivate(value = { "" })
 @Evolving
-public class HivePrivilege {
+public class HivePrivilege implements Comparable<HivePrivilege> {
   @Override
   public String toString() {
     return "Privilege [name=" + name + ", columns=" + columns + "]";
@@ -36,10 +37,16 @@ public class HivePrivilege {
 
   private final String name;
   private final List<String> columns;
+  private final List<String> supportedScope;
 
-  public HivePrivilege(String name, List<String> columns){
+  public HivePrivilege(String name, List<String> columns) {
+    this(name, columns, null);
+  }
+
+  public HivePrivilege(String name, List<String> columns, List<String> supportedScope) {
     this.name = name.toUpperCase(Locale.US);
     this.columns = columns;
+    this.supportedScope = supportedScope;
   }
 
   public String getName() {
@@ -50,6 +57,10 @@ public class HivePrivilege {
     return columns;
   }
 
+  public List<String> getSupportedScope() {
+    return supportedScope;
+  }
+
   @Override
   public int hashCode() {
     final int prime = 31;
@@ -82,5 +93,27 @@ public class HivePrivilege {
   }
 
 
+  public boolean supportsScope(PrivilegeScope scope) {
+    return supportedScope != null && supportedScope.contains(scope.name());
+  }
 
+  public int compareTo(HivePrivilege privilege) {
+    int compare = columns != null ?
+        (privilege.columns != null ? compare(columns, privilege.columns) : 1) :
+        (privilege.columns != null ? -1 : 0);
+    if (compare == 0) {
+      compare = name.compareTo(privilege.name);
+    }
+    return compare;
+  }
+
+  private int compare(List<String> o1, List<String> o2) {
+    for (int i = 0; i < Math.min(o1.size(), o2.size()); i++) {
+      int compare = o1.get(i).compareTo(o2.get(i));
+      if (compare != 0) {
+        return compare;
+      }
+    }
+    return o1.size() > o2.size() ? 1 : (o1.size() < o2.size() ? -1 : 0);
+  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java Tue Jul 22 19:00:49 2014
@@ -21,14 +21,16 @@ import java.util.List;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+
+import java.util.ArrayList;
+import java.util.Arrays;
 
 /**
  * Represents the object on which privilege is being granted/revoked
  */
 @LimitedPrivate(value = { "" })
 @Unstable
-public class HivePrivilegeObject {
+public class HivePrivilegeObject implements Comparable<HivePrivilegeObject> {
 
   @Override
   public String toString() {
@@ -38,8 +40,13 @@ public class HivePrivilegeObject {
       name = dbname;
       break;
     case TABLE_OR_VIEW:
+    case PARTITION:
       name = (dbname == null ? "" : dbname + ".") + tableviewname;
+      if (partKeys != null) {
+        name += partKeys.toString();
+      }
       break;
+    case COLUMN:
     case LOCAL_URI:
     case DFS_URI:
       name = tableviewname;
@@ -47,33 +54,74 @@ public class HivePrivilegeObject {
     case COMMAND_PARAMS:
       name = commandParams.toString();
       break;
-    case PARTITION:
-      break;
     }
     return "Object [type=" + type + ", name=" + name + "]";
 
   }
 
-  public enum HivePrivilegeObjectType {
-    DATABASE, TABLE_OR_VIEW, PARTITION, LOCAL_URI, DFS_URI, COMMAND_PARAMS
-  };
+  @Override
+  public int compareTo(HivePrivilegeObject o) {
+    int compare = type.compareTo(o.type);
+    if (compare == 0) {
+      compare = dbname.compareTo(o.dbname);
+    }
+    if (compare == 0) {
+      compare = tableviewname != null ?
+          (o.tableviewname != null ? tableviewname.compareTo(o.tableviewname) : 1) :
+          (o.tableviewname != null ? -1 : 0);
+    }
+    if (compare == 0) {
+      compare = partKeys != null ?
+          (o.partKeys != null ? compare(partKeys, o.partKeys) : 1) :
+          (o.partKeys != null ? -1 : 0);
+    }
+    if (compare == 0) {
+      compare = columns != null ?
+          (o.columns != null ? compare(columns, o.columns) : 1) :
+          (o.columns != null ? -1 : 0);
+    }
+    return compare;
+  }
+
+  private int compare(List<String> o1, List<String> o2) {
+    for (int i = 0; i < Math.min(o1.size(), o2.size()); i++) {
+      int compare = o1.get(i).compareTo(o2.get(i));
+      if (compare != 0) {
+        return compare;
+      }
+    }
+    return o1.size() > o2.size() ? 1 : (o1.size() < o2.size() ? -1 : 0);
+  }
 
+  public enum HivePrivilegeObjectType {
+    GLOBAL, DATABASE, TABLE_OR_VIEW, PARTITION, COLUMN, LOCAL_URI, DFS_URI, COMMAND_PARAMS
+  } ;
   public enum HivePrivObjectActionType {
     OTHER, INSERT, INSERT_OVERWRITE
   };
+
   private final HivePrivilegeObjectType type;
   private final String dbname;
   private final String tableviewname;
   private final List<String> commandParams;
+  private final List<String> partKeys;
+  private final List<String> columns;
   private final HivePrivObjectActionType actionType;
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI){
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI) {
     this(type, dbname, tableViewURI, HivePrivObjectActionType.OTHER);
   }
 
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI
+      , HivePrivObjectActionType actionType) {
+    this(type, dbname, tableViewURI, null, null, actionType, null);
+  }
+
   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI,
-      HivePrivObjectActionType actionType) {
-    this(type, dbname, tableViewURI, actionType, null);
+      List<String> partKeys, String column) {
+    this(type, dbname, tableViewURI, partKeys,
+        column == null ? null : new ArrayList<String>(Arrays.asList(column)),
+        HivePrivObjectActionType.OTHER, null);
   }
 
   /**
@@ -82,15 +130,23 @@ public class HivePrivilegeObject {
    * @return
    */
   public static HivePrivilegeObject createHivePrivilegeObject(List<String> cmdParams) {
-    return new HivePrivilegeObject(HivePrivilegeObjectType.COMMAND_PARAMS, null, null, null,
+    return new HivePrivilegeObject(HivePrivilegeObjectType.COMMAND_PARAMS, null, null, null, null,
         cmdParams);
   }
 
   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI,
-      HivePrivObjectActionType actionType, List<String> commandParams) {
+    List<String> partKeys, List<String> columns, List<String> commandParams) {
+    this(type, dbname, tableViewURI, partKeys, columns, HivePrivObjectActionType.OTHER, commandParams);
+  }
+
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI,
+      List<String> partKeys, List<String> columns, HivePrivObjectActionType actionType,
+      List<String> commandParams) {
     this.type = type;
     this.dbname = dbname;
     this.tableviewname = tableViewURI;
+    this.partKeys = partKeys;
+    this.columns = columns;
     this.actionType = actionType;
     this.commandParams = commandParams;
   }
@@ -114,4 +170,12 @@ public class HivePrivilegeObject {
   public List<String> getCommandParams() {
     return commandParams;
   }
+
+  public List<String> getPartKeys() {
+    return partKeys;
+  }
+
+  public List<String> getColumns() {
+    return columns;
+  }
 }

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java?rev=1612646&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java Tue Jul 22 19:00:49 2014
@@ -0,0 +1,374 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.PrivilegeScope;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAccessController;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class HiveV1Authorizer implements HiveAuthorizer {
+
+  private final HiveConf conf;
+  private final Hive hive;
+
+  public HiveV1Authorizer(HiveConf conf, Hive hive) {
+    this.conf = conf;
+    this.hive = hive;
+  }
+
+  @Override
+  public VERSION getVersion() {
+    return VERSION.V1;
+  }
+
+  @Override
+  public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputsHObjs,
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    throw new UnsupportedOperationException("Should not be called for v1 authorizer");
+  }
+
+  @Override
+  public void grantPrivileges(
+      List<HivePrincipal> principals, List<HivePrivilege> privileges, HivePrivilegeObject privObject,
+      HivePrincipal grantor, boolean grantOption)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      PrivilegeBag privBag = toPrivilegeBag(privileges, privObject, grantor, grantOption);
+      grantOrRevokePrivs(principals, privBag, true, grantOption);
+    } catch (Exception e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  @Override
+  public void revokePrivileges(
+      List<HivePrincipal> principals, List<HivePrivilege> privileges, HivePrivilegeObject privObject,
+      HivePrincipal grantor, boolean grantOption)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      PrivilegeBag privBag = toPrivilegeBag(privileges, privObject, grantor, grantOption);
+      grantOrRevokePrivs(principals, privBag, false, grantOption);
+    } catch (Exception e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  private void grantOrRevokePrivs(List<HivePrincipal> principals, PrivilegeBag privBag,
+      boolean isGrant, boolean grantOption) throws HiveException {
+    for (HivePrincipal principal : principals) {
+      PrincipalType type = AuthorizationUtils.getThriftPrincipalType(principal.getType());
+      for (HiveObjectPrivilege priv : privBag.getPrivileges()) {
+        priv.setPrincipalName(principal.getName());
+        priv.setPrincipalType(type);
+      }
+      if (isGrant) {
+        hive.grantPrivileges(privBag);
+      } else {
+        hive.revokePrivileges(privBag, grantOption);
+      }
+    }
+  }
+
+  private PrivilegeBag toPrivilegeBag(List<HivePrivilege> privileges,
+      HivePrivilegeObject privObject, HivePrincipal grantor, boolean grantOption)
+      throws HiveException {
+
+    PrivilegeBag privBag = new PrivilegeBag();
+    if (privileges.isEmpty()) {
+      return privBag;
+    }
+    String grantorName = grantor.getName();
+    PrincipalType grantorType = AuthorizationUtils.getThriftPrincipalType(grantor.getType());
+    if (privObject.getType() == null ||
+        privObject.getType() == HivePrivilegeObject.HivePrivilegeObjectType.GLOBAL) {
+      for (HivePrivilege priv : privileges) {
+        List<String> columns = priv.getColumns();
+        if (columns != null && !columns.isEmpty()) {
+          throw new HiveException(
+              "For user-level privileges, column sets should be null. columns=" +
+                  columns.toString());
+        }
+        privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(
+            HiveObjectType.GLOBAL, null, null, null, null), null, null,
+            new PrivilegeGrantInfo(priv.getName(), 0, grantor.getName(), grantorType,
+                grantOption)));
+      }
+      return privBag;
+    }
+
+    if (privObject.getPartKeys() != null && grantOption) {
+      throw new HiveException("Grant does not support partition level.");
+    }
+    Database dbObj = hive.getDatabase(privObject.getDbname());
+    if (dbObj == null) {
+      throw new HiveException("Database " + privObject.getDbname() + " does not exists");
+    }
+    Table tableObj = null;
+    if (privObject.getTableViewURI() != null) {
+      tableObj = hive.getTable(dbObj.getName(), privObject.getTableViewURI());
+    }
+
+    List<String> partValues = null;
+    if (tableObj != null) {
+      if ((!tableObj.isPartitioned())
+          && privObject.getPartKeys() != null) {
+        throw new HiveException(
+            "Table is not partitioned, but partition name is present: partSpec="
+                + privObject.getPartKeys());
+      }
+
+      if (privObject.getPartKeys() != null) {
+        Map<String, String> partSpec =
+            Warehouse.makeSpecFromValues(tableObj.getPartitionKeys(), privObject.getPartKeys());
+        Partition partObj = hive.getPartition(tableObj, partSpec, false).getTPartition();
+        partValues = partObj.getValues();
+      }
+    }
+
+    for (HivePrivilege priv : privileges) {
+      List<String> columns = priv.getColumns();
+      if (columns != null && !columns.isEmpty()) {
+        if (!priv.supportsScope(PrivilegeScope.COLUMN_LEVEL_SCOPE)) {
+          throw new HiveException(priv.getName() + " does not support column level privilege.");
+        }
+        if (tableObj == null) {
+          throw new HiveException(
+              "For user-level/database-level privileges, column sets should be null. columns="
+                  + columns);
+        }
+        for (int i = 0; i < columns.size(); i++) {
+          privBag.addToPrivileges(new HiveObjectPrivilege(
+              new HiveObjectRef(HiveObjectType.COLUMN, dbObj.getName(), tableObj.getTableName(),
+                  partValues, columns.get(i)), null, null,
+              new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
+        }
+      } else if (tableObj == null) {
+        privBag.addToPrivileges(new HiveObjectPrivilege(
+            new HiveObjectRef(HiveObjectType.DATABASE, dbObj.getName(), null,
+                null, null), null, null,
+            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
+      } else if (partValues == null) {
+        privBag.addToPrivileges(new HiveObjectPrivilege(
+            new HiveObjectRef(HiveObjectType.TABLE, dbObj.getName(), tableObj.getTableName(),
+                null, null), null, null,
+            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
+      } else {
+        privBag.addToPrivileges(new HiveObjectPrivilege(
+            new HiveObjectRef(HiveObjectType.PARTITION, dbObj.getName(), tableObj.getTableName(),
+                partValues, null), null, null,
+            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
+      }
+    }
+    return privBag;
+  }
+
+  @Override
+  public void createRole(String roleName, HivePrincipal adminGrantor) throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      hive.createRole(roleName, adminGrantor == null ? null : adminGrantor.getName());
+    } catch (HiveException e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  @Override
+  public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      hive.dropRole(roleName);
+    } catch (HiveException e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  @Override
+  public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      return SQLStdHiveAccessController.getHiveRoleGrants(hive.getMSC(), roleName);
+    } catch (Exception e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  @Override
+  public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal) throws HiveAuthzPluginException, HiveAccessControlException {
+    PrincipalType type = AuthorizationUtils.getThriftPrincipalType(principal.getType());
+    try {
+      List<HiveRoleGrant> grants = new ArrayList<HiveRoleGrant>();
+      for (RolePrincipalGrant grant : hive.getRoleGrantInfoForPrincipal(principal.getName(), type)) {
+        grants.add(new HiveRoleGrant(grant));
+      }
+      return grants;
+    } catch (HiveException e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  @Override
+  public void grantRole(List<HivePrincipal> principals, List<String> roles, boolean grantOption,
+      HivePrincipal grantor) throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      grantOrRevokeRole(principals, roles, grantOption, grantor, true);
+    } catch (HiveException e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  @Override
+  public void revokeRole(List<HivePrincipal> principals, List<String> roles, boolean grantOption,
+      HivePrincipal grantor) throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      grantOrRevokeRole(principals, roles, grantOption, grantor, false);
+    } catch (HiveException e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  private void grantOrRevokeRole(List<HivePrincipal> principals, List<String> roles,
+      boolean grantOption, HivePrincipal grantor, boolean isGrant) throws HiveException {
+    PrincipalType grantorType = AuthorizationUtils.getThriftPrincipalType(grantor.getType());
+    for (HivePrincipal principal : principals) {
+      PrincipalType principalType = AuthorizationUtils.getThriftPrincipalType(principal.getType());
+      String userName = principal.getName();
+      for (String roleName : roles) {
+        if (isGrant) {
+          hive.grantRole(roleName, userName, principalType,
+              grantor.getName(), grantorType, grantOption);
+        } else {
+          hive.revokeRole(roleName, userName, principalType, grantOption);
+        }
+      }
+    }
+  }
+
+  @Override
+  public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException {
+    try {
+      return hive.getAllRoleNames();
+    } catch (HiveException e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  @Override
+  public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    String name = principal == null ? null : principal.getName();
+    PrincipalType type =
+        AuthorizationUtils.getThriftPrincipalType(principal == null ? null : principal.getType());
+
+    List<HiveObjectPrivilege> privs = new ArrayList<HiveObjectPrivilege>();
+    try {
+      if (privObj == null) {
+        // show user level privileges
+        privs.addAll(hive.showPrivilegeGrant(HiveObjectType.GLOBAL, name, type,
+            null, null, null, null));
+      } else if (privObj.getDbname() == null) {
+        // show all privileges
+        privs.addAll(hive.showPrivilegeGrant(null, name, type, null, null, null, null));
+      } else {
+        Database dbObj = hive.getDatabase(privObj.getDbname());;
+        if (dbObj == null) {
+          throw new HiveException("Database " + privObj.getDbname() + " does not exists");
+        }
+        Table tableObj = null;
+        if (privObj.getTableViewURI() != null) {
+          tableObj = hive.getTable(dbObj.getName(), privObj.getTableViewURI());
+        }
+        List<String> partValues = privObj.getPartKeys();
+
+        if (tableObj == null) {
+          // show database level privileges
+          privs.addAll(hive.showPrivilegeGrant(HiveObjectType.DATABASE,
+              name, type, dbObj.getName(), null, null, null));
+        } else {
+          List<String> columns = privObj.getColumns();
+          if (columns != null && !columns.isEmpty()) {
+            // show column level privileges
+            for (String columnName : columns) {
+              privs.addAll(hive.showPrivilegeGrant(HiveObjectType.COLUMN, name, type,
+                  dbObj.getName(), tableObj.getTableName(), partValues, columnName));
+            }
+          } else if (partValues == null) {
+            // show table level privileges
+            privs.addAll(hive.showPrivilegeGrant(HiveObjectType.TABLE, name, type,
+                dbObj.getName(), tableObj.getTableName(), null, null));
+          } else {
+            // show partition level privileges
+            privs.addAll(hive.showPrivilegeGrant(HiveObjectType.PARTITION, name, type,
+                dbObj.getName(), tableObj.getTableName(), partValues, null));
+          }
+        }
+      }
+      return AuthorizationUtils.getPrivilegeInfos(privs);
+    } catch (Exception ex) {
+      throw new HiveAuthzPluginException(ex);
+    }
+  }
+
+  @Override
+  public void setCurrentRole(String roleName) throws HiveAccessControlException, HiveAuthzPluginException {
+    throw new HiveAuthzPluginException("Unsupported operation 'setCurrentRole' for V1 auth");
+  }
+
+  @Override
+  public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
+
+    String userName = SessionState.get().getUserName();
+    if (userName == null) {
+      userName = SessionState.getUserFromAuthenticator();
+    }
+    if (userName == null) {
+      throw new HiveAuthzPluginException("Cannot resolve current user name");
+    }
+    try {
+      List<String> roleNames = new ArrayList<String>();
+      for (Role role : hive.listRoles(userName, PrincipalType.USER)) {
+        roleNames.add(role.getRoleName());
+      }
+      return roleNames;
+    } catch (HiveException e) {
+      throw new HiveAuthzPluginException(e);
+    }
+  }
+
+  @Override
+  public void applyAuthorizationConfigPolicy(HiveConf hiveConf) {
+  }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java Tue Jul 22 19:00:49 2014
@@ -186,8 +186,12 @@ public class SQLAuthorizationUtils {
     // get privileges for this user and its role on this object
     PrincipalPrivilegeSet thrifPrivs = null;
     try {
+      HiveObjectRef objectRef = AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject);
+      if (objectRef.getObjectType() == null) {
+        objectRef.setObjectType(HiveObjectType.GLOBAL);
+      }
       thrifPrivs = metastoreClient.get_privilege_set(
-          AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject), userName, null);
+          objectRef, userName, null);
     } catch (MetaException e) {
       throwGetPrivErr(e, hivePrivObject, userName);
     } catch (TException e) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java Tue Jul 22 19:00:49 2014
@@ -350,19 +350,24 @@ public class SQLStdHiveAccessController 
         + " allowed get principals in a role. " + ADMIN_ONLY_MSG);
     }
     try {
-      GetPrincipalsInRoleResponse princGrantInfo =
-          metastoreClientFactory.getHiveMetastoreClient().get_principals_in_role(new GetPrincipalsInRoleRequest(roleName));
-
-      List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
-      for(RolePrincipalGrant thriftRoleGrant :  princGrantInfo.getPrincipalGrants()){
-        hiveRoleGrants.add(new HiveRoleGrant(thriftRoleGrant));
-      }
-      return hiveRoleGrants;
+      return getHiveRoleGrants(metastoreClientFactory.getHiveMetastoreClient(), roleName);
     } catch (Exception e) {
       throw new HiveAuthzPluginException("Error getting principals for all roles", e);
     }
   }
 
  /**
   * Fetches the principals granted {@code roleName} from the metastore and wraps
   * each thrift {@code RolePrincipalGrant} as a {@link HiveRoleGrant}. Static so
   * it can be shared with the v1 authorizer (see HiveV1Authorizer).
   *
   * @param client   metastore client to query
   * @param roleName role whose grantees are listed
   * @throws Exception on any metastore/thrift failure (caller wraps it)
   */
  public static List<HiveRoleGrant> getHiveRoleGrants(IMetaStoreClient client, String roleName)
      throws Exception {
    GetPrincipalsInRoleRequest request = new GetPrincipalsInRoleRequest(roleName);
    GetPrincipalsInRoleResponse princGrantInfo = client.get_principals_in_role(request);

    List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
    for(RolePrincipalGrant thriftRoleGrant :  princGrantInfo.getPrincipalGrants()){
      hiveRoleGrants.add(new HiveRoleGrant(thriftRoleGrant));
    }
    return hiveRoleGrants;
  }
+
   @Override
   public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
       throws HiveAuthzPluginException {
@@ -416,7 +421,7 @@ public class SQLStdHiveAccessController 
 
         HivePrivilegeObject resPrivObj = new HivePrivilegeObject(
             getPluginObjType(msObjRef.getObjectType()), msObjRef.getDbName(),
-            msObjRef.getObjectName());
+            msObjRef.getObjectName(), msObjRef.getPartValues(), msObjRef.getColumnName());
 
         // result grantor principal
         HivePrincipal grantorPrincipal = new HivePrincipal(msGrantInfo.getGrantor(),

Added: hive/trunk/ql/src/test/queries/clientpositive/authorization_show_role_principals_v1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/authorization_show_role_principals_v1.q?rev=1612646&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/authorization_show_role_principals_v1.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/authorization_show_role_principals_v1.q Tue Jul 22 19:00:49 2014
@@ -0,0 +1,6 @@
+create role role1;
+grant role1 to user user1 with admin option;
+grant role1 to user user2 with admin option;
+show role grant user user1;
+show role grant user user2;
+show principals role1;

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_caseinsensitivity.q.out Tue Jul 22 19:00:49 2014
@@ -59,4 +59,4 @@ testrole
 
 PREHOOK: query: create role TESTRoLE
 PREHOOK: type: CREATEROLE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidObjectException(message:Role testrole already exists.)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Role testrole already exists.

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_1.q.out?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_1.q.out Tue Jul 22 19:00:49 2014
@@ -14,5 +14,4 @@ POSTHOOK: Output: default@authorization_
 PREHOOK: query: grant Create on table authorization_fail_1 to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@authorization_fail_1
-Error: java.lang.RuntimeException: InvalidObjectException(message:Create is already granted on table [default,authorization_fail_1] by hive_test_user)
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: InvalidObjectException(message:CREATE is already granted on table [default,authorization_fail_1] by hive_test_user)

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out Tue Jul 22 19:00:49 2014
@@ -26,7 +26,7 @@ PREHOOK: query: show grant user hive_tes
 PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: show grant user hive_test_user on table authorization_fail_3
 POSTHOOK: type: SHOW_GRANT
-default	authorization_fail_3			hive_test_user	USER	Create	false	-1	hive_test_user
+default	authorization_fail_3			hive_test_user	USER	CREATE	false	-1	hive_test_user
 PREHOOK: query: show grant user hive_test_user on table authorization_fail_3 partition (ds='2010')
 PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: show grant user hive_test_user on table authorization_fail_3 partition (ds='2010')

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out?rev=1612646&r1=1612645&r2=1612646&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out Tue Jul 22 19:00:49 2014
@@ -40,12 +40,12 @@ PREHOOK: query: show grant user hive_tes
 PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: show grant user hive_test_user on table authorization_fail_4
 POSTHOOK: type: SHOW_GRANT
-default	authorization_fail_4			hive_test_user	USER	Alter	false	-1	hive_test_user
-default	authorization_fail_4			hive_test_user	USER	Create	false	-1	hive_test_user
+default	authorization_fail_4			hive_test_user	USER	ALTER	false	-1	hive_test_user
+default	authorization_fail_4			hive_test_user	USER	CREATE	false	-1	hive_test_user
 PREHOOK: query: show grant user hive_test_user on table authorization_fail_4 partition (ds='2010')
 PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: show grant user hive_test_user on table authorization_fail_4 partition (ds='2010')
 POSTHOOK: type: SHOW_GRANT
-default	authorization_fail_4	[2010]		hive_test_user	USER	Alter	false	-1	hive_test_user
-default	authorization_fail_4	[2010]		hive_test_user	USER	Create	false	-1	hive_test_user
+default	authorization_fail_4	[2010]		hive_test_user	USER	ALTER	false	-1	hive_test_user
+default	authorization_fail_4	[2010]		hive_test_user	USER	CREATE	false	-1	hive_test_user
 Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail_4, partitionName:ds=2010, columnName:key}. Use SHOW GRANT to get more details.



Mime
View raw message