hive-commits mailing list archives

From khorg...@apache.org
Subject svn commit: r1607753 - in /hive/trunk: conf/ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/ itests/util/src/main/java/org/apache/hadoop/hive/ql/security/ metastore/src...
Date Thu, 03 Jul 2014 21:43:21 GMT
Author: khorgath
Date: Thu Jul  3 21:43:20 2014
New Revision: 1607753

URL: http://svn.apache.org/r1607753
Log:
HIVE-7209 : allow metastore authorization api calls to be restricted to certain invokers (Thejas Nair via Sushanth Sowmyan, Ashutosh Chauhan)
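
In practice the restriction is enabled through two settings: the metastore pre-event listener and the metastore-side authorization manager. A minimal illustrative sketch (not part of this commit), mirroring the setup used by the new TestAuthorizationApiAuthorizer test below, with imports as in that test:

  // Illustration only -- property names and classes are taken from the test below.
  System.setProperty("hive.metastore.pre.event.listeners",
      AuthorizationPreEventListener.class.getName());
  System.setProperty("hive.security.metastore.authorization.manager",
      MetaStoreAuthzAPIAuthorizerEmbedOnly.class.getName());
  HiveConf hiveConf = new HiveConf();
  HiveMetaStoreClient msc = new HiveMetaStoreClient(hiveConf, null);
  // Against a remote metastore, authorization api calls such as msc.create_role(...)
  // or msc.grant_privileges(...) are now rejected; with an embedded metastore they proceed.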

Added:
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAuthorizationCallEvent.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java
Modified:
    hive/trunk/conf/hive-default.xml.template
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java

Modified: hive/trunk/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml.template?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/conf/hive-default.xml.template (original)
+++ hive/trunk/conf/hive-default.xml.template Thu Jul  3 21:43:20 2014
@@ -1624,8 +1624,9 @@
 <property>
   <name>hive.security.metastore.authorization.manager</name>
   <value>org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider</value>
-  <description>authorization manager class name to be used in the metastore for authorization.
-  The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider. 
+  <description>Names of authorization manager classes (comma separated) to be used in the metastore for authorization.
+  The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider.
+  All authorization manager classes have to successfully authorize the metastore api call for the command execution to be allowed.
   </description>
 </property>
 

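For illustration (not part of this commit), a hive-site.xml entry that stacks two metastore authorization providers could look like the following; every listed class must implement HiveMetastoreAuthorizationProvider, and each one must authorize the call:

  <property>
    <name>hive.security.metastore.authorization.manager</name>
    <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider,org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly</value>
  </property>
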
Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java?rev=1607753&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java Thu Jul  3 21:43:20 2014
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.junit.Test;
+
+/**
+ * Test case for {@link MetaStoreAuthzAPIAuthorizerEmbedOnly}. The authorizer is
+ * supposed to allow metastore api calls in embedded metastore mode while
+ * disallowing them in remote metastore mode. Note that this is an abstract
+ * class; the subclasses set the mode, and the tests defined here run as part
+ * of those subclasses.
+ */
+public abstract class TestAuthorizationApiAuthorizer {
+  protected static boolean isRemoteMetastoreMode;
+  private static HiveConf hiveConf;
+  private static HiveMetaStoreClient msc;
+
+  protected static void setup() throws Exception {
+    System.err.println("Running with remoteMode = " + isRemoteMetastoreMode);
+    System.setProperty("hive.metastore.pre.event.listeners",
+        AuthorizationPreEventListener.class.getName());
+    System.setProperty("hive.security.metastore.authorization.manager",
+        MetaStoreAuthzAPIAuthorizerEmbedOnly.class.getName());
+
+    hiveConf = new HiveConf();
+    if (isRemoteMetastoreMode) {
+      int port = MetaStoreUtils.findFreePort();
+      MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
+      hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
+    }
+    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+    msc = new HiveMetaStoreClient(hiveConf, null);
+
+  }
+
+  interface FunctionInvoker {
+    public void invoke() throws Exception;
+  }
+
+  /**
+   * Test whether authorization failed or passed for a FunctionInvoker that invokes a metastore
+   * client api call.
+   * @param mscFunctionInvoker
+   * @throws Exception
+   */
+  private void testFunction(FunctionInvoker mscFunctionInvoker) throws Exception {
+    boolean caughtEx = false;
+    try {
+      try {
+        mscFunctionInvoker.invoke();
+      } catch (RuntimeException e) {
+        // A hack to verify that the authorization check passed. An exception can be thrown because
+        // the functions are not being called with valid params.
+        // Verify that the exception came from ObjectStore code, which means that the
+        // authorization checks passed.
+        String exStackString = ExceptionUtils.getStackTrace(e);
+        assertTrue("Verifying this exception came after authorization check",
+            exStackString.contains("org.apache.hadoop.hive.metastore.ObjectStore"));
+        // If it's not an exception caused by the auth check, ignore it
+      }
+      assertFalse("Authz Exception should have been thrown in remote mode", isRemoteMetastoreMode);
+      System.err.println("No auth exception thrown");
+    } catch (MetaException e) {
+      System.err.println("Caught exception");
+      caughtEx = true;
+      assertTrue(e.getMessage().contains(MetaStoreAuthzAPIAuthorizerEmbedOnly.errMsg));
+    }
+    if (!isRemoteMetastoreMode) {
+      assertFalse("No exception should be thrown in embedded mode", caughtEx);
+    }
+  }
+
+  @Test
+  public void testGrantPriv() throws Exception {
+    FunctionInvoker invoker = new FunctionInvoker() {
+      @Override
+      public void invoke() throws Exception {
+        msc.grant_privileges(new PrivilegeBag(new ArrayList<HiveObjectPrivilege>()));
+      }
+    };
+    testFunction(invoker);
+  }
+
+  @Test
+  public void testRevokePriv() throws Exception {
+    FunctionInvoker invoker = new FunctionInvoker() {
+      @Override
+      public void invoke() throws Exception {
+        msc.revoke_privileges(new PrivilegeBag(new ArrayList<HiveObjectPrivilege>()));
+      }
+    };
+    testFunction(invoker);
+  }
+
+  @Test
+  public void testGrantRole() throws Exception {
+    FunctionInvoker invoker = new FunctionInvoker() {
+      @Override
+      public void invoke() throws Exception {
+        msc.grant_role(null, null, null, null, null, true);
+      }
+    };
+    testFunction(invoker);
+  }
+
+  @Test
+  public void testRevokeRole() throws Exception {
+    FunctionInvoker invoker = new FunctionInvoker() {
+      @Override
+      public void invoke() throws Exception {
+        msc.revoke_role(null, null, null);
+      }
+    };
+    testFunction(invoker);
+  }
+
+  @Test
+  public void testCreateRole() throws Exception {
+    FunctionInvoker invoker = new FunctionInvoker() {
+      @Override
+      public void invoke() throws Exception {
+        msc.create_role(new Role());
+      }
+    };
+    testFunction(invoker);
+  }
+
+  @Test
+  public void testDropRole() throws Exception {
+    FunctionInvoker invoker = new FunctionInvoker() {
+      @Override
+      public void invoke() throws Exception {
+        msc.drop_role(null);
+      }
+    };
+    testFunction(invoker);
+  }
+
+  @Test
+  public void testListRoles() throws Exception {
+    FunctionInvoker invoker = new FunctionInvoker() {
+      @Override
+      public void invoke() throws Exception {
+        msc.list_roles(null, null);
+      }
+    };
+    testFunction(invoker);
+  }
+
+  @Test
+  public void testGetPrivSet() throws Exception {
+    FunctionInvoker invoker = new FunctionInvoker() {
+      @Override
+      public void invoke() throws Exception {
+        msc.get_privilege_set(new HiveObjectRef(), null, new ArrayList<String>());
+      }
+    };
+    testFunction(invoker);
+  }
+
+  @Test
+  public void testListPriv() throws Exception {
+    FunctionInvoker invoker = new FunctionInvoker() {
+      @Override
+      public void invoke() throws Exception {
+        msc.list_privileges(null, PrincipalType.USER, new HiveObjectRef());
+      }
+    };
+    testFunction(invoker);
+  }
+
+
+
+}

Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java?rev=1607753&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java Thu Jul  3 21:43:20 2014
@@ -0,0 +1,16 @@
+package org.apache.hadoop.hive.metastore;
+
+import org.junit.BeforeClass;
+
+/**
+ * Test {@link TestAuthorizationApiAuthorizer} in embedded mode of metastore
+ */
+public class TestAuthzApiEmbedAuthorizerInEmbed extends TestAuthorizationApiAuthorizer {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    isRemoteMetastoreMode = false; // embedded metastore mode
+    TestAuthorizationApiAuthorizer.setup();
+  }
+
+}

Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java?rev=1607753&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java Thu Jul  3 21:43:20 2014
@@ -0,0 +1,16 @@
+package org.apache.hadoop.hive.metastore;
+
+import org.junit.BeforeClass;
+
+/**
+ * Test {@link TestAuthorizationApiAuthorizer} in remote mode of metastore
+ */
+public class TestAuthzApiEmbedAuthorizerInRemote extends TestAuthorizationApiAuthorizer {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    isRemoteMetastoreMode = true; // remote metastore mode
+    TestAuthorizationApiAuthorizer.setup();
+  }
+
+}

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java Thu Jul  3 21:43:20 2014
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.ql.security;
 
-import java.io.IOException;
-import java.net.ServerSocket;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -81,14 +79,6 @@ public class TestAuthorizationPreEventLi
     driver = new Driver(clientHiveConf);
   }
 
-  private static String getFreeAvailablePort() throws IOException {
-    ServerSocket socket = new ServerSocket(0);
-    socket.setReuseAddress(true);
-    int port = socket.getLocalPort();
-    socket.close();
-    return "" + port;
-  }
-
   @Override
   protected void tearDown() throws Exception {
     super.tearDown();

Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java?rev=1607753&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java Thu Jul  3 21:43:20 2014
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.security.DummyHiveMetastoreAuthorizationProvider.AuthCallContext;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test case for verifying that multiple metastore authorization providers can be
+ * configured on {@link org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener}
+ * and that each of them gets called.
+ */
+public class TestMultiAuthorizationPreEventListener {
+  private static HiveConf clientHiveConf;
+  private static HiveMetaStoreClient msc;
+  private static Driver driver;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+
+
+    int port = MetaStoreUtils.findFreePort();
+
+    System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname,
+        AuthorizationPreEventListener.class.getName());
+
+    // Set two dummy classes as authorization managers. Two instances should get created.
+    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
+        DummyHiveMetastoreAuthorizationProvider.class.getName() + ","
+            + DummyHiveMetastoreAuthorizationProvider.class.getName());
+
+    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
+        HadoopDefaultMetastoreAuthenticator.class.getName());
+
+    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
+
+    clientHiveConf = new HiveConf();
+
+    clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
+    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+    SessionState.start(new CliSessionState(clientHiveConf));
+    msc = new HiveMetaStoreClient(clientHiveConf, null);
+    driver = new Driver(clientHiveConf);
+  }
+
+  @Test
+  public void testMultipleAuthorizationListners() throws Exception {
+    String dbName = "hive" + this.getClass().getSimpleName().toLowerCase();
+    List<AuthCallContext> authCalls = DummyHiveMetastoreAuthorizationProvider.authCalls;
+    int listSize = 0;
+    assertEquals(listSize, authCalls.size());
+
+    driver.run("create database " + dbName);
+    // verify that there are two calls because of two instances of the authorization provider
+    listSize = 2;
+    assertEquals(listSize, authCalls.size());
+
+    // verify that the actual action also went through
+    Database db = msc.getDatabase(dbName);
+    Database dbFromEvent = (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls,
+        DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB);
+    validateCreateDb(db,dbFromEvent);
+  }
+
+  public Object assertAndExtractSingleObjectFromEvent(int listSize,
+      List<AuthCallContext> authCalls,
+      DummyHiveMetastoreAuthorizationProvider.AuthCallContextType callType) {
+    assertEquals(listSize, authCalls.size());
+    assertEquals(1,authCalls.get(listSize-1).authObjects.size());
+
+    assertEquals(callType,authCalls.get(listSize-1).type);
+    return (authCalls.get(listSize-1).authObjects.get(0));
+  }
+
+
+  private void validateCreateDb(Database expectedDb, Database actualDb) {
+    assertEquals(expectedDb.getName(), actualDb.getName());
+    assertEquals(expectedDb.getLocationUri(), actualDb.getLocationUri());
+  }
+
+
+}

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java Thu Jul  3 21:43:20 2014
@@ -33,6 +33,10 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 
+/**
+ * Dummy implementation for use by unit tests. Tracks the context of calls made to
+ * its authorize functions in {@link AuthCallContext}
+ */
 public class DummyHiveMetastoreAuthorizationProvider implements HiveMetastoreAuthorizationProvider {
 
 
@@ -43,7 +47,8 @@ public class DummyHiveMetastoreAuthoriza
     DB,
     TABLE,
     PARTITION,
-    TABLE_AND_PARTITION
+    TABLE_AND_PARTITION,
+    AUTHORIZATION
   };
 
   class AuthCallContext {
@@ -200,5 +205,12 @@ public class DummyHiveMetastoreAuthoriza
     debugLog("DHMAP.setMetaStoreHandler");
   }
 
+  @Override
+  public void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException {
+    debugLog("DHMAP.authorizeauthapi");
+    authCalls.add(new AuthCallContext(AuthCallContextType.AUTHORIZATION, null, null));
+  }
+
+
 
 }

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Thu Jul  3 21:43:20 2014
@@ -92,7 +92,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.metastore.api.IndexAlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -148,6 +147,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreAuthorizationCallEvent;
 import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
 import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
@@ -194,6 +194,11 @@ import com.google.common.collect.Lists;
 public class HiveMetaStore extends ThriftHiveMetastore {
   public static final Log LOG = LogFactory.getLog(HiveMetaStore.class);
 
+  // Boolean that tells whether the (remote) HiveMetaStore server is being used.
+  // It can be used to determine whether calls to the metastore api (HMSHandler) are being made
+  // against an embedded metastore or a remote one.
+  private static boolean isMetaStoreRemote = false;
+
   /** A fixed date format to be used for hive partition column values. */
   public static final DateFormat PARTITION_DATE_FORMAT;
   static {
@@ -621,7 +626,7 @@ public class HiveMetaStore extends Thrif
       logAuditEvent(m);
     }
 
-    public String startFunction(String function, String extraLogInfo) {
+    private String startFunction(String function, String extraLogInfo) {
       incrementCounter(function);
       logInfo((getIpAddress() == null ? "" : "source:" + getIpAddress() + " ") +
           function + extraLogInfo);
@@ -634,26 +639,26 @@ public class HiveMetaStore extends Thrif
       return function;
     }
 
-    public String startFunction(String function) {
+    private String startFunction(String function) {
       return startFunction(function, "");
     }
 
-    public String startTableFunction(String function, String db, String tbl) {
+    private String startTableFunction(String function, String db, String tbl) {
       return startFunction(function, " : db=" + db + " tbl=" + tbl);
     }
 
-    public String startMultiTableFunction(String function, String db, List<String> tbls) {
+    private String startMultiTableFunction(String function, String db, List<String> tbls) {
       String tableNames = join(tbls, ",");
       return startFunction(function, " : db=" + db + " tbls=" + tableNames);
     }
 
-    public String startPartitionFunction(String function, String db, String tbl,
+    private String startPartitionFunction(String function, String db, String tbl,
         List<String> partVals) {
       return startFunction(function, " : db=" + db + " tbl=" + tbl
           + "[" + join(partVals, ",") + "]");
     }
 
-    public String startPartitionFunction(String function, String db, String tbl,
+    private String startPartitionFunction(String function, String db, String tbl,
         Map<String, String> partName) {
       return startFunction(function, " : db=" + db + " tbl=" + tbl + "partition=" + partName);
     }
@@ -661,12 +666,12 @@ public class HiveMetaStore extends Thrif
     private void endFunction(String function, boolean successful, Exception e) {
       endFunction(function, successful, e, null);
     }
-    public void endFunction(String function, boolean successful, Exception e,
+    private void endFunction(String function, boolean successful, Exception e,
                             String inputTableName) {
       endFunction(function, new MetaStoreEndFunctionContext(successful, e, inputTableName));
     }
 
-    public void endFunction(String function, MetaStoreEndFunctionContext context) {
+    private void endFunction(String function, MetaStoreEndFunctionContext context) {
       try {
         Metrics.endScope(function);
       } catch (IOException e) {
@@ -1653,13 +1658,6 @@ public class HiveMetaStore extends Thrif
       return tables;
     }
 
-    public boolean set_table_parameters(String dbname, String name,
-        Map<String, String> params) throws NoSuchObjectException, MetaException {
-      endFunction(startTableFunction("set_table_parameters", dbname, name), false, null, name);
-      // TODO Auto-generated method stub
-      return false;
-    }
-
     private Partition append_partition_common(RawStore ms, String dbName, String tableName,
         List<String> part_vals, EnvironmentContext envContext) throws InvalidObjectException,
         AlreadyExistsException, MetaException {
@@ -2722,13 +2720,6 @@ public class HiveMetaStore extends Thrif
       return;
     }
 
-    public boolean create_index(Index index_def)
-        throws IndexAlreadyExistsException, MetaException {
-      endFunction(startFunction("create_index"), false, null);
-      // TODO Auto-generated method stub
-      throw new MetaException("Not yet implemented");
-    }
-
     @Override
     public void alter_index(final String dbname, final String base_table_name,
         final String index_name, final Index newIndex)
@@ -3823,6 +3814,7 @@ public class HiveMetaStore extends Thrif
     public PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject,
         String userName, List<String> groupNames) throws MetaException,
         TException {
+      firePreEvent(new PreAuthorizationCallEvent(this));
       if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
         String partName = getPartName(hiveObject);
         return this.get_column_privilege_set(hiveObject.getDbName(), hiveObject
@@ -3860,7 +3852,7 @@ public class HiveMetaStore extends Thrif
       return partName;
     }
 
-    public PrincipalPrivilegeSet get_column_privilege_set(final String dbName,
+    private PrincipalPrivilegeSet get_column_privilege_set(final String dbName,
         final String tableName, final String partName, final String columnName,
         final String userName, final List<String> groupNames) throws MetaException,
         TException {
@@ -3878,7 +3870,7 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
-    public PrincipalPrivilegeSet get_db_privilege_set(final String dbName,
+    private PrincipalPrivilegeSet get_db_privilege_set(final String dbName,
         final String userName, final List<String> groupNames) throws MetaException,
         TException {
       incrementCounter("get_db_privilege_set");
@@ -3894,7 +3886,7 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
-    public PrincipalPrivilegeSet get_partition_privilege_set(
+    private PrincipalPrivilegeSet get_partition_privilege_set(
         final String dbName, final String tableName, final String partName,
         final String userName, final List<String> groupNames)
         throws MetaException, TException {
@@ -3912,7 +3904,7 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
-    public PrincipalPrivilegeSet get_table_privilege_set(final String dbName,
+    private PrincipalPrivilegeSet get_table_privilege_set(final String dbName,
         final String tableName, final String userName,
         final List<String> groupNames) throws MetaException, TException {
       incrementCounter("get_table_privilege_set");
@@ -3935,6 +3927,7 @@ public class HiveMetaStore extends Thrif
         final String grantor, final PrincipalType grantorType, final boolean grantOption)
         throws MetaException, TException {
       incrementCounter("add_role_member");
+      firePreEvent(new PreAuthorizationCallEvent(this));
       if (PUBLIC.equals(roleName)) {
         throw new MetaException("No user can be added to " + PUBLIC +". Since all users implictly"
         + " belong to " + PUBLIC + " role.");
@@ -3987,7 +3980,7 @@ public class HiveMetaStore extends Thrif
     public List<Role> list_roles(final String principalName,
         final PrincipalType principalType) throws MetaException, TException {
       incrementCounter("list_roles");
-
+      firePreEvent(new PreAuthorizationCallEvent(this));
       List<Role> result = new ArrayList<Role>();
       try {
         List<MRoleMap> roleMaps = getMS().listRoles(principalName, principalType);
@@ -4012,7 +4005,7 @@ public class HiveMetaStore extends Thrif
     public boolean create_role(final Role role)
         throws MetaException, TException {
       incrementCounter("create_role");
-
+      firePreEvent(new PreAuthorizationCallEvent(this));
       if (PUBLIC.equals(role.getRoleName())) {
          throw new MetaException(PUBLIC + " role implictly exists. It can't be created.");
       }
@@ -4031,6 +4024,7 @@ public class HiveMetaStore extends Thrif
     public boolean drop_role(final String roleName)
         throws MetaException, TException {
       incrementCounter("drop_role");
+      firePreEvent(new PreAuthorizationCallEvent(this));
       if (ADMIN.equals(roleName) || PUBLIC.equals(roleName)) {
         throw new MetaException(PUBLIC + "/" + ADMIN +" role can't be dropped.");
       }
@@ -4048,7 +4042,7 @@ public class HiveMetaStore extends Thrif
     @Override
     public List<String> get_role_names() throws MetaException, TException {
       incrementCounter("get_role_names");
-
+      firePreEvent(new PreAuthorizationCallEvent(this));
       List<String> ret = null;
       try {
         ret = getMS().listRoleNames();
@@ -4064,6 +4058,7 @@ public class HiveMetaStore extends Thrif
     public boolean grant_privileges(final PrivilegeBag privileges) throws MetaException,
         TException {
       incrementCounter("grant_privileges");
+      firePreEvent(new PreAuthorizationCallEvent(this));
       Boolean ret = null;
       try {
         ret = getMS().grantPrivileges(privileges);
@@ -4079,7 +4074,7 @@ public class HiveMetaStore extends Thrif
     public boolean revoke_role(final String roleName, final String userName,
         final PrincipalType principalType) throws MetaException, TException {
       incrementCounter("remove_role_member");
-
+      firePreEvent(new PreAuthorizationCallEvent(this));
       if (PUBLIC.equals(roleName)) {
         throw new MetaException(PUBLIC + " role can't be revoked.");
       }
@@ -4100,6 +4095,7 @@ public class HiveMetaStore extends Thrif
     public boolean revoke_privileges(final PrivilegeBag privileges)
         throws MetaException, TException {
       incrementCounter("revoke_privileges");
+      firePreEvent(new PreAuthorizationCallEvent(this));
       Boolean ret = null;
       try {
         ret = getMS().revokePrivileges(privileges);
@@ -4111,10 +4107,9 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
-    public PrincipalPrivilegeSet get_user_privilege_set(final String userName,
+    private PrincipalPrivilegeSet get_user_privilege_set(final String userName,
         final List<String> groupNames) throws MetaException, TException {
       incrementCounter("get_user_privilege_set");
-
       PrincipalPrivilegeSet ret = null;
       try {
         ret = getMS().getUserPrivilegeSet(userName, groupNames);
@@ -4126,14 +4121,11 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
-    public PrincipalType getPrincipalType(String principalType) {
-      return PrincipalType.valueOf(principalType);
-    }
-
     @Override
     public List<HiveObjectPrivilege> list_privileges(String principalName,
         PrincipalType principalType, HiveObjectRef hiveObject)
         throws MetaException, TException {
+      firePreEvent(new PreAuthorizationCallEvent(this));
       if (hiveObject.getObjectType() == null) {
         return getAllPrivileges(principalName, principalType);
       }
@@ -4178,7 +4170,7 @@ public class HiveMetaStore extends Thrif
       return privs;
     }
 
-    public List<HiveObjectPrivilege> list_table_column_privileges(
+    private List<HiveObjectPrivilege> list_table_column_privileges(
         final String principalName, final PrincipalType principalType,
         final String dbName, final String tableName, final String columnName)
         throws MetaException, TException {
@@ -4218,7 +4210,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
-    public List<HiveObjectPrivilege> list_partition_column_privileges(
+    private List<HiveObjectPrivilege> list_partition_column_privileges(
         final String principalName, final PrincipalType principalType,
         final String dbName, final String tableName, final List<String> partValues,
         final String columnName) throws MetaException, TException {
@@ -4259,7 +4251,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
-    public List<HiveObjectPrivilege> list_db_privileges(final String principalName,
+    private List<HiveObjectPrivilege> list_db_privileges(final String principalName,
         final PrincipalType principalType, final String dbName)
         throws MetaException, TException {
       incrementCounter("list_security_db_grant");
@@ -4296,7 +4288,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
-    public List<HiveObjectPrivilege> list_partition_privileges(
+    private List<HiveObjectPrivilege> list_partition_privileges(
         final String principalName, final PrincipalType principalType,
         final String dbName, final String tableName, final List<String> partValues)
         throws MetaException, TException {
@@ -4338,7 +4330,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
-    public List<HiveObjectPrivilege> list_table_privileges(
+    private List<HiveObjectPrivilege> list_table_privileges(
         final String principalName, final PrincipalType principalType,
         final String dbName, final String tableName) throws MetaException,
         TException {
@@ -4376,7 +4368,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
-    public List<HiveObjectPrivilege> list_global_privileges(
+    private List<HiveObjectPrivilege> list_global_privileges(
         final String principalName, final PrincipalType principalType)
         throws MetaException, TException {
       incrementCounter("list_security_user_grant");
@@ -4872,6 +4864,7 @@ public class HiveMetaStore extends Thrif
         throws MetaException, TException {
 
       incrementCounter("get_principals_in_role");
+      firePreEvent(new PreAuthorizationCallEvent(this));
       Exception ex = null;
       List<MRoleMap> roleMaps = null;
       try {
@@ -4892,6 +4885,7 @@ public class HiveMetaStore extends Thrif
         GetRoleGrantsForPrincipalRequest request) throws MetaException, TException {
 
       incrementCounter("get_role_grants_for_principal");
+      firePreEvent(new PreAuthorizationCallEvent(this));
       Exception ex = null;
       List<MRoleMap> roleMaps = null;
       try {
@@ -4965,6 +4959,13 @@ public class HiveMetaStore extends Thrif
   }
 
   /**
+   * @return true if remote metastore has been created
+   */
+  public static boolean isMetaStoreRemote() {
+    return isMetaStoreRemote;
+  }
+
+  /**
    * Renew a delegation token to extend its lifetime.
    *
    * @param tokenStrForm
@@ -5125,7 +5126,7 @@ public class HiveMetaStore extends Thrif
       HiveConf conf, Lock startLock, Condition startCondition,
       MetaStoreThread.BooleanPointer startedServing) throws Throwable {
     try {
-
+      isMetaStoreRemote = true;
       // Server will create new threads up to max as necessary. After an idle
       // period, it will destory threads to keep the number of threads in the
       // pool to min.
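
The MetaStoreAuthzAPIAuthorizerEmbedOnly class added by this commit (its full source is not reproduced in this message) is expected to key off this new flag. A minimal sketch of that idea, assuming the authorizer simply rejects authorization api calls whenever the metastore is remote (the exact superclass and remaining no-op methods are omitted):

  // Sketch only -- errMsg is the public message string that TestAuthorizationApiAuthorizer asserts on.
  @Override
  public void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException {
    if (HiveMetaStore.isMetaStoreRemote()) {
      throw new AuthorizationException(errMsg);
    }
  }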

Added: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAuthorizationCallEvent.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAuthorizationCallEvent.java?rev=1607753&view=auto
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAuthorizationCallEvent.java (added)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAuthorizationCallEvent.java Thu Jul  3 21:43:20 2014
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.events;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+public class PreAuthorizationCallEvent extends PreEventContext {
+
+  public PreAuthorizationCallEvent (HMSHandler handler) {
+    super(PreEventType.AUTHORIZATION_API_CALL, handler);
+  }
+
+}

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java Thu Jul  3 21:43:20 2014
@@ -36,7 +36,8 @@ public abstract class PreEventContext {
     ALTER_PARTITION,
     CREATE_DATABASE,
     DROP_DATABASE,
-    LOAD_PARTITION_DONE
+    LOAD_PARTITION_DONE,
+    AUTHORIZATION_API_CALL,
   }
 
   private final PreEventType eventType;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java Thu Jul  3 21:43:20 2014
@@ -18,8 +18,11 @@
 
 package org.apache.hadoop.hive.ql.metadata;
 
+import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -29,7 +32,7 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 import org.apache.hadoop.io.Text;
@@ -110,6 +113,10 @@ public final class HiveUtils {
   static final byte[] tabUnescapeBytes = "\t".getBytes();
   static final byte[] ctrlABytes = "\u0001".getBytes();
 
+
+  public static final Log LOG = LogFactory.getLog(HiveUtils.class);
+
+
   public static Text escapeText(Text text) {
     int length = text.getLength();
     byte[] textBytes = text.getBytes();
@@ -276,14 +283,14 @@ public final class HiveUtils {
   public static String unparseIdentifier(String identifier) {
     return unparseIdentifier(identifier, null);
   }
-  
+
   public static String unparseIdentifier(String identifier, Configuration conf) {
     // In the future, if we support arbitrary characters in
     // identifiers, then we'll need to escape any backticks
     // in identifier by doubling them up.
-    
+
     // the time has come
-    String qIdSupport = conf == null ? null : 
+    String qIdSupport = conf == null ? null :
       HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
     if ( qIdSupport != null && !"none".equals(qIdSupport) ) {
       identifier = identifier.replaceAll("`", "``");
@@ -301,8 +308,7 @@ public final class HiveUtils {
       Class<? extends HiveStorageHandler> handlerClass =
         (Class<? extends HiveStorageHandler>)
         Class.forName(className, true, JavaUtils.getClassLoader());
-      HiveStorageHandler storageHandler = (HiveStorageHandler)
-        ReflectionUtils.newInstance(handlerClass, conf);
+      HiveStorageHandler storageHandler = ReflectionUtils.newInstance(handlerClass, conf);
       return storageHandler;
     } catch (ClassNotFoundException e) {
       throw new HiveException("Error in loading storage handler."
@@ -324,8 +330,7 @@ public final class HiveUtils {
       Class<? extends HiveIndexHandler> handlerClass =
         (Class<? extends HiveIndexHandler>)
         Class.forName(indexHandlerClass, true, JavaUtils.getClassLoader());
-      HiveIndexHandler indexHandler = (HiveIndexHandler)
-        ReflectionUtils.newInstance(handlerClass, conf);
+      HiveIndexHandler indexHandler = ReflectionUtils.newInstance(handlerClass, conf);
       return indexHandler;
     } catch (ClassNotFoundException e) {
       throw new HiveException("Error in loading index handler."
@@ -334,16 +339,27 @@ public final class HiveUtils {
   }
 
   @SuppressWarnings("unchecked")
-  public static HiveAuthorizationProvider getAuthorizeProviderManager(
+  public static List<HiveMetastoreAuthorizationProvider> getMetaStoreAuthorizeProviderManagers(
       Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
       HiveAuthenticationProvider authenticator) throws HiveException {
-    return getAuthorizeProviderManager(conf, authorizationProviderConfKey, authenticator, false);
+
+    String clsStrs = HiveConf.getVar(conf, authorizationProviderConfKey);
+    if(clsStrs == null){
+      return null;
+    }
+    List<HiveMetastoreAuthorizationProvider> authProviders = new ArrayList<HiveMetastoreAuthorizationProvider>();
+    for (String clsStr : clsStrs.trim().split(",")) {
+      LOG.info("Adding metastore authorization provider: " + clsStr);
+      authProviders.add((HiveMetastoreAuthorizationProvider) getAuthorizeProviderManager(conf,
+          clsStr, authenticator, false));
+    }
+    return authProviders;
   }
 
   /**
    * Create a new instance of HiveAuthorizationProvider
    * @param conf
-   * @param authorizationProviderConfKey
+   * @param authzClassName - authorization provider class name
    * @param authenticator
    * @param nullIfOtherClass - return null if configuration
    *  does not point to a HiveAuthorizationProvider subclass
@@ -352,18 +368,16 @@ public final class HiveUtils {
    */
   @SuppressWarnings("unchecked")
   public static HiveAuthorizationProvider getAuthorizeProviderManager(
-      Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
+      Configuration conf, String authzClassName,
       HiveAuthenticationProvider authenticator, boolean nullIfOtherClass) throws HiveException {
 
-    String clsStr = HiveConf.getVar(conf, authorizationProviderConfKey);
-
     HiveAuthorizationProvider ret = null;
     try {
       Class<? extends HiveAuthorizationProvider> cls = null;
-      if (clsStr == null || clsStr.trim().equals("")) {
+      if (authzClassName == null || authzClassName.trim().equals("")) {
         cls = DefaultHiveAuthorizationProvider.class;
       } else {
-        Class<?> configClass = Class.forName(clsStr, true, JavaUtils.getClassLoader());
+        Class<?> configClass = Class.forName(authzClassName, true, JavaUtils.getClassLoader());
         if(nullIfOtherClass && !HiveAuthorizationProvider.class.isAssignableFrom(configClass) ){
           return null;
         }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java Thu Jul  3 21:43:20 2014
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
+import java.util.List;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -77,12 +79,12 @@ public class AuthorizationPreEventListen
     }
   };
 
-  private final ThreadLocal<HiveMetastoreAuthorizationProvider> tAuthorizer
-      = new ThreadLocal<HiveMetastoreAuthorizationProvider>() {
+  private final ThreadLocal<List<HiveMetastoreAuthorizationProvider>> tAuthorizers
+      = new ThreadLocal<List<HiveMetastoreAuthorizationProvider>>() {
     @Override
-    protected HiveMetastoreAuthorizationProvider initialValue() {
+    protected List<HiveMetastoreAuthorizationProvider> initialValue() {
       try {
-        return  (HiveMetastoreAuthorizationProvider) HiveUtils.getAuthorizeProviderManager(
+        return  HiveUtils.getMetaStoreAuthorizeProviderManagers(
             tConfig.get(), HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, tAuthenticator.get());
       } catch (HiveException he) {
         throw new IllegalStateException("Authorization provider instantiation failure",he);
@@ -113,12 +115,16 @@ public class AuthorizationPreEventListen
       tConfig.set(context.getHandler().getConf());
       // Warning note : HMSHandler.getHiveConf() is not thread-unique, .getConf() is.
       tAuthenticator.get().setConf(tConfig.get());
-      tAuthorizer.get().setConf(tConfig.get());
+      for(HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()){
+        authorizer.setConf(tConfig.get());
+      }
       tConfigSetOnAuths.set(true); // set so we don't repeat this initialization
     }
 
     tAuthenticator.get().setMetaStoreHandler(context.getHandler());
-    tAuthorizer.get().setMetaStoreHandler(context.getHandler());
+    for(HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()){
+      authorizer.setMetaStoreHandler(context.getHandler());
+    }
 
     switch (context.getEventType()) {
     case CREATE_TABLE:
@@ -148,18 +154,34 @@ public class AuthorizationPreEventListen
     case LOAD_PARTITION_DONE:
       // noop for now
       break;
+    case AUTHORIZATION_API_CALL:
+      authorizeAuthorizationAPICall();
     default:
       break;
     }
 
   }
 
+  private void authorizeAuthorizationAPICall() throws InvalidOperationException, MetaException {
+    for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+      try {
+        authorizer.authorizeAuthorizationApiInvocation();
+      } catch (AuthorizationException e) {
+        throw invalidOperationException(e);
+      } catch (HiveException e) {
+        throw metaException(e);
+      }
+    }
+  }
+
   private void authorizeCreateDatabase(PreCreateDatabaseEvent context)
       throws InvalidOperationException, MetaException {
     try {
-      tAuthorizer.get().authorize(new Database(context.getDatabase()),
-          HiveOperation.CREATEDATABASE.getInputRequiredPrivileges(),
-          HiveOperation.CREATEDATABASE.getOutputRequiredPrivileges());
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        authorizer.authorize(new Database(context.getDatabase()),
+            HiveOperation.CREATEDATABASE.getInputRequiredPrivileges(),
+            HiveOperation.CREATEDATABASE.getOutputRequiredPrivileges());
+      }
     } catch (AuthorizationException e) {
       throw invalidOperationException(e);
     } catch (HiveException e) {
@@ -170,9 +192,11 @@ public class AuthorizationPreEventListen
   private void authorizeDropDatabase(PreDropDatabaseEvent context)
       throws InvalidOperationException, MetaException {
     try {
-      tAuthorizer.get().authorize(new Database(context.getDatabase()),
-          HiveOperation.DROPDATABASE.getInputRequiredPrivileges(),
-          HiveOperation.DROPDATABASE.getOutputRequiredPrivileges());
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        authorizer.authorize(new Database(context.getDatabase()),
+            HiveOperation.DROPDATABASE.getInputRequiredPrivileges(),
+            HiveOperation.DROPDATABASE.getOutputRequiredPrivileges());
+      }
     } catch (AuthorizationException e) {
       throw invalidOperationException(e);
     } catch (HiveException e) {
@@ -183,9 +207,12 @@ public class AuthorizationPreEventListen
   private void authorizeCreateTable(PreCreateTableEvent context)
       throws InvalidOperationException, MetaException {
     try {
-      tAuthorizer.get().authorize(new TableWrapper(context.getTable()),
-          HiveOperation.CREATETABLE.getInputRequiredPrivileges(),
-          HiveOperation.CREATETABLE.getOutputRequiredPrivileges());
+      org.apache.hadoop.hive.ql.metadata.Table wrappedTable = new TableWrapper(context.getTable());
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        authorizer.authorize(wrappedTable,
+            HiveOperation.CREATETABLE.getInputRequiredPrivileges(),
+            HiveOperation.CREATETABLE.getOutputRequiredPrivileges());
+      }
     } catch (AuthorizationException e) {
       throw invalidOperationException(e);
     } catch (HiveException e) {
@@ -196,9 +223,12 @@ public class AuthorizationPreEventListen
   private void authorizeDropTable(PreDropTableEvent context)
       throws InvalidOperationException, MetaException {
     try {
-      tAuthorizer.get().authorize(new TableWrapper(context.getTable()),
-          HiveOperation.DROPTABLE.getInputRequiredPrivileges(),
-          HiveOperation.DROPTABLE.getOutputRequiredPrivileges());
+      org.apache.hadoop.hive.ql.metadata.Table wrappedTable = new TableWrapper(context.getTable());
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        authorizer.authorize(wrappedTable,
+            HiveOperation.DROPTABLE.getInputRequiredPrivileges(),
+            HiveOperation.DROPTABLE.getOutputRequiredPrivileges());
+      }
     } catch (AuthorizationException e) {
       throw invalidOperationException(e);
     } catch (HiveException e) {
@@ -208,10 +238,14 @@ public class AuthorizationPreEventListen
 
   private void authorizeAlterTable(PreAlterTableEvent context)
       throws InvalidOperationException, MetaException {
+
     try {
-      tAuthorizer.get().authorize(new TableWrapper(context.getOldTable()),
-          null,
-          new Privilege[]{Privilege.ALTER_METADATA});
+      org.apache.hadoop.hive.ql.metadata.Table wrappedTable = new TableWrapper(context.getOldTable());
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        authorizer.authorize(wrappedTable,
+            null,
+            new Privilege[]{Privilege.ALTER_METADATA});
+      }
     } catch (AuthorizationException e) {
       throw invalidOperationException(e);
     } catch (HiveException e) {
@@ -223,9 +257,13 @@ public class AuthorizationPreEventListen
       throws InvalidOperationException, MetaException {
     try {
       for (org.apache.hadoop.hive.metastore.api.Partition mapiPart : context.getPartitions()) {
-        tAuthorizer.get().authorize(new PartitionWrapper(mapiPart, context),
-            HiveOperation.ALTERTABLE_ADDPARTS.getInputRequiredPrivileges(),
-            HiveOperation.ALTERTABLE_ADDPARTS.getOutputRequiredPrivileges());
+        org.apache.hadoop.hive.ql.metadata.Partition wrappedPartiton = new PartitionWrapper(
+            mapiPart, context);
+        for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+          authorizer.authorize(wrappedPartiton,
+              HiveOperation.ALTERTABLE_ADDPARTS.getInputRequiredPrivileges(),
+              HiveOperation.ALTERTABLE_ADDPARTS.getOutputRequiredPrivileges());
+        }
       }
     } catch (AuthorizationException e) {
       throw invalidOperationException(e);
@@ -240,9 +278,13 @@ public class AuthorizationPreEventListen
       throws InvalidOperationException, MetaException {
     try {
       org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getPartition();
-      tAuthorizer.get().authorize(new PartitionWrapper(mapiPart, context),
-          HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
-          HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges());
+      org.apache.hadoop.hive.ql.metadata.Partition wrappedPartition = new PartitionWrapper(
+          mapiPart, context);
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        authorizer.authorize(wrappedPartition,
+            HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
+            HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges());
+      }
     } catch (AuthorizationException e) {
       throw invalidOperationException(e);
     } catch (NoSuchObjectException e) {
@@ -256,9 +298,13 @@ public class AuthorizationPreEventListen
       throws InvalidOperationException, MetaException {
     try {
       org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getNewPartition();
-      tAuthorizer.get().authorize(new PartitionWrapper(mapiPart, context),
-          null,
-          new Privilege[]{Privilege.ALTER_METADATA});
+      org.apache.hadoop.hive.ql.metadata.Partition wrappedPartition = new PartitionWrapper(
+          mapiPart, context);
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        authorizer.authorize(wrappedPartition,
+            null,
+            new Privilege[]{Privilege.ALTER_METADATA});
+      }
     } catch (AuthorizationException e) {
       throw invalidOperationException(e);
     } catch (NoSuchObjectException e) {

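With these changes the pre-event listener walks a list of metastore authorization providers rather than a single one, and each provider in the list must allow the event. As a minimal sketch of how such a chain might be wired up programmatically (the helper class and the particular pair of providers are assumptions for illustration, not something this patch mandates):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class MultiAuthorizerConfExample {
      // Sketch (hypothetical helper): supply a comma-separated list of
      // metastore authorization providers; AuthorizationPreEventListener
      // instantiates all of them, and the loops above then consult each
      // provider for every pre-event.
      public static HiveConf buildConf() {
        HiveConf conf = new HiveConf();
        conf.set("hive.security.metastore.authorization.manager",
            "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider,"
            + "org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider");
        return conf;
      }
    }
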
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java Thu Jul  3 21:43:20 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.securi
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 public class DefaultHiveMetastoreAuthorizationProvider extends BitSetCheckedAuthorizationProvider
@@ -35,5 +36,10 @@ public class DefaultHiveMetastoreAuthori
     hive_db.setHandler(handler);
   }
 
+  @Override
+  public void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException {
+    // default no-op implementation
+  }
+
 
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java Thu Jul  3 21:43:20 2014
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hive.ql.security.authorization;
 
 import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**
  * HiveMetastoreAuthorizationProvider : An extension of HiveAuthorizationProvider
@@ -37,4 +39,10 @@ public interface HiveMetastoreAuthorizat
    */
   void setMetaStoreHandler(HMSHandler handler);
 
+  /**
+   * Authorize metastore authorization api call.
+   */
+  void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException;
+
+
 }

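The interface gains authorizeAuthorizationApiInvocation(), which every metastore authorization provider must now implement. As a rough illustration of a restrictive implementation (the class name and error message below are hypothetical and not part of this patch), a provider could extend the default one and veto every authorization-API call:

    package org.apache.hadoop.hive.ql.security.authorization;

    import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    // Hypothetical example: inherit the object-level checks from the default
    // metastore provider, but reject authorization-API calls (grant/revoke/role
    // operations) unconditionally.
    public class DenyAuthzApiCallsProvider extends DefaultHiveMetastoreAuthorizationProvider {
      @Override
      public void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException {
        throw new AuthorizationException("authorization api calls are not permitted by this provider");
      }
    }
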
Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java?rev=1607753&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java Thu Jul  3 21:43:20 2014
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ * If this authorizer is used, it allows authorization api to be invoked only in embedded
+ * metastore mode.
+ */
+public class MetaStoreAuthzAPIAuthorizerEmbedOnly extends HiveAuthorizationProviderBase
+    implements HiveMetastoreAuthorizationProvider {
+
+  public static final String errMsg = "Metastore Authorization api invocation for "
+      + "remote metastore is disabled in this configuration.";
+
+  @Override
+  public void init(Configuration conf) throws HiveException {
+  }
+
+  @Override
+  public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+      throws HiveException, AuthorizationException {
+    // not authorized by this implementation, ie operation is allowed by it
+  }
+
+  @Override
+  public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+      throws HiveException, AuthorizationException {
+    // not authorized by this implementation, ie operation is allowed by it
+  }
+
+  @Override
+  public void authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+      throws HiveException, AuthorizationException {
+    // not authorized by this implementation, ie operation is allowed by it
+  }
+
+  @Override
+  public void authorize(Partition part, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+      throws HiveException, AuthorizationException {
+    // not authorized by this implementation, ie operation is allowed by it
+  }
+
+  @Override
+  public void authorize(Table table, Partition part, List<String> columns,
+      Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException,
+      AuthorizationException {
+    // not authorized by this implementation, ie operation is allowed by it
+  }
+
+  @Override
+  public void setMetaStoreHandler(HMSHandler handler) {
+    // no-op - HMSHander not needed by this impl
+  }
+
+  @Override
+  public void authorizeAuthorizationApiInvocation() throws AuthorizationException {
+    if (HiveMetaStore.isMetaStoreRemote()) {
+      throw new AuthorizationException(errMsg);
+    }
+  }
+
+
+}

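To put the new class to work, a deployment would typically list it in hive.security.metastore.authorization.manager alongside an object-level provider; the combination below is an assumed setup for illustration, not something the patch itself configures. With it, authorization-API calls such as grant/revoke fail with errMsg whenever the metastore runs as a remote service, while other metastore operations remain subject to the storage-based checks.

    import org.apache.hadoop.hive.conf.HiveConf;

    public class EmbedOnlyAuthzConfExample {
      // Sketch (hypothetical helper): chain the embed-only authorizer with
      // storage-based authorization. Remote clients then cannot invoke the
      // metastore authorization api, while DDL continues to be checked against
      // the underlying file system permissions.
      public static HiveConf buildConf() {
        HiveConf conf = new HiveConf();
        conf.set("hive.security.metastore.authorization.manager",
            "org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly,"
            + "org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider");
        return conf;
      }
    }
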
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java Thu Jul  3 21:43:20 2014
@@ -402,4 +402,9 @@ public class StorageBasedAuthorizationPr
     return ace;
   }
 
+  @Override
+  public void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException {
+    // no-op - SBA does not attempt to authorize auth api call. Allow it
+  }
+
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1607753&r1=1607752&r2=1607753&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Thu Jul  3 21:43:20 2014
@@ -374,8 +374,9 @@ public class SessionState {
           HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
       authenticator.setSessionState(this);
 
+      String clsStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
       authorizer = HiveUtils.getAuthorizeProviderManager(conf,
-          HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, authenticator, true);
+          clsStr, authenticator, true);
 
       if (authorizer == null) {
         // if it was null, the new authorization plugin must be specified in


