sentry-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From co...@apache.org
Subject [12/13] sentry git commit: SENTRY-999: Refactor the sentry to integrate with external components quickly (Colin Ma, reviewed by Dapeng Sun)
Date Fri, 22 Apr 2016 06:28:32 GMT
http://git-wip-us.apache.org/repos/asf/sentry/blob/d94e900a/SENTRY-999.001.patch
----------------------------------------------------------------------
diff --git a/SENTRY-999.001.patch b/SENTRY-999.001.patch
new file mode 100644
index 0000000..865a343
--- /dev/null
+++ b/SENTRY-999.001.patch
@@ -0,0 +1,18685 @@
+diff --git a/pom.xml b/pom.xml
+index e288593..914f436 100644
+--- a/pom.xml
++++ b/pom.xml
+@@ -475,7 +475,7 @@ limitations under the License.
+       </dependency>
+       <dependency>
+         <groupId>org.apache.sentry</groupId>
+-        <artifactId>sentry-policy-db</artifactId>
++        <artifactId>sentry-policy-engine</artifactId>
+         <version>${project.version}</version>
+       </dependency>
+       <dependency>
+@@ -485,21 +485,6 @@ limitations under the License.
+       </dependency>
+       <dependency>
+         <groupId>org.apache.sentry</groupId>
+-        <artifactId>sentry-policy-search</artifactId>
+-        <version>${project.version}</version>
+-      </dependency>
+-      <dependency>
+-        <groupId>org.apache.sentry</groupId>
+-        <artifactId>sentry-policy-sqoop</artifactId>
+-        <version>${project.version}</version>
+-      </dependency>
+-      <dependency>
+-        <groupId>org.apache.sentry</groupId>
+-        <artifactId>sentry-policy-kafka</artifactId>
+-        <version>${project.version}</version>
+-      </dependency>
+-      <dependency>
+-        <groupId>org.apache.sentry</groupId>
+         <artifactId>sentry-dist</artifactId>
+         <version>${project.version}</version>
+       </dependency>
+diff --git a/sentry-binding/sentry-binding-hive-common/pom.xml b/sentry-binding/sentry-binding-hive-common/pom.xml
+index 5f00dd2..18b422d 100644
+--- a/sentry-binding/sentry-binding-hive-common/pom.xml
++++ b/sentry-binding/sentry-binding-hive-common/pom.xml
+@@ -71,10 +71,6 @@ limitations under the License.
+       <dependency>
+         <groupId>org.apache.sentry</groupId>
+         <artifactId>sentry-provider-cache</artifactId>
+-      </dependency>
+-    <dependency>
+-      <groupId>org.apache.sentry</groupId>
+-      <artifactId>sentry-policy-db</artifactId>
+     </dependency>
+     <dependency>
+       <groupId>org.apache.hadoop</groupId>
+diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
+index 630bef3..06fe1fe 100644
+--- a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
++++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
+@@ -24,7 +24,7 @@ import java.util.Map;
+ import java.util.Set;
+ 
+ import org.apache.hadoop.conf.Configuration;
+-import org.apache.sentry.policy.common.PolicyConstants;
++import org.apache.sentry.core.common.utils.SentryConstants;
+ import org.apache.sentry.provider.common.PolicyFileConstants;
+ import org.apache.sentry.provider.common.ProviderBackendContext;
+ import org.apache.sentry.provider.file.SimpleFileProviderBackend;
+@@ -152,8 +152,8 @@ public class SentryIniPolicyFileFormatter implements SentryPolicyFileFormatter {
+     List<String> lines = Lists.newArrayList();
+     lines.add("[" + name + "]");
+     for (Map.Entry<String, Set<String>> entry : mappingData.entrySet()) {
+-      lines.add(PolicyConstants.KV_JOINER.join(entry.getKey(),
+-          PolicyConstants.ROLE_JOINER.join(entry.getValue())));
++      lines.add(SentryConstants.KV_JOINER.join(entry.getKey(),
++          SentryConstants.ROLE_JOINER.join(entry.getValue())));
+     }
+     return Joiner.on(NL).join(lines);
+   }
+diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
+index 0a1d0e8..775a1f5 100644
+--- a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
++++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzBinding.java
+@@ -34,11 +34,13 @@ import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+ import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+ import org.apache.sentry.binding.hive.conf.InvalidConfigurationException;
+ import org.apache.sentry.core.common.ActiveRoleSet;
++import org.apache.sentry.core.common.Model;
+ import org.apache.sentry.core.common.Subject;
+ import org.apache.sentry.core.model.db.AccessConstants;
+ import org.apache.sentry.core.model.db.DBModelAction;
+ import org.apache.sentry.core.model.db.DBModelAuthorizable;
+ import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
++import org.apache.sentry.core.model.db.HivePrivilegeModel;
+ import org.apache.sentry.core.model.db.Server;
+ import org.apache.sentry.policy.common.PolicyEngine;
+ import org.apache.sentry.provider.cache.PrivilegeCache;
+@@ -60,6 +62,7 @@ public class HiveAuthzBinding {
+   private static final Splitter ROLE_SET_SPLITTER = Splitter.on(",").trimResults()
+       .omitEmptyStrings();
+   public static final String HIVE_BINDING_TAG = "hive.authz.bindings.tag";
++  public static final String HIVE_POLICY_ENGINE_OLD = "org.apache.sentry.policy.db.SimpleDBPolicyEngine";
+ 
+   private final HiveConf hiveConf;
+   private final Server authServer;
+@@ -206,6 +209,11 @@ public class HiveAuthzBinding {
+     String providerBackendName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_BACKEND.getVar());
+     String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
+ 
++    // for backward compatibility
++    if (HIVE_POLICY_ENGINE_OLD.equals(policyEngineName)) {
++      policyEngineName = AuthzConfVars.AUTHZ_POLICY_ENGINE.getDefault();
++    }
++
+     LOG.debug("Using authorization provider " + authProviderName +
+         " with resource " + resourceName + ", policy engine "
+         + policyEngineName + ", provider backend " + providerBackendName);
+@@ -216,19 +224,28 @@ public class HiveAuthzBinding {
+     ProviderBackend providerBackend = (ProviderBackend) providerBackendConstructor.
+         newInstance(new Object[] {authzConf, resourceName});
+ 
++    // create backendContext
++    ProviderBackendContext context = new ProviderBackendContext();
++    context.setAllowPerDatabase(true);
++    context.setValidators(HivePrivilegeModel.getInstance().getPrivilegeValidators(serverName));
++    // initialize the backend with the context
++    providerBackend.initialize(context);
++
++
+     // load the policy engine class
+     Constructor<?> policyConstructor =
+-      Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
++      Class.forName(policyEngineName).getDeclaredConstructor(ProviderBackend.class);
+     policyConstructor.setAccessible(true);
+     PolicyEngine policyEngine = (PolicyEngine) policyConstructor.
+-        newInstance(new Object[] {serverName, providerBackend});
++        newInstance(new Object[] {providerBackend});
+ 
+ 
+     // load the authz provider class
+     Constructor<?> constrctor =
+-      Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
++      Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class, Model.class);
+     constrctor.setAccessible(true);
+-    return (AuthorizationProvider) constrctor.newInstance(new Object[] {resourceName, policyEngine});
++    return (AuthorizationProvider) constrctor.newInstance(new Object[] {resourceName, policyEngine,
++            HivePrivilegeModel.getInstance()});
+   }
+ 
+   // Instantiate the authz provider using PrivilegeCache, this method is used for metadata filter function.
+@@ -238,7 +255,13 @@ public class HiveAuthzBinding {
+     String authProviderName = authzConf.get(AuthzConfVars.AUTHZ_PROVIDER.getVar());
+     String resourceName =
+             authzConf.get(AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar());
+-    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar());
++    String policyEngineName = authzConf.get(AuthzConfVars.AUTHZ_POLICY_ENGINE.getVar(),
++            AuthzConfVars.AUTHZ_POLICY_ENGINE.getDefault());
++
++    // for backward compatibility
++    if (HIVE_POLICY_ENGINE_OLD.equals(policyEngineName)) {
++      policyEngineName = AuthzConfVars.AUTHZ_POLICY_ENGINE.getDefault();
++    }
+ 
+     LOG.debug("Using authorization provider " + authProviderName +
+             " with resource " + resourceName + ", policy engine "
+@@ -251,19 +274,19 @@ public class HiveAuthzBinding {
+ 
+     // load the policy engine class
+     Constructor<?> policyConstructor =
+-            Class.forName(policyEngineName).getDeclaredConstructor(String.class, ProviderBackend.class);
++            Class.forName(policyEngineName).getDeclaredConstructor(ProviderBackend.class);
+     policyConstructor.setAccessible(true);
+     PolicyEngine policyEngine = (PolicyEngine) policyConstructor.
+-            newInstance(new Object[] {serverName, providerBackend});
++            newInstance(new Object[] {providerBackend});
+ 
+     // load the authz provider class
+     Constructor<?> constrctor =
+-            Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class);
++            Class.forName(authProviderName).getDeclaredConstructor(String.class, PolicyEngine.class, Model.class);
+     constrctor.setAccessible(true);
+-    return (AuthorizationProvider) constrctor.newInstance(new Object[] {resourceName, policyEngine});
++    return (AuthorizationProvider) constrctor.newInstance(new Object[] {resourceName, policyEngine,
++            HivePrivilegeModel.getInstance()});
+   }
+ 
+-
+   /**
+    * Validate the privilege for the given operation for the given subject
+    * @param hiveOp
+diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
+index 5a89af2..ad19b37 100644
+--- a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
++++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
+@@ -92,7 +92,7 @@ public class HiveAuthzConf extends Configuration {
+       "org.apache.sentry.provider.common.HadoopGroupResourceAuthorizationProvider"),
+     AUTHZ_PROVIDER_RESOURCE("sentry.hive.provider.resource", ""),
+     AUTHZ_PROVIDER_BACKEND("sentry.hive.provider.backend", "org.apache.sentry.provider.file.SimpleFileProviderBackend"),
+-    AUTHZ_POLICY_ENGINE("sentry.hive.policy.engine", "org.apache.sentry.policy.db.SimpleDBPolicyEngine"),
++    AUTHZ_POLICY_ENGINE("sentry.hive.policy.engine", "org.apache.sentry.policy.engine.common.CommonPolicyEngine"),
+     AUTHZ_POLICY_FILE_FORMATTER(
+         "sentry.hive.policy.file.formatter",
+         "org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter"),
+diff --git a/sentry-binding/sentry-binding-hive/pom.xml b/sentry-binding/sentry-binding-hive/pom.xml
+index b769488..07aaae3 100644
+--- a/sentry-binding/sentry-binding-hive/pom.xml
++++ b/sentry-binding/sentry-binding-hive/pom.xml
+@@ -70,6 +70,31 @@ limitations under the License.
+       <artifactId>sentry-binding-hive-common</artifactId>
+     </dependency>
+     <dependency>
++      <groupId>org.apache.sentry</groupId>
++      <artifactId>sentry-core-common</artifactId>
++    </dependency>
++    <dependency>
++      <groupId>org.apache.sentry</groupId>
++      <artifactId>sentry-core-model-db</artifactId>
++    </dependency>
++    <dependency>
++      <groupId>org.apache.sentry</groupId>
++      <artifactId>sentry-provider-common</artifactId>
++    </dependency>
++    <!-- required for SentryGrantRevokeTask -->
++    <dependency>
++      <groupId>org.apache.sentry</groupId>
++      <artifactId>sentry-provider-db</artifactId>
++    </dependency>
++    <dependency>
++      <groupId>org.apache.sentry</groupId>
++      <artifactId>sentry-provider-file</artifactId>
++    </dependency>
++      <dependency>
++        <groupId>org.apache.sentry</groupId>
++        <artifactId>sentry-provider-cache</artifactId>
++      </dependency>
++    <dependency>
+       <groupId>org.apache.hadoop</groupId>
+       <artifactId>hadoop-common</artifactId>
+       <scope>provided</scope>
+@@ -90,6 +115,11 @@ limitations under the License.
+       <groupId>org.apache.sentry</groupId>
+       <artifactId>sentry-provider-db</artifactId>
+     </dependency>
++    <dependency>
++      <groupId>org.apache.hadoop</groupId>
++      <artifactId>hadoop-minicluster</artifactId>
++      <scope>test</scope>
++    </dependency>
+   </dependencies>
+ 
+ </project>
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryIniPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryIniPolicyFileFormatter.java
+index 2bfc339..0e7ee3d 100644
+--- a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryIniPolicyFileFormatter.java
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryIniPolicyFileFormatter.java
+@@ -25,7 +25,7 @@ import java.util.Map;
+ import java.util.Set;
+ 
+ import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+-import org.apache.sentry.policy.common.PolicyConstants;
++import org.apache.sentry.core.common.utils.SentryConstants;
+ import org.apache.sentry.provider.common.PolicyFileConstants;
+ import org.junit.Test;
+ 
+@@ -208,8 +208,8 @@ public class TestSentryIniPolicyFileFormatter {
+       for (String actualPrivilege : actualPrivileges) {
+         boolean isFound = exceptedPrivileges.contains(actualPrivilege);
+         if (!isFound) {
+-          String withOptionPrivilege = PolicyConstants.AUTHORIZABLE_JOINER.join(actualPrivilege,
+-              PolicyConstants.KV_JOINER.join(PolicyFileConstants.PRIVILEGE_GRANT_OPTION_NAME,
++          String withOptionPrivilege = SentryConstants.AUTHORIZABLE_JOINER.join(actualPrivilege,
++              SentryConstants.KV_JOINER.join(PolicyFileConstants.PRIVILEGE_GRANT_OPTION_NAME,
+                   "false"));
+           isFound = exceptedPrivileges.contains(withOptionPrivilege);
+         }
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/AbstractTestSimplePolicyEngine.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/AbstractTestSimplePolicyEngine.java
+new file mode 100644
+index 0000000..df8443c
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/AbstractTestSimplePolicyEngine.java
+@@ -0,0 +1,156 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.sentry.policy.hive;
++
++import java.io.File;
++import java.io.IOException;
++import java.util.Set;
++import java.util.TreeSet;
++
++import org.junit.Assert;
++
++import org.apache.commons.io.FileUtils;
++import org.apache.sentry.core.common.ActiveRoleSet;
++import org.apache.sentry.policy.common.PolicyEngine;
++import org.junit.After;
++import org.junit.AfterClass;
++import org.junit.Before;
++import org.junit.BeforeClass;
++import org.junit.Test;
++
++import com.google.common.collect.Sets;
++import com.google.common.io.Files;
++
++public abstract class AbstractTestSimplePolicyEngine {
++  private static final String PERM_SERVER1_CUSTOMERS_SELECT = "server=server1->db=customers->table=purchases->action=select";
++  private static final String PERM_SERVER1_CUSTOMERS_DB_CUSTOMERS_PARTIAL_SELECT = "server=server1->db=customers->table=purchases_partial->action=select";
++  private static final String PERM_SERVER1_ANALYST_ALL = "server=server1->db=analyst1";
++  private static final String PERM_SERVER1_JUNIOR_ANALYST_ALL = "server=server1->db=jranalyst1";
++  private static final String PERM_SERVER1_JUNIOR_ANALYST_READ = "server=server1->db=jranalyst1->table=*->action=select";
++  private static final String PERM_SERVER1_OTHER_GROUP_DB_CUSTOMERS_SELECT = "server=server1->db=other_group_db->table=purchases->action=select";
++
++  private static final String PERM_SERVER1_ADMIN = "server=server1";
++  private PolicyEngine policy;
++  private static File baseDir;
++
++  @BeforeClass
++  public static void setupClazz() throws IOException {
++    baseDir = Files.createTempDir();
++  }
++
++  @AfterClass
++  public static void teardownClazz() throws IOException {
++    if(baseDir != null) {
++      FileUtils.deleteQuietly(baseDir);
++    }
++  }
++
++  protected void setPolicy(PolicyEngine policy) {
++    this.policy = policy;
++  }
++  protected static File getBaseDir() {
++    return baseDir;
++  }
++  @Before
++  public void setup() throws IOException {
++    afterSetup();
++  }
++  @After
++  public void teardown() throws IOException {
++    beforeTeardown();
++  }
++  protected void afterSetup() throws IOException {
++
++  }
++
++  protected void beforeTeardown() throws IOException {
++
++  }
++
++  @Test
++  public void testManager() throws Exception {
++    Set<String> expected = Sets.newTreeSet(Sets.newHashSet(
++        PERM_SERVER1_CUSTOMERS_SELECT, PERM_SERVER1_ANALYST_ALL,
++        PERM_SERVER1_JUNIOR_ANALYST_ALL, PERM_SERVER1_JUNIOR_ANALYST_READ,
++        PERM_SERVER1_CUSTOMERS_DB_CUSTOMERS_PARTIAL_SELECT
++        ));
++    Assert.assertEquals(expected.toString(),
++        new TreeSet<String>(policy.getAllPrivileges(set("manager"), ActiveRoleSet.ALL))
++        .toString());
++  }
++
++  @Test
++  public void testAnalyst() throws Exception {
++    Set<String> expected = Sets.newTreeSet(Sets.newHashSet(
++        PERM_SERVER1_CUSTOMERS_SELECT, PERM_SERVER1_ANALYST_ALL,
++        PERM_SERVER1_JUNIOR_ANALYST_READ));
++    Assert.assertEquals(expected.toString(),
++        new TreeSet<String>(policy.getAllPrivileges(set("analyst"), ActiveRoleSet.ALL))
++        .toString());
++  }
++
++  @Test
++  public void testJuniorAnalyst() throws Exception {
++    Set<String> expected = Sets.newTreeSet(Sets
++        .newHashSet(PERM_SERVER1_JUNIOR_ANALYST_ALL,
++            PERM_SERVER1_CUSTOMERS_DB_CUSTOMERS_PARTIAL_SELECT));
++    Assert.assertEquals(expected.toString(),
++        new TreeSet<String>(policy.getAllPrivileges(set("jranalyst"), ActiveRoleSet.ALL))
++        .toString());
++  }
++
++  @Test
++  public void testAdmin() throws Exception {
++    Set<String> expected = Sets.newTreeSet(Sets.newHashSet(PERM_SERVER1_ADMIN));
++    Assert.assertEquals(expected.toString(),
++        new TreeSet<String>(policy.getAllPrivileges(set("admin"), ActiveRoleSet.ALL))
++        .toString());
++  }
++
++
++  @Test
++  public void testOtherGroup() throws Exception {
++    Set<String> expected = Sets.newTreeSet(Sets.newHashSet(
++        PERM_SERVER1_OTHER_GROUP_DB_CUSTOMERS_SELECT));
++    Assert.assertEquals(expected.toString(),
++        new TreeSet<String>(policy.getAllPrivileges(set("other_group"), ActiveRoleSet.ALL))
++        .toString());
++  }
++
++  @Test
++  public void testDbAll() throws Exception {
++    Set<String> expected = Sets.newTreeSet(Sets
++        .newHashSet(PERM_SERVER1_JUNIOR_ANALYST_ALL,
++            PERM_SERVER1_CUSTOMERS_DB_CUSTOMERS_PARTIAL_SELECT));
++    Assert.assertEquals(expected.toString(),
++        new TreeSet<String>(policy.getAllPrivileges(set("jranalyst"), ActiveRoleSet.ALL))
++        .toString());
++  }
++
++  @Test
++  public void testDbAllforOtherGroup() throws Exception {
++    Set<String> expected = Sets.newTreeSet(Sets.newHashSet(
++        PERM_SERVER1_OTHER_GROUP_DB_CUSTOMERS_SELECT));
++    Assert.assertEquals(expected.toString(),
++        new TreeSet<String>(policy.getAllPrivileges(set("other_group"), ActiveRoleSet.ALL))
++        .toString());
++  }
++
++  private static Set<String> set(String... values) {
++    return Sets.newHashSet(values);
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/DBPolicyTestUtil.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/DBPolicyTestUtil.java
+new file mode 100644
+index 0000000..854acbe
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/DBPolicyTestUtil.java
+@@ -0,0 +1,45 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.sentry.policy.hive;
++
++import org.apache.hadoop.conf.Configuration;
++import org.apache.sentry.core.model.db.HivePrivilegeModel;
++import org.apache.sentry.policy.common.PolicyEngine;
++import org.apache.sentry.policy.engine.common.CommonPolicyEngine;
++import org.apache.sentry.provider.common.ProviderBackend;
++import org.apache.sentry.provider.common.ProviderBackendContext;
++import org.apache.sentry.provider.file.SimpleFileProviderBackend;
++
++import java.io.IOException;
++
++public class DBPolicyTestUtil {
++
++  public static PolicyEngine createPolicyEngineForTest(String server, String resource) throws IOException {
++
++    ProviderBackend providerBackend = new SimpleFileProviderBackend(new Configuration(), resource);
++
++    // create backendContext
++    ProviderBackendContext context = new ProviderBackendContext();
++    context.setAllowPerDatabase(true);
++    context.setValidators(HivePrivilegeModel.getInstance().getPrivilegeValidators(server));
++    // initialize the backend with the context
++    providerBackend.initialize(context);
++
++
++    return new CommonPolicyEngine(providerBackend);
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestDBModelAuthorizables.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestDBModelAuthorizables.java
+new file mode 100644
+index 0000000..fba2e1c
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestDBModelAuthorizables.java
+@@ -0,0 +1,77 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++
++package org.apache.sentry.policy.hive;
++import static junit.framework.Assert.assertEquals;
++import static junit.framework.Assert.assertNull;
++
++import org.apache.sentry.core.model.db.AccessURI;
++import org.apache.sentry.core.model.db.DBModelAuthorizables;
++import org.apache.sentry.core.model.db.Database;
++import org.apache.sentry.core.model.db.Server;
++import org.apache.sentry.core.model.db.Table;
++import org.apache.sentry.core.model.db.View;
++import org.junit.Test;
++
++public class TestDBModelAuthorizables {
++
++  @Test
++  public void testServer() throws Exception {
++    Server server = (Server) DBModelAuthorizables.from("SeRvEr=server1");
++    assertEquals("server1", server.getName());
++  }
++  @Test
++  public void testDb() throws Exception {
++    Database db = (Database)DBModelAuthorizables.from("dB=db1");
++    assertEquals("db1", db.getName());
++  }
++  @Test
++  public void testTable() throws Exception {
++    Table table = (Table)DBModelAuthorizables.from("tAbLe=t1");
++    assertEquals("t1", table.getName());
++  }
++  @Test
++  public void testView() throws Exception {
++    View view = (View)DBModelAuthorizables.from("vIeW=v1");
++    assertEquals("v1", view.getName());
++  }
++  @Test
++  public void testURI() throws Exception {
++    AccessURI uri = (AccessURI)DBModelAuthorizables.from("UrI=hdfs://uri1:8200/blah");
++    assertEquals("hdfs://uri1:8200/blah", uri.getName());
++  }
++
++  @Test(expected=IllegalArgumentException.class)
++  public void testNoKV() throws Exception {
++    System.out.println(DBModelAuthorizables.from("nonsense"));
++  }
++
++  @Test(expected=IllegalArgumentException.class)
++  public void testEmptyKey() throws Exception {
++    System.out.println(DBModelAuthorizables.from("=v"));
++  }
++  @Test(expected=IllegalArgumentException.class)
++  public void testEmptyValue() throws Exception {
++    System.out.println(DBModelAuthorizables.from("k="));
++  }
++  @Test
++  public void testNotAuthorizable() throws Exception {
++    assertNull(DBModelAuthorizables.from("k=v"));
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestDatabaseRequiredInRole.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestDatabaseRequiredInRole.java
+new file mode 100644
+index 0000000..24f3ae9
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestDatabaseRequiredInRole.java
+@@ -0,0 +1,50 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++package org.apache.sentry.policy.hive;
++
++import org.junit.Assert;
++
++import org.apache.sentry.core.common.validator.PrivilegeValidatorContext;
++import org.apache.sentry.core.model.db.validator.DatabaseRequiredInPrivilege;
++import org.apache.shiro.config.ConfigurationException;
++import org.junit.Test;
++
++public class TestDatabaseRequiredInRole {
++
++  @Test
++  public void testURIInPerDbPolicyFile() throws Exception {
++    DatabaseRequiredInPrivilege dbRequiredInRole = new DatabaseRequiredInPrivilege();
++    System.setProperty("sentry.allow.uri.db.policyfile", "true");
++    dbRequiredInRole.validate(new PrivilegeValidatorContext("db1",
++      "server=server1->URI=file:///user/db/warehouse/tab1"));
++    System.setProperty("sentry.allow.uri.db.policyfile", "false");
++  }
++
++  @Test
++  public void testURIWithDBInPerDbPolicyFile() throws Exception {
++    DatabaseRequiredInPrivilege dbRequiredInRole = new DatabaseRequiredInPrivilege();
++    try {
++      dbRequiredInRole.validate(new PrivilegeValidatorContext("db1",
++        "server=server1->db=db1->URI=file:///user/db/warehouse/tab1"));
++      Assert.fail("Expected ConfigurationException");
++    } catch (ConfigurationException e) {
++      // expected
++    }
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestPolicyParsingNegative.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestPolicyParsingNegative.java
+new file mode 100644
+index 0000000..4dc8812
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestPolicyParsingNegative.java
+@@ -0,0 +1,194 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.sentry.policy.hive;
++
++import java.io.File;
++import java.io.IOException;
++
++import org.junit.Assert;
++
++import org.apache.commons.io.FileUtils;
++import org.apache.sentry.core.common.ActiveRoleSet;
++import org.apache.sentry.policy.common.PolicyEngine;
++import org.apache.sentry.provider.file.PolicyFile;
++import org.junit.After;
++import org.junit.Before;
++import org.junit.Test;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
++
++import com.google.common.base.Charsets;
++import com.google.common.collect.ImmutableSet;
++import com.google.common.collect.Sets;
++import com.google.common.io.Files;
++
++public class TestPolicyParsingNegative {
++
++  @SuppressWarnings("unused")
++  private static final Logger LOGGER = LoggerFactory
++      .getLogger(TestPolicyParsingNegative.class);
++
++  private File baseDir;
++  private File globalPolicyFile;
++  private File otherPolicyFile;
++
++  @Before
++  public void setup() {
++    baseDir = Files.createTempDir();
++    globalPolicyFile = new File(baseDir, "global.ini");
++    otherPolicyFile = new File(baseDir, "other.ini");
++  }
++
++  @After
++  public void teardown() {
++    if(baseDir != null) {
++      FileUtils.deleteQuietly(baseDir);
++    }
++  }
++
++  private void append(String from, File to) throws IOException {
++    Files.append(from + "\n", to, Charsets.UTF_8);
++  }
++
++  @Test
++  public void testUnauthorizedDbSpecifiedInDBPolicyFile() throws Exception {
++    append("[databases]", globalPolicyFile);
++    append("other_group_db = " + otherPolicyFile.getPath(), globalPolicyFile);
++    append("[groups]", otherPolicyFile);
++    append("other_group = malicious_role", otherPolicyFile);
++    append("[roles]", otherPolicyFile);
++    append("malicious_role = server=server1->db=customers->table=purchases->action=select", otherPolicyFile);
++    PolicyEngine policy = DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++    ImmutableSet<String> permissions = policy.getAllPrivileges(Sets.newHashSet("other_group"), ActiveRoleSet.ALL);
++    Assert.assertTrue(permissions.toString(), permissions.isEmpty());
++  }
++  @Test
++  public void testPerDbFileCannotContainUsersOrDatabases() throws Exception {
++    PolicyEngine policy;
++    ImmutableSet<String> permissions;
++    PolicyFile policyFile;
++    // test sanity
++    policyFile = PolicyFile.setAdminOnServer1("admin");
++    policyFile.addGroupsToUser("admin1", "admin");
++    policyFile.write(globalPolicyFile);
++    policyFile.write(otherPolicyFile);
++    policy = DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++    permissions = policy.getAllPrivileges(Sets.newHashSet("admin"), ActiveRoleSet.ALL);
++    Assert.assertEquals(permissions.toString(), "[server=server1]");
++    // test to ensure [users] fails parsing of per-db file
++    policyFile.addDatabase("other", otherPolicyFile.getPath());
++    policyFile.write(globalPolicyFile);
++    policyFile.write(otherPolicyFile);
++    policy = DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++    permissions = policy.getAllPrivileges(Sets.newHashSet("admin"), ActiveRoleSet.ALL);
++    Assert.assertEquals(permissions.toString(), "[server=server1]");
++    // test to ensure [databases] fails parsing of per-db file
++    // by removing the user mapping from the per-db policy file
++    policyFile.removeGroupsFromUser("admin1", "admin")
++      .write(otherPolicyFile);
++    policy = DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++    permissions = policy.getAllPrivileges(Sets.newHashSet("admin"), ActiveRoleSet.ALL);
++    Assert.assertEquals(permissions.toString(), "[server=server1]");
++  }
++
++  @Test
++  public void testDatabaseRequiredInRole() throws Exception {
++    append("[databases]", globalPolicyFile);
++    append("other_group_db = " + otherPolicyFile.getPath(), globalPolicyFile);
++    append("[groups]", otherPolicyFile);
++    append("other_group = malicious_role", otherPolicyFile);
++    append("[roles]", otherPolicyFile);
++    append("malicious_role = server=server1", otherPolicyFile);
++    PolicyEngine policy = DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++    ImmutableSet<String> permissions = policy.getAllPrivileges(Sets.newHashSet("other_group"), ActiveRoleSet.ALL);
++    Assert.assertTrue(permissions.toString(), permissions.isEmpty());
++  }
++
++  @Test
++  public void testServerAll() throws Exception {
++    append("[groups]", globalPolicyFile);
++    append("group = malicious_role", globalPolicyFile);
++    append("[roles]", globalPolicyFile);
++    append("malicious_role = server=*", globalPolicyFile);
++    PolicyEngine policy = DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++    ImmutableSet<String> permissions = policy.getAllPrivileges(Sets.newHashSet("group"), ActiveRoleSet.ALL);
++    Assert.assertTrue(permissions.toString(), permissions.isEmpty());
++  }
++
++  @Test
++  public void testServerIncorrect() throws Exception {
++    append("[groups]", globalPolicyFile);
++    append("group = malicious_role", globalPolicyFile);
++    append("[roles]", globalPolicyFile);
++    append("malicious_role = server=server2", globalPolicyFile);
++    PolicyEngine policy = DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++    ImmutableSet<String> permissions = policy.getAllPrivileges(Sets.newHashSet("group"), ActiveRoleSet.ALL);
++    Assert.assertTrue(permissions.toString(), permissions.isEmpty());
++  }
++
++  @Test
++  public void testAll() throws Exception {
++    append("[groups]", globalPolicyFile);
++    append("group = malicious_role", globalPolicyFile);
++    append("[roles]", globalPolicyFile);
++    append("malicious_role = *", globalPolicyFile);
++    PolicyEngine policy = DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++    ImmutableSet<String> permissions = policy.getAllPrivileges(Sets.newHashSet("group"), ActiveRoleSet.ALL);
++    Assert.assertTrue(permissions.toString(), permissions.isEmpty());
++  }
++
++  /**
++   * Create policy file with multiple per db files.
++   * Verify that a file with bad format is the only one that's ignored
++   * @throws Exception
++   */
++  @Test
++  public void testMultiDbWithErrors() throws Exception {
++    File db1PolicyFile = new File(baseDir, "db1.ini");
++    File db2PolicyFile = new File(baseDir, "db2.ini");
++
++    // global policy file
++    append("[databases]", globalPolicyFile);
++    append("db1 = " + db1PolicyFile.getPath(), globalPolicyFile);
++    append("db2 = " + db2PolicyFile.getPath(), globalPolicyFile);
++    append("[groups]", globalPolicyFile);
++    append("db3_group = db3_rule", globalPolicyFile);
++    append("[roles]", globalPolicyFile);
++    append("db3_rule = server=server1->db=db3->table=sales->action=select", globalPolicyFile);
++
++    //db1 policy file with badly formatted rule
++    append("[groups]", db1PolicyFile);
++    append("db1_group = bad_rule", db1PolicyFile);
++    append("[roles]", db1PolicyFile);
++    append("bad_rule = server=server1->db=customers->=purchases->action=", db1PolicyFile);
++
++    //db2 policy file with proper rule
++    append("[groups]", db2PolicyFile);
++    append("db2_group = db2_rule", db2PolicyFile);
++    append("[roles]", db2PolicyFile);
++    append("db2_rule = server=server1->db=db2->table=purchases->action=select", db2PolicyFile);
++
++    PolicyEngine policy = DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++
++    // verify that the db1 rule is empty
++    ImmutableSet<String> permissions = policy.getAllPrivileges(Sets.newHashSet("db1_group"), ActiveRoleSet.ALL);
++    Assert.assertTrue(permissions.toString(), permissions.isEmpty());
++
++    permissions = policy.getAllPrivileges(Sets.newHashSet("db2_group"), ActiveRoleSet.ALL);
++    Assert.assertEquals(permissions.toString(), 1, permissions.size());
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestResourceAuthorizationProviderGeneralCases.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestResourceAuthorizationProviderGeneralCases.java
+new file mode 100644
+index 0000000..403eb6a
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestResourceAuthorizationProviderGeneralCases.java
+@@ -0,0 +1,195 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.sentry.policy.hive;
++
++import java.io.File;
++import java.io.IOException;
++import java.util.Arrays;
++import java.util.EnumSet;
++import java.util.List;
++import java.util.Set;
++
++import com.google.common.collect.Sets;
++import junit.framework.Assert;
++
++import org.apache.commons.io.FileUtils;
++import org.apache.sentry.core.common.Action;
++import org.apache.sentry.core.common.ActiveRoleSet;
++import org.apache.sentry.core.common.Authorizable;
++import org.apache.sentry.core.common.Subject;
++import org.apache.sentry.core.model.db.AccessConstants;
++import org.apache.sentry.core.model.db.DBModelAction;
++import org.apache.sentry.core.model.db.Database;
++import org.apache.sentry.core.model.db.HivePrivilegeModel;
++import org.apache.sentry.core.model.db.Server;
++import org.apache.sentry.core.model.db.Table;
++import org.apache.sentry.provider.common.GroupMappingService;
++import org.apache.sentry.provider.common.ResourceAuthorizationProvider;
++import org.apache.sentry.provider.file.HadoopGroupResourceAuthorizationProvider;
++import org.apache.sentry.provider.file.PolicyFiles;
++import org.junit.After;
++import org.junit.Test;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
++
++import com.google.common.base.Objects;
++import com.google.common.collect.HashMultimap;
++import com.google.common.collect.Multimap;
++import com.google.common.io.Files;
++
++
++public class TestResourceAuthorizationProviderGeneralCases {
++
++  private static final Logger LOGGER = LoggerFactory
++      .getLogger(TestResourceAuthorizationProviderGeneralCases.class);
++
++  private static final Multimap<String, String> USER_TO_GROUP_MAP = HashMultimap
++      .create();
++
++  private static final Subject SUB_ADMIN = new Subject("admin1");
++  private static final Subject SUB_MANAGER = new Subject("manager1");
++  private static final Subject SUB_ANALYST = new Subject("analyst1");
++  private static final Subject SUB_JUNIOR_ANALYST = new Subject("jranalyst1");
++
++  private static final Server SVR_SERVER1 = new Server("server1");
++  private static final Server SVR_ALL = new Server(AccessConstants.ALL);
++
++  private static final Database DB_CUSTOMERS = new Database("customers");
++  private static final Database DB_ANALYST = new Database("analyst1");
++  private static final Database DB_JR_ANALYST = new Database("jranalyst1");
++
++  private static final Table TBL_PURCHASES = new Table("purchases");
++
++  private static final Set<? extends Action> ALL = EnumSet.of(DBModelAction.ALL);
++  private static final Set<? extends Action> SELECT = EnumSet.of(DBModelAction.SELECT);
++  private static final Set<? extends Action> INSERT = EnumSet.of(DBModelAction.INSERT);
++
++  static {
++    USER_TO_GROUP_MAP.putAll(SUB_ADMIN.getName(), Arrays.asList("admin"));
++    USER_TO_GROUP_MAP.putAll(SUB_MANAGER.getName(), Arrays.asList("manager"));
++    USER_TO_GROUP_MAP.putAll(SUB_ANALYST.getName(), Arrays.asList("analyst"));
++    USER_TO_GROUP_MAP.putAll(SUB_JUNIOR_ANALYST.getName(),
++        Arrays.asList("jranalyst"));
++  }
++
++  private final ResourceAuthorizationProvider authzProvider;
++  private File baseDir;
++
++  public TestResourceAuthorizationProviderGeneralCases() throws IOException {
++    baseDir = Files.createTempDir();
++    PolicyFiles.copyToDir(baseDir, "hive-policy-test-authz-provider.ini", "hive-policy-test-authz-provider-other-group.ini");
++    authzProvider = new HadoopGroupResourceAuthorizationProvider(
++            DBPolicyTestUtil.createPolicyEngineForTest("server1",
++        new File(baseDir, "hive-policy-test-authz-provider.ini").getPath()),
++        new MockGroupMappingServiceProvider(USER_TO_GROUP_MAP), HivePrivilegeModel.getInstance());
++
++  }
++
++  @After
++  public void teardown() {
++    if(baseDir != null) {
++      FileUtils.deleteQuietly(baseDir);
++    }
++  }
++
++  private void doTestAuthorizables(
++      Subject subject, Set<? extends Action> privileges, boolean expected,
++      Authorizable... authorizables) throws Exception {
++    List<Authorizable> authzHierarchy = Arrays.asList(authorizables);
++    Objects.ToStringHelper helper = Objects.toStringHelper("TestParameters");
++      helper.add("authorizables", authzHierarchy).add("Privileges", privileges);
++    LOGGER.info("Running with " + helper.toString());
++    Assert.assertEquals(helper.toString(), expected,
++        authzProvider.hasAccess(subject, authzHierarchy, privileges, ActiveRoleSet.ALL));
++    LOGGER.info("Passed " + helper.toString());
++  }
++
++  private void doTestResourceAuthorizationProvider(Subject subject,
++      Server server, Database database, Table table,
++      Set<? extends Action> privileges, boolean expected) throws Exception {
++    List<Authorizable> authzHierarchy = Arrays.asList(new Authorizable[] {
++        server, database, table
++    });
++    Objects.ToStringHelper helper = Objects.toStringHelper("TestParameters");
++    helper.add("Subject", subject).add("Server", server).add("DB", database)
++    .add("Table", table).add("Privileges", privileges).add("authzHierarchy", authzHierarchy);
++    LOGGER.info("Running with " + helper.toString());
++    Assert.assertEquals(helper.toString(), expected,
++        authzProvider.hasAccess(subject, authzHierarchy, privileges, ActiveRoleSet.ALL));
++    LOGGER.info("Passed " + helper.toString());
++  }
++
++  @Test
++  public void testAdmin() throws Exception {
++    doTestResourceAuthorizationProvider(SUB_ADMIN, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, ALL, true);
++    doTestResourceAuthorizationProvider(SUB_ADMIN, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, SELECT, true);
++    doTestResourceAuthorizationProvider(SUB_ADMIN, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, INSERT, true);
++    doTestAuthorizables(SUB_ADMIN, SELECT, true, SVR_ALL, DB_CUSTOMERS, TBL_PURCHASES);
++
++  }
++  @Test
++  public void testManager() throws Exception {
++    doTestResourceAuthorizationProvider(SUB_MANAGER, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, ALL, false);
++    doTestResourceAuthorizationProvider(SUB_MANAGER, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, SELECT, true);
++    doTestResourceAuthorizationProvider(SUB_MANAGER, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, INSERT, false);
++    doTestResourceAuthorizationProvider(SUB_MANAGER, SVR_ALL, DB_CUSTOMERS, TBL_PURCHASES, SELECT, true);
++  }
++  @Test
++  public void testAnalyst() throws Exception {
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, ALL, false);
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, SELECT, true);
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, INSERT, false);
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_ALL, DB_CUSTOMERS, TBL_PURCHASES, SELECT, true);
++
++    // analyst sandbox
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_SERVER1, DB_ANALYST, TBL_PURCHASES, ALL, true);
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_SERVER1, DB_ANALYST, TBL_PURCHASES, SELECT, true);
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_SERVER1, DB_ANALYST, TBL_PURCHASES, INSERT, true);
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_ALL, DB_ANALYST, TBL_PURCHASES, SELECT, true);
++
++    // jr analyst sandbox
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_SERVER1, DB_JR_ANALYST, TBL_PURCHASES, ALL, false);
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_SERVER1, DB_JR_ANALYST, TBL_PURCHASES, SELECT, true);
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_SERVER1, DB_JR_ANALYST, TBL_PURCHASES, INSERT, false);
++    doTestResourceAuthorizationProvider(SUB_ANALYST, SVR_ALL, DB_JR_ANALYST, TBL_PURCHASES, SELECT, true);
++  }
++  @Test
++  public void testJuniorAnalyst() throws Exception {
++    doTestResourceAuthorizationProvider(SUB_JUNIOR_ANALYST, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, ALL, false);
++    doTestResourceAuthorizationProvider(SUB_JUNIOR_ANALYST, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, SELECT, false);
++    doTestResourceAuthorizationProvider(SUB_JUNIOR_ANALYST, SVR_SERVER1, DB_CUSTOMERS, TBL_PURCHASES, INSERT, false);
++    doTestResourceAuthorizationProvider(SUB_JUNIOR_ANALYST, SVR_ALL, DB_CUSTOMERS, TBL_PURCHASES, SELECT, false);
++    // jr analyst sandbox
++    doTestResourceAuthorizationProvider(SUB_JUNIOR_ANALYST, SVR_SERVER1, DB_JR_ANALYST, TBL_PURCHASES, ALL, true);
++    doTestResourceAuthorizationProvider(SUB_JUNIOR_ANALYST, SVR_SERVER1, DB_JR_ANALYST, TBL_PURCHASES, SELECT, true);
++    doTestResourceAuthorizationProvider(SUB_JUNIOR_ANALYST, SVR_SERVER1, DB_JR_ANALYST, TBL_PURCHASES, INSERT, true);
++    doTestResourceAuthorizationProvider(SUB_JUNIOR_ANALYST, SVR_ALL, DB_JR_ANALYST, TBL_PURCHASES, SELECT, true);
++  }
++
++  public class MockGroupMappingServiceProvider implements GroupMappingService {
++    private final Multimap<String, String> userToGroupMap;
++
++    public MockGroupMappingServiceProvider(Multimap<String, String> userToGroupMap) {
++      this.userToGroupMap = userToGroupMap;
++    }
++
++    @Override
++    public Set<String> getGroups(String user) {
++      return Sets.newHashSet(userToGroupMap.get(user));
++    }
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestResourceAuthorizationProviderSpecialCases.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestResourceAuthorizationProviderSpecialCases.java
+new file mode 100644
+index 0000000..6fe9e6b
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestResourceAuthorizationProviderSpecialCases.java
+@@ -0,0 +1,124 @@
++ /*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.sentry.policy.hive;
++
++import java.io.File;
++import java.io.IOException;
++import java.util.EnumSet;
++import java.util.List;
++import java.util.Set;
++
++import org.junit.Assert;
++
++import org.apache.commons.io.FileUtils;
++import org.apache.sentry.core.common.Action;
++import org.apache.sentry.core.common.ActiveRoleSet;
++import org.apache.sentry.core.common.Authorizable;
++import org.apache.sentry.core.common.Subject;
++import org.apache.sentry.core.model.db.AccessURI;
++import org.apache.sentry.core.model.db.DBModelAction;
++import org.apache.sentry.core.model.db.HivePrivilegeModel;
++import org.apache.sentry.core.model.db.Server;
++import org.apache.sentry.policy.common.PolicyEngine;
++import org.apache.sentry.provider.common.AuthorizationProvider;
++import org.apache.sentry.provider.file.LocalGroupResourceAuthorizationProvider;
++import org.apache.sentry.provider.file.PolicyFile;
++import org.junit.After;
++import org.junit.Before;
++import org.junit.Test;
++
++import com.google.common.collect.ImmutableList;
++import com.google.common.io.Files;
++
++public class TestResourceAuthorizationProviderSpecialCases {
++  private AuthorizationProvider authzProvider;
++  private PolicyFile policyFile;
++  private File baseDir;
++  private File iniFile;
++  private String initResource;
++  @Before
++  public void setup() throws IOException {
++    baseDir = Files.createTempDir();
++    iniFile = new File(baseDir, "policy.ini");
++    initResource = "file://" + iniFile.getPath();
++    policyFile = new PolicyFile();
++  }
++
++  @After
++  public void teardown() throws IOException {
++    if(baseDir != null) {
++      FileUtils.deleteQuietly(baseDir);
++    }
++  }
++
++  @Test
++  public void testDuplicateEntries() throws Exception {
++    Subject user1 = new Subject("user1");
++    Server server1 = new Server("server1");
++    AccessURI uri = new AccessURI("file:///path/to/");
++    Set<? extends Action> actions = EnumSet.of(DBModelAction.ALL, DBModelAction.SELECT, DBModelAction.INSERT);
++    policyFile.addGroupsToUser(user1.getName(), true, "group1", "group1")
++      .addRolesToGroup("group1",  true, "role1", "role1")
++      .addPermissionsToRole("role1", true, "server=" + server1.getName() + "->uri=" + uri.getName(),
++          "server=" + server1.getName() + "->uri=" + uri.getName());
++    policyFile.write(iniFile);
++    PolicyEngine policy = DBPolicyTestUtil.createPolicyEngineForTest(server1.getName(), initResource);
++    authzProvider = new LocalGroupResourceAuthorizationProvider(initResource, policy, HivePrivilegeModel.getInstance());
++    List<? extends Authorizable> authorizableHierarchy = ImmutableList.of(server1, uri);
++    Assert.assertTrue(authorizableHierarchy.toString(),
++        authzProvider.hasAccess(user1, authorizableHierarchy, actions, ActiveRoleSet.ALL));
++  }
++  @Test
++  public void testNonAbolutePath() throws Exception {
++    Subject user1 = new Subject("user1");
++    Server server1 = new Server("server1");
++    AccessURI uri = new AccessURI("file:///path/to/");
++    Set<? extends Action> actions = EnumSet.of(DBModelAction.ALL, DBModelAction.SELECT, DBModelAction.INSERT);
++    policyFile.addGroupsToUser(user1.getName(), "group1")
++      .addRolesToGroup("group1", "role1")
++      .addPermissionsToRole("role1", "server=" + server1.getName() + "->uri=" + uri.getName());
++    policyFile.write(iniFile);
++    PolicyEngine policy = DBPolicyTestUtil.createPolicyEngineForTest(server1.getName(), initResource);
++    authzProvider = new LocalGroupResourceAuthorizationProvider(initResource, policy, HivePrivilegeModel.getInstance());
++    // positive test
++    List<? extends Authorizable> authorizableHierarchy = ImmutableList.of(server1, uri);
++    Assert.assertTrue(authorizableHierarchy.toString(),
++        authzProvider.hasAccess(user1, authorizableHierarchy, actions, ActiveRoleSet.ALL));
++    // negative tests
++    // TODO we should support the case of /path/to/./ but let's to that later
++    uri = new AccessURI("file:///path/to/./");
++    authorizableHierarchy = ImmutableList.of(server1, uri);
++    Assert.assertFalse(authorizableHierarchy.toString(),
++        authzProvider.hasAccess(user1, authorizableHierarchy, actions, ActiveRoleSet.ALL));
++    uri = new AccessURI("file:///path/to/../");
++    authorizableHierarchy = ImmutableList.of(server1, uri);
++    Assert.assertFalse(authorizableHierarchy.toString(),
++        authzProvider.hasAccess(user1, authorizableHierarchy, actions, ActiveRoleSet.ALL));
++    uri = new AccessURI("file:///path/to/../../");
++    authorizableHierarchy = ImmutableList.of(server1, uri);
++    Assert.assertFalse(authorizableHierarchy.toString(),
++        authzProvider.hasAccess(user1, authorizableHierarchy, actions, ActiveRoleSet.ALL));
++    uri = new AccessURI("file:///path/to/dir/../../");
++    authorizableHierarchy = ImmutableList.of(server1, uri);
++    Assert.assertFalse(authorizableHierarchy.toString(),
++        authzProvider.hasAccess(user1, authorizableHierarchy, actions, ActiveRoleSet.ALL));
++  }
++  @Test(expected=IllegalArgumentException.class)
++  public void testInvalidPath() throws Exception {
++    new AccessURI(":invaliduri");
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestSimpleDBPolicyEngineDFS.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestSimpleDBPolicyEngineDFS.java
+new file mode 100644
+index 0000000..97cf615
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestSimpleDBPolicyEngineDFS.java
+@@ -0,0 +1,115 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.sentry.policy.hive;
++
++import java.io.File;
++import java.io.IOException;
++import java.util.Set;
++
++import org.junit.Assert;
++
++import org.apache.hadoop.conf.Configuration;
++import org.apache.hadoop.fs.FileSystem;
++import org.apache.hadoop.fs.Path;
++import org.apache.hadoop.hdfs.MiniDFSCluster;
++import org.apache.sentry.core.common.ActiveRoleSet;
++import org.apache.sentry.policy.common.PolicyEngine;
++import org.apache.sentry.provider.file.PolicyFile;
++import org.apache.sentry.provider.file.PolicyFiles;
++import org.junit.AfterClass;
++import org.junit.BeforeClass;
++import org.junit.Test;
++
++import com.google.common.collect.ImmutableSet;
++import com.google.common.collect.Sets;
++import com.google.common.io.Files;
++
++public class TestSimpleDBPolicyEngineDFS extends AbstractTestSimplePolicyEngine {
++
++  private static MiniDFSCluster dfsCluster;
++  private static FileSystem fileSystem;
++  private static Path root;
++  private static Path etc;
++
++  @BeforeClass
++  public static void setupLocalClazz() throws IOException {
++    File baseDir = getBaseDir();
++    Assert.assertNotNull(baseDir);
++    File dfsDir = new File(baseDir, "dfs");
++    Assert.assertTrue(dfsDir.isDirectory() || dfsDir.mkdirs());
++    Configuration conf = new Configuration();
++    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, dfsDir.getPath());
++    dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
++    fileSystem = dfsCluster.getFileSystem();
++    root = new Path(fileSystem.getUri().toString());
++    etc = new Path(root, "/etc");
++    fileSystem.mkdirs(etc);
++  }
++  @AfterClass
++  public static void teardownLocalClazz() {
++    if(dfsCluster != null) {
++      dfsCluster.shutdown();
++    }
++  }
++
++  @Override
++  protected void  afterSetup() throws IOException {
++    fileSystem.delete(etc, true);
++    fileSystem.mkdirs(etc);
++    PolicyFiles.copyToDir(fileSystem, etc, "hive-policy-test-authz-provider.ini", "hive-policy-test-authz-provider-other-group.ini");
++    setPolicy(DBPolicyTestUtil.createPolicyEngineForTest("server1",
++        new Path(etc, "hive-policy-test-authz-provider.ini").toString()));
++  }
++  @Override
++  protected void beforeTeardown() throws IOException {
++    fileSystem.delete(etc, true);
++  }
++
++  @Test
++  public void testMultiFSPolicy() throws Exception {
++    File globalPolicyFile = new File(Files.createTempDir(), "global-policy.ini");
++    File dbPolicyFile = new File(Files.createTempDir(), "db11-policy.ini");
++
++    // Create global policy file
++    PolicyFile dbPolicy = new PolicyFile()
++      .addPermissionsToRole("db11_role", "server=server1->db=db11")
++      .addRolesToGroup("group1", "db11_role");
++
++    dbPolicy.write(dbPolicyFile);
++    Path dbPolicyPath = new Path(etc, "db11-policy.ini");
++
++    // create per-db policy file
++    PolicyFile globalPolicy = new PolicyFile()
++      .addPermissionsToRole("admin_role", "server=server1")
++      .addRolesToGroup("admin_group", "admin_role")
++      .addGroupsToUser("db", "admin_group");
++    globalPolicy.addDatabase("db11", dbPolicyPath.toUri().toString());
++    globalPolicy.write(globalPolicyFile);
++
++
++    PolicyFiles.copyFilesToDir(fileSystem, etc, globalPolicyFile);
++    PolicyFiles.copyFilesToDir(fileSystem, etc, dbPolicyFile);
++    PolicyEngine multiFSEngine =
++            DBPolicyTestUtil.createPolicyEngineForTest("server1", globalPolicyFile.getPath());
++
++    Set<String> dbGroups = Sets.newHashSet();
++    dbGroups.add("group1");
++    ImmutableSet<String> dbPerms =
++        multiFSEngine.getAllPrivileges(dbGroups, ActiveRoleSet.ALL);
++    Assert.assertEquals("No DB permissions found", 1, dbPerms.size());
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestSimpleDBPolicyEngineLocalFS.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestSimpleDBPolicyEngineLocalFS.java
+new file mode 100644
+index 0000000..c986d7e
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/policy/hive/TestSimpleDBPolicyEngineLocalFS.java
+@@ -0,0 +1,44 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.sentry.policy.hive;
++
++import java.io.File;
++import java.io.IOException;
++
++import org.junit.Assert;
++
++import org.apache.commons.io.FileUtils;
++import org.apache.sentry.provider.file.PolicyFiles;
++
++public class TestSimpleDBPolicyEngineLocalFS extends AbstractTestSimplePolicyEngine {
++
++  @Override
++  protected void  afterSetup() throws IOException {
++    File baseDir = getBaseDir();
++    Assert.assertNotNull(baseDir);
++    Assert.assertTrue(baseDir.isDirectory() || baseDir.mkdirs());
++    PolicyFiles.copyToDir(baseDir, "hive-policy-test-authz-provider.ini", "hive-policy-test-authz-provider-other-group.ini");
++    setPolicy(DBPolicyTestUtil.createPolicyEngineForTest("server1",
++        new File(baseDir, "hive-policy-test-authz-provider.ini").getPath()));
++  }
++  @Override
++  protected void beforeTeardown() throws IOException {
++    File baseDir = getBaseDir();
++    Assert.assertNotNull(baseDir);
++    FileUtils.deleteQuietly(baseDir);
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/privilege/hive/TestCommonPrivilegeForHive.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/privilege/hive/TestCommonPrivilegeForHive.java
+new file mode 100644
+index 0000000..c719802
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/privilege/hive/TestCommonPrivilegeForHive.java
+@@ -0,0 +1,344 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *      http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++package org.apache.sentry.privilege.hive;
++
++import junit.framework.Assert;
++import org.apache.sentry.core.common.Model;
++import org.apache.sentry.core.common.utils.KeyValue;
++import org.apache.sentry.core.common.utils.PathUtils;
++import org.apache.sentry.core.common.utils.SentryConstants;
++import org.apache.sentry.core.model.db.AccessConstants;
++import org.apache.sentry.core.model.db.HivePrivilegeModel;
++import org.apache.sentry.policy.common.CommonPrivilege;
++import org.apache.sentry.policy.common.Privilege;
++import org.junit.Before;
++import org.junit.Test;
++
++import static junit.framework.Assert.assertFalse;
++import static junit.framework.Assert.assertTrue;
++
++public class TestCommonPrivilegeForHive {
++
++  private Model hivePrivilegeModel;
++
++  private static final String ALL = AccessConstants.ALL;
++
++  private static final CommonPrivilege ROLE_SERVER_SERVER1_DB_ALL =
++          create(new KeyValue("server", "server1"), new KeyValue("db", ALL));
++  private static final CommonPrivilege ROLE_SERVER_SERVER1_DB_DB1 =
++          create(new KeyValue("server", "server1"), new KeyValue("db", "db1"));
++  private static final CommonPrivilege ROLE_SERVER_SERVER2_DB_ALL =
++          create(new KeyValue("server", "server2"), new KeyValue("db", ALL));
++  private static final CommonPrivilege ROLE_SERVER_SERVER2_DB_DB1 =
++          create(new KeyValue("server", "server2"), new KeyValue("db", "db1"));
++  private static final CommonPrivilege ROLE_SERVER_ALL_DB_ALL =
++          create(new KeyValue("server", ALL), new KeyValue("db", ALL));
++  private static final CommonPrivilege ROLE_SERVER_ALL_DB_DB1 =
++          create(new KeyValue("server", ALL), new KeyValue("db", "db1"));
++
++  private static final CommonPrivilege ROLE_SERVER_SERVER1_URI_URI1 =
++          create(new KeyValue("server", "server1"), new KeyValue("uri",
++                  "hdfs://namenode:8020/path/to/uri1"));
++  private static final CommonPrivilege ROLE_SERVER_SERVER1_URI_URI2 =
++          create(new KeyValue("server", "server1"), new KeyValue("uri",
++                  "hdfs://namenode:8020/path/to/uri2/"));
++  private static final CommonPrivilege ROLE_SERVER_SERVER1_URI_ALL =
++          create(new KeyValue("server", "server1"), new KeyValue("uri", ALL));
++
++  private static final CommonPrivilege ROLE_SERVER_SERVER1 =
++          create(new KeyValue("server", "server1"));
++
++  private static final CommonPrivilege REQUEST_SERVER1_DB1 =
++          create(new KeyValue("server", "server1"), new KeyValue("db", "db1"));
++  private static final CommonPrivilege REQUEST_SERVER2_DB1 =
++          create(new KeyValue("server", "server2"), new KeyValue("db", "db1"));
++  private static final CommonPrivilege REQUEST_SERVER1_DB2 =
++          create(new KeyValue("server", "server1"), new KeyValue("db", "db2"));
++  private static final CommonPrivilege REQUEST_SERVER2_DB2 =
++          create(new KeyValue("server", "server2"), new KeyValue("db", "db2"));
++
++  private static final CommonPrivilege REQUEST_SERVER1_URI1 =
++          create(new KeyValue("server", "server1"), new KeyValue("uri",
++                  "hdfs://namenode:8020/path/to/uri1/some/file"));
++  private static final CommonPrivilege REQUEST_SERVER1_URI2 =
++          create(new KeyValue("server", "server1"), new KeyValue("uri",
++                  "hdfs://namenode:8020/path/to/uri2/some/other/file"));
++
++  private static final CommonPrivilege REQUEST_SERVER1_OTHER =
++          create(new KeyValue("server", "server2"), new KeyValue("other", "thing"));
++
++  private static final CommonPrivilege REQUEST_SERVER1 =
++          create(new KeyValue("server", "server2"));
++
++  @Before
++  public void prepareData() {
++    hivePrivilegeModel = HivePrivilegeModel.getInstance();
++  }
++
++  @Test
++  public void testOther() throws Exception {
++    assertFalse(ROLE_SERVER_ALL_DB_ALL.implies(REQUEST_SERVER1_OTHER, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_OTHER.implies(ROLE_SERVER_ALL_DB_ALL, hivePrivilegeModel));
++  }
++
++  @Test
++  public void testRoleShorterThanRequest() throws Exception {
++    assertTrue(ROLE_SERVER_SERVER1.implies(REQUEST_SERVER1_DB1, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER1.implies(REQUEST_SERVER1_DB2, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1.implies(REQUEST_SERVER2_DB1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1.implies(REQUEST_SERVER2_DB2, hivePrivilegeModel));
++
++    assertTrue(ROLE_SERVER_ALL_DB_ALL.implies(REQUEST_SERVER1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_ALL_DB_DB1.implies(REQUEST_SERVER1, hivePrivilegeModel));
++  }
++
++  @Test
++  public void testRolesAndRequests() throws Exception {
++    // ROLE_SERVER_SERVER1_DB_ALL
++    assertTrue(ROLE_SERVER_SERVER1_DB_ALL.implies(REQUEST_SERVER1_DB1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1_DB_ALL.implies(REQUEST_SERVER2_DB1, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER1_DB_ALL.implies(REQUEST_SERVER1_DB2, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1_DB_ALL.implies(REQUEST_SERVER2_DB2, hivePrivilegeModel));
++
++    // test inverse
++    assertTrue(REQUEST_SERVER1_DB1.implies(ROLE_SERVER_SERVER1_DB_ALL, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER2_DB1.implies(ROLE_SERVER_SERVER1_DB_ALL, hivePrivilegeModel));
++    assertTrue(REQUEST_SERVER1_DB2.implies(ROLE_SERVER_SERVER1_DB_ALL, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER2_DB2.implies(ROLE_SERVER_SERVER1_DB_ALL, hivePrivilegeModel));
++
++    // ROLE_SERVER_SERVER1_DB_DB1
++    assertTrue(ROLE_SERVER_SERVER1_DB_DB1.implies(REQUEST_SERVER1_DB1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1_DB_DB1.implies(REQUEST_SERVER2_DB1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1_DB_DB1.implies(REQUEST_SERVER1_DB2, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1_DB_DB1.implies(REQUEST_SERVER2_DB2, hivePrivilegeModel));
++
++    // test inverse
++    assertTrue(REQUEST_SERVER1_DB1.implies(ROLE_SERVER_SERVER1_DB_DB1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER2_DB1.implies(ROLE_SERVER_SERVER1_DB_DB1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_DB2.implies(ROLE_SERVER_SERVER1_DB_DB1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER2_DB2.implies(ROLE_SERVER_SERVER1_DB_DB1, hivePrivilegeModel));
++
++    // ROLE_SERVER_SERVER2_DB_ALL
++    assertFalse(ROLE_SERVER_SERVER2_DB_ALL.implies(REQUEST_SERVER1_DB1, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER2_DB_ALL.implies(REQUEST_SERVER2_DB1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER2_DB_ALL.implies(REQUEST_SERVER1_DB2, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER2_DB_ALL.implies(REQUEST_SERVER2_DB2, hivePrivilegeModel));
++
++    // test inverse
++    assertFalse(REQUEST_SERVER1_DB1.implies(ROLE_SERVER_SERVER2_DB_ALL, hivePrivilegeModel));
++    assertTrue(REQUEST_SERVER2_DB1.implies(ROLE_SERVER_SERVER2_DB_ALL, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_DB2.implies(ROLE_SERVER_SERVER2_DB_ALL, hivePrivilegeModel));
++    assertTrue(REQUEST_SERVER2_DB2.implies(ROLE_SERVER_SERVER2_DB_ALL, hivePrivilegeModel));
++
++    // ROLE_SERVER_SERVER2_DB_DB1
++    assertFalse(ROLE_SERVER_SERVER2_DB_DB1.implies(REQUEST_SERVER1_DB1, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER2_DB_DB1.implies(REQUEST_SERVER2_DB1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER2_DB_DB1.implies(REQUEST_SERVER1_DB2, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER2_DB_DB1.implies(REQUEST_SERVER2_DB2, hivePrivilegeModel));
++
++    assertFalse(REQUEST_SERVER1_DB1.implies(ROLE_SERVER_SERVER2_DB_DB1, hivePrivilegeModel));
++    assertTrue(REQUEST_SERVER2_DB1.implies(ROLE_SERVER_SERVER2_DB_DB1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_DB2.implies(ROLE_SERVER_SERVER2_DB_DB1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER2_DB2.implies(ROLE_SERVER_SERVER2_DB_DB1, hivePrivilegeModel));
++
++    // ROLE_SERVER_ALL_DB_ALL
++    assertTrue(ROLE_SERVER_ALL_DB_ALL.implies(REQUEST_SERVER1_DB1, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_ALL_DB_ALL.implies(REQUEST_SERVER2_DB1, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_ALL_DB_ALL.implies(REQUEST_SERVER1_DB2, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_ALL_DB_ALL.implies(REQUEST_SERVER2_DB2, hivePrivilegeModel));
++
++    // test inverse
++    assertTrue(REQUEST_SERVER1_DB1.implies(ROLE_SERVER_ALL_DB_ALL, hivePrivilegeModel));
++    assertTrue(REQUEST_SERVER2_DB1.implies(ROLE_SERVER_ALL_DB_ALL, hivePrivilegeModel));
++    assertTrue(REQUEST_SERVER1_DB2.implies(ROLE_SERVER_ALL_DB_ALL, hivePrivilegeModel));
++    assertTrue(REQUEST_SERVER2_DB2.implies(ROLE_SERVER_ALL_DB_ALL, hivePrivilegeModel));
++
++    // ROLE_SERVER_ALL_DB_DB1
++    assertTrue(ROLE_SERVER_ALL_DB_DB1.implies(REQUEST_SERVER1_DB1, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_ALL_DB_DB1.implies(REQUEST_SERVER2_DB1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_ALL_DB_DB1.implies(REQUEST_SERVER1_DB2, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_ALL_DB_DB1.implies(REQUEST_SERVER2_DB2, hivePrivilegeModel));
++
++    // test inverse
++    assertTrue(REQUEST_SERVER1_DB1.implies(ROLE_SERVER_ALL_DB_DB1, hivePrivilegeModel));
++    assertTrue(REQUEST_SERVER2_DB1.implies(ROLE_SERVER_ALL_DB_DB1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_DB2.implies(ROLE_SERVER_ALL_DB_DB1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER2_DB2.implies(ROLE_SERVER_ALL_DB_DB1, hivePrivilegeModel));
++
++    // uri
++    assertTrue(ROLE_SERVER_SERVER1.implies(REQUEST_SERVER1_URI1, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER1.implies(REQUEST_SERVER1_URI2, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER1.implies(REQUEST_SERVER1_URI2, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER1_URI_ALL.implies(REQUEST_SERVER1_URI1, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER1_URI_ALL.implies(REQUEST_SERVER1_URI2, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER1.implies(REQUEST_SERVER1_URI2, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER1_URI_URI1.implies(REQUEST_SERVER1_URI1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1_URI_URI1.implies(REQUEST_SERVER1_URI2, hivePrivilegeModel));
++    assertTrue(ROLE_SERVER_SERVER1_URI_URI2.implies(REQUEST_SERVER1_URI2, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1_URI_URI2.implies(REQUEST_SERVER1_URI1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER2_DB2.implies(REQUEST_SERVER1_URI1, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_ALL_DB_DB1.implies(REQUEST_SERVER1_URI1, hivePrivilegeModel));
++    // test inverse
++    assertTrue(REQUEST_SERVER1_URI1.implies(ROLE_SERVER_SERVER1_URI_ALL, hivePrivilegeModel));
++    assertTrue(REQUEST_SERVER1_URI2.implies(ROLE_SERVER_SERVER1_URI_ALL, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_URI1.implies(ROLE_SERVER_SERVER1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_URI1.implies(ROLE_SERVER_SERVER1_URI_URI1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_URI2.implies(ROLE_SERVER_SERVER1_URI_URI1, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_URI2.implies(ROLE_SERVER_SERVER1_URI_URI2, hivePrivilegeModel));
++    assertFalse(REQUEST_SERVER1_URI1.implies(ROLE_SERVER_SERVER1_URI_URI2, hivePrivilegeModel));
++  };
++
++  @Test
++  public void testUnexpected() throws Exception {
++    Privilege p = new Privilege() {
++      @Override
++      public boolean implies(Privilege p, Model m) {
++        return false;
++      }
++    };
++    assertFalse(ROLE_SERVER_SERVER1_DB_ALL.implies(null, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1_DB_ALL.implies(p, hivePrivilegeModel));
++    assertFalse(ROLE_SERVER_SERVER1_DB_ALL.equals(null));
++    assertFalse(ROLE_SERVER_SERVER1_DB_ALL.equals(p));
++
++    Assert.assertEquals(ROLE_SERVER_SERVER1_DB_ALL.hashCode(),
++            create(ROLE_SERVER_SERVER1_DB_ALL.toString()).hashCode());
++  }
++
++  @Test(expected=IllegalArgumentException.class)
++  public void testNullString() throws Exception {
++    System.out.println(create((String)null));
++  }
++
++  @Test(expected=IllegalArgumentException.class)
++  public void testEmptyString() throws Exception {
++    System.out.println(create(""));
++  }
++
++  @Test(expected=IllegalArgumentException.class)
++  public void testEmptyKey() throws Exception {
++    System.out.println(create(SentryConstants.KV_JOINER.join("", "db1")));
++  }
++
++  @Test(expected=IllegalArgumentException.class)
++  public void testEmptyValue() throws Exception {
++    System.out.println(create(SentryConstants.KV_JOINER.join("db", "")));
++  }
++
++  @Test(expected=IllegalArgumentException.class)
++  public void testEmptyPart() throws Exception {
++    System.out.println(create(SentryConstants.AUTHORIZABLE_JOINER.
++            join(SentryConstants.KV_JOINER.join("server", "server1"), "")));
++  }
++
++  @Test(expected=IllegalArgumentException.class)
++  public void testOnlySeperators() throws Exception {
++    System.out.println(create(SentryConstants.AUTHORIZABLE_JOINER.
++            join(SentryConstants.KV_SEPARATOR, SentryConstants.KV_SEPARATOR,
++            SentryConstants.KV_SEPARATOR)));
++  }
++
++  @Test
++  public void testImpliesURIPositive() throws Exception {
++    assertTrue(PathUtils.impliesURI("hdfs://namenode:8020/path", "hdfs://namenode:8020/path/to/some/dir"));
++    assertTrue(PathUtils.impliesURI("hdfs://namenode:8020/path", "hdfs://namenode:8020/path"));
++    assertTrue(PathUtils.impliesURI("file:///path", "file:///path/to/some/dir"));
++    assertTrue(PathUtils.impliesURI("file:///path", "file:///path"));
++  }
++
++  @Test
++  public void testImpliesURINegative() throws Exception {
++    // relative path
++    assertFalse(PathUtils.impliesURI("hdfs://namenode:8020/path", "hdfs://namenode:8020/path/to/../../other"));
++    assertFalse(PathUtils.impliesURI("file:///path", "file:///path/to/../../other"));
++    // bad policy
++    assertFalse(PathUtils.impliesURI("blah", "hdfs://namenode:8020/path/to/some/dir"));
++    // bad request
++    assertFalse(PathUtils.impliesURI("hdfs://namenode:8020/path", "blah"));
++    // scheme
++    assertFalse(PathUtils.impliesURI("hdfs://namenode:8020/path", "file:///path/to/some/dir"));
++    assertFalse(PathUtils.impliesURI("hdfs://namenode:8020/path", "file://namenode:8020/path/to/some/dir"));
++    // hostname
++    assertFalse(PathUtils.impliesURI("hdfs://namenode1:8020/path", "hdfs://namenode2:8020/path/to/some/dir"));
++    // port
++    assertFalse(PathUtils.impliesURI("hdfs://namenode:8020/path", "hdfs://namenode:8021/path/to/some/dir"));
++    // mangled path
++    assertFalse(PathUtils.impliesURI("hdfs://namenode:8020/path", "hdfs://namenode:8020/pathFooBar"));
++    // ends in /
++    assertTrue(PathUtils.impliesURI("hdfs://namenode:8020/path/", "hdfs://namenode:8020/path/FooBar"));
++  }
++
++  @Test
++  public void testActionHierarchy() throws Exception {
++    String dbName = "db1";
++    CommonPrivilege dbAll = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName), new KeyValue("action", "ALL"));
++
++    CommonPrivilege dbSelect = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName), new KeyValue("action", "SELECT"));
++    CommonPrivilege dbInsert = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName), new KeyValue("action", "INSERT"));
++    CommonPrivilege dbAlter = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName), new KeyValue("action", "ALTER"));
++    CommonPrivilege dbCreate = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName), new KeyValue("action", "CREATE"));
++    CommonPrivilege dbDrop = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName), new KeyValue("action", "DROP"));
++    CommonPrivilege dbIndex = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName), new KeyValue("action", "INDEX"));
++    CommonPrivilege dbLock = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName), new KeyValue("action", "LOCK"));
++
++    assertTrue(dbAll.implies(dbSelect, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbInsert, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbAlter, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbCreate, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbDrop, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbIndex, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbLock, hivePrivilegeModel));
++
++    dbAll = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName), new KeyValue("action", "*"));
++
++    assertTrue(dbAll.implies(dbSelect, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbInsert, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbAlter, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbCreate, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbDrop, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbIndex, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbLock, hivePrivilegeModel));
++
++    dbAll = create(new KeyValue("server", "server1"),
++            new KeyValue("db", dbName));
++
++    assertTrue(dbAll.implies(dbSelect, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbInsert, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbAlter, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbCreate, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbDrop, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbIndex, hivePrivilegeModel));
++    assertTrue(dbAll.implies(dbLock, hivePrivilegeModel));
++  }
++
++  static CommonPrivilege create(KeyValue... keyValues) {
++    return create(SentryConstants.AUTHORIZABLE_JOINER.join(keyValues));
++  }
++
++  static CommonPrivilege create(String s) {
++    return new CommonPrivilege(s);
++  }
++}
+diff --git a/sentry-binding/sentry-binding-hive/src/test/resources/hive-policy-test-authz-provider-other-group.ini b/sentry-binding/sentry-binding-hive/src/test/resources/hive-policy-test-authz-provider-other-group.ini
+new file mode 100644
+index 0000000..cd3695c
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/resources/hive-policy-test-authz-provider-other-group.ini
+@@ -0,0 +1,22 @@
++# Licensed to the Apache Software Foundation (ASF) under one
++# or more contributor license agreements.  See the NOTICE file
++# distributed with this work for additional information
++# regarding copyright ownership.  The ASF licenses this file
++# to you under the Apache License, Version 2.0 (the
++# "License"); you may not use this file except in compliance
++# with the License.  You may obtain a copy of the License at
++#
++#  http://www.apache.org/licenses/LICENSE-2.0
++#
++# Unless required by applicable law or agreed to in writing,
++# software distributed under the License is distributed on an
++# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++# KIND, either express or implied.  See the License for the
++# specific language governing permissions and limitations
++# under the License.
++
++[groups]
++other_group = analyst_role
++
++[roles]
++analyst_role = server=server1->db=other_group_db->table=purchases->action=select
+\ No newline at end of file
+diff --git a/sentry-binding/sentry-binding-hive/src/test/resources/hive-policy-test-authz-provider.ini b/sentry-binding/sentry-binding-hive/src/test/resources/hive-policy-test-authz-provider.ini
+new file mode 100644
+index 0000000..e9114ef
+--- /dev/null
++++ b/sentry-binding/sentry-binding-hive/src/test/resources/hive-policy-test-authz-provider.ini
+@@ -0,0 +1,32 @@
++# Licensed to the Apache Software Foundation (ASF) under one
++# or more contributor license agreements.  See the NOTICE file
++# distributed with this work for additional information
++# regarding copyright ownership.  The ASF licenses this file
++# to you under the Apache License, Version 2.0 (the
++# "License"); you may not use this file except in compliance
++# with the License.  You may obtain a copy of the License at
++#
++#  http://www.apache.org/licenses/LICENSE-2.0
++#
++# Unless required by applicable law or agreed to in writing,
++# software distributed under the License is distributed on an
++# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++# KIND, either express or implied.  See the License for the
++# specific language governing permissions and limitations
++# under the License.
++
++[databases]
++other_group_db = hive-policy-test-authz-provider-other-group.ini
++
++[groups]
++manager = analyst_role, junior_analyst_role
++analyst = analyst_role
++jranalyst = junior_analyst_role
++admin = admin
++
++[roles]
++analyst_role = server=server1->db=customers->table=purchases->action=select, \
++  server=server1->db=analyst1, \
++  server=server1->db=jranalyst1->table=*->action=select
++junior_analyst_role = server=server1->db=jranalyst1, server=server1->db=customers->table=purchases_partial->action=select
++admin = server=server1
+diff --git a/sentry-binding/sentry-binding-kafka/pom.xml b/sentry-binding/sentry-binding-kafka/pom.xml
+index 15d3de5..f6f212b 100644
+--- a/sentry-binding/sentry-binding-kafka/pom.xml
++++ b/sentry-binding/sentry-binding-kafka/pom.xml
+@@ -45,10 +45,6 @@ limitations under the License.
+     </dependency>
+     <dependency>
+       <groupId>org.apache.sentry</groupId>
+-      <artifactId>sentry-policy-kafka</artifactId>
+-    </dependency>
+-    <dependency>
+-      <groupId>org.apache.sentry</groupId>
+       <artifactId>sentry-provider-common</artifactId>
+     </dependency>
+     <dependency>
+@@ -73,5 +69,10 @@ limitations under the License.
+       <artifactId>kafka_2.11</artifactId>
+       <scope>provided</scope>
+     </dependency>
++    <dependency>
++      <groupId>org.apache.hadoop</groupId>
++      <artifactId>hadoop-minicluster</artifactId>
++      <scope>test</scope>
++    </dependency>
+   </dependencies>
+ </project>
+diff --git a/sentry-binding/sentry-binding-kafka/src/main/java/org/apache/sentry/kafka/binding/KafkaAuthBinding.java b/sentry-binding/sentry-binding-kafka/src/main/java/org/apache/sentry/kafka/binding/KafkaAuthBinding.java
+index c6600a0..15f7359 100644
+--- a/sentry-binding/sentry-binding-kafka/src/main/java/org/apache/sentry/kafka/binding/KafkaAuthBinding.java
++++ b/sentry-binding/sentry-binding-kafka/src/main/java/org/apache/sentry/kafka/binding/KafkaAuthBinding.java
+@@ -42,16 +42,19 @@ import org.apache.kafka.common.security.auth.KafkaPrincipal;
+ import org.apache.sentry.SentryUserException;
+ import org.apache.sentry.core.common.ActiveRoleSet;
+ import org.apache.sentry.core.common.Authorizable;
++import org.apache.sentry.core.common.Model;
+ import org.apache.sentry.core.common.Subject;
+ import org.apache.sentry.core.model.kafka.KafkaActionFactory;
+ import org.apache.sentry.core.model.kafka.KafkaActionFactory.KafkaAction;
+ import org.apache.sentry.core.model.kafka.KafkaAuthorizable;
++import org.apache.sentry.core.model.kafka.KafkaPrivilegeModel;
+ import org.apache.sentry.kafka.ConvertUtil;
+ import org.apache.sentry.kafka.conf.KafkaAuthConf.AuthzConfVars;
+ import org.apache.sentry.policy.common.PolicyEngine;
+ import org.apache.sentry.provider.common.AuthorizationComponent;
+ import org.apache.sentry.provider.common.AuthorizationProvider;
+ import org.apache.sentry.provider.common.ProviderBackend;
++import org.apache.sentry.provider.common.ProviderBackendContext;
+ import org.apache.sentry.provider.db.generic.SentryGenericProviderBackend;
+ import org.apache.sentry.provider.db.generic.service.thrift.SentryGenericServiceClient;
+ import org.apache.sentry.provider.db.generic.service.thrift.SentryGenericServiceClientFactory;
+@@ -72,491 +75,497 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
+ 
+ public class KafkaAuthBinding {
+ 
+-    private static final Logger LOG = LoggerFactory.getLogger(KafkaAuthBinding.class);
+-    private static final String COMPONENT_TYPE = AuthorizationComponent.KAFKA;
+-    private static final String COMPONENT_NAME = COMPONENT_TYPE;
++  private static final Logger LOG = LoggerFactory.getLogger(KafkaAuthBinding.class);
++  private static final String COMPONENT_TYPE = AuthorizationComponent.KAFKA;
++  private static final String COMPONENT_NAME = COMPONENT_TYPE;
+ 
+-    private static Boolean kerberosInit;
++  private static Boolean kerberosInit;
+ 
+-    private final Configuration authConf;
+-    private final AuthorizationProvider authProvider;
+-    private final KafkaActionFactory actionFactory = KafkaActionFactory.getInstance();
++  private fin

<TRUNCATED>

Mime
View raw message