sentry-commits mailing list archives

From s..@apache.org
Subject [5/5] sentry git commit: SENTRY-1138: Extract common classes for binding-hive-v1 and binding-hive-v2 (Dapeng Sun, reviewed by Colin Ma)
Date Wed, 23 Mar 2016 02:22:36 GMT
SENTRY-1138: Extract common classes for binding-hive-v1 and binding-hive-v2 (Dapeng Sun, reviewed by Colin Ma)


Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/7a30c819
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/7a30c819
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/7a30c819

Branch: refs/heads/master
Commit: 7a30c819cf66fcff833db2da7993899b03a9d664
Parents: 4643f98
Author: Sun Dapeng <sdp@apache.org>
Authored: Wed Mar 23 10:19:02 2016 +0800
Committer: Sun Dapeng <sdp@apache.org>
Committed: Wed Mar 23 10:19:02 2016 +0800

----------------------------------------------------------------------
 pom.xml                                         |   5 +
 sentry-binding/pom.xml                          |  11 +-
 .../sentry-binding-hive-common/pom.xml          | 102 +++
 .../apache/hadoop/hive/SentryHiveConstants.java |  31 +
 .../hive/ql/exec/SentryFilterDDLTask.java       | 137 +++
 .../ql/exec/SentryHivePrivilegeObjectDesc.java  |  51 ++
 .../binding/hive/HiveAuthzBindingHookBase.java  | 826 +++++++++++++++++++
 .../hive/SentryIniPolicyFileFormatter.java      | 161 ++++
 .../binding/hive/SentryOnFailureHook.java       |  38 +
 .../hive/SentryOnFailureHookContext.java        |  98 +++
 .../hive/SentryOnFailureHookContextImpl.java    | 125 +++
 .../hive/SentryPolicyFileFormatFactory.java     |  44 +
 .../binding/hive/SentryPolicyFileFormatter.java |  39 +
 .../binding/hive/authz/HiveAuthzBinding.java    | 407 +++++++++
 .../binding/hive/authz/HiveAuthzPrivileges.java | 153 ++++
 .../binding/hive/authz/SentryConfigTool.java    | 622 ++++++++++++++
 .../sentry/binding/hive/conf/HiveAuthzConf.java | 269 ++++++
 .../conf/InvalidConfigurationException.java     |  31 +
 .../metastore/AuthorizingObjectStoreBase.java   | 412 +++++++++
 .../metastore/MetastoreAuthzBindingBase.java    | 450 ++++++++++
 .../metastore/SentryHiveMetaStoreClient.java    | 161 ++++
 .../metastore/SentryMetaStoreFilterHook.java    | 201 +++++
 .../SentryMetastorePostEventListenerBase.java   | 404 +++++++++
 sentry-binding/sentry-binding-hive-v2/pom.xml   |  17 +-
 sentry-binding/sentry-binding-hive/pom.xml      |  40 +-
 .../apache/hadoop/hive/SentryHiveConstants.java |  31 -
 .../hive/ql/exec/SentryFilterDDLTask.java       | 137 ---
 .../hive/ql/exec/SentryGrantRevokeTask.java     |   4 +-
 .../ql/exec/SentryHivePrivilegeObjectDesc.java  |  51 --
 .../binding/hive/HiveAuthzBindingHook.java      | 716 +---------------
 .../hive/HiveAuthzBindingSessionHook.java       |   2 +-
 .../hive/SentryIniPolicyFileFormatter.java      | 161 ----
 .../binding/hive/SentryOnFailureHook.java       |  38 -
 .../hive/SentryOnFailureHookContext.java        |  98 ---
 .../hive/SentryOnFailureHookContextImpl.java    | 125 ---
 .../hive/SentryPolicyFileFormatFactory.java     |  44 -
 .../binding/hive/SentryPolicyFileFormatter.java |  39 -
 .../binding/hive/authz/HiveAuthzBinding.java    | 407 ---------
 .../binding/hive/authz/HiveAuthzPrivileges.java | 153 ----
 .../binding/hive/authz/SentryConfigTool.java    | 622 --------------
 .../sentry/binding/hive/conf/HiveAuthzConf.java | 269 ------
 .../conf/InvalidConfigurationException.java     |  31 -
 .../metastore/AuthorizingObjectStore.java       |   6 +-
 .../metastore/MetastoreAuthzBinding.java        | 404 +--------
 .../metastore/SentryHiveMetaStoreClient.java    | 161 ----
 .../metastore/SentryMetaStoreFilterHook.java    | 201 -----
 .../org/apache/sentry/binding/hive/TestURI.java |  12 +-
 sentry-hdfs/sentry-hdfs-service/pom.xml         |   3 +-
 .../sentry/hdfs/MetastorePluginWithHA.java      |   6 +-
 49 files changed, 4804 insertions(+), 3752 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d25c314..3393c47 100644
--- a/pom.xml
+++ b/pom.xml
@@ -391,6 +391,11 @@ limitations under the License.
       </dependency>
       <dependency>
         <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-binding-hive-common</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
         <artifactId>sentry-binding-hive</artifactId>
         <version>${project.version}</version>
       </dependency>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/pom.xml b/sentry-binding/pom.xml
index 9e4999b..830f0b1 100644
--- a/sentry-binding/pom.xml
+++ b/sentry-binding/pom.xml
@@ -30,14 +30,23 @@ limitations under the License.
   <packaging>pom</packaging>
 
   <modules>
-    <module>sentry-binding-hive</module>
     <module>sentry-binding-kafka</module>
+    <module>sentry-binding-hive-common</module>
     <module>sentry-binding-solr</module>
     <module>sentry-binding-sqoop</module>
   </modules>
 
   <profiles>
     <profile>
+      <id>hive-authz1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <modules>
+        <module>sentry-binding-hive</module>
+      </modules>
+    </profile>
+    <profile>
       <id>hive-authz2</id>
       <activation>
         <activeByDefault>false</activeByDefault>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/pom.xml b/sentry-binding/sentry-binding-hive-common/pom.xml
new file mode 100644
index 0000000..3748522
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/pom.xml
@@ -0,0 +1,102 @@
+<?xml version="1.0"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.sentry</groupId>
+    <artifactId>sentry-binding</artifactId>
+    <version>1.7.0-incubating-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>sentry-binding-hive-common</artifactId>
+  <name>Sentry Hive Binding Common</name>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-core-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-core-model-db</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-file</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-cache</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-policy-db</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <!-- required for SentryGrantRevokeTask -->
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-db</artifactId>
+    </dependency>
+  </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java
new file mode 100644
index 0000000..5238414
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/SentryHiveConstants.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive;
+
+import java.util.EnumSet;
+
+import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
+
+public class SentryHiveConstants {
+  public static final EnumSet<PrivilegeType> ALLOWED_PRIVS = EnumSet.allOf(PrivilegeType.class);
+
+  public static final String PRIVILEGE_NOT_SUPPORTED = "Sentry does not support privilege: ";
+  public static final String PARTITION_PRIVS_NOT_SUPPORTED = "Sentry does not support partition level authorization";
+  public static final String GRANT_REVOKE_NOT_SUPPORTED_ON_OBJECT = "Sentry does not allow grant/revoke on: ";
+  public static final String GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL = "Sentry does not allow privileges to be granted/revoked to/from: ";
+}
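
A hypothetical caller-side sketch (names assumed; not part of this commit) of how these
constants are typically used to reject unsupported grants. Note that ALLOWED_PRIVS is
currently EnumSet.allOf(PrivilegeType.class), so this guard is future-proofing:

    // 'requestedPriv' is an assumed org.apache.hadoop.hive.ql.security.authorization.PrivilegeType
    if (!SentryHiveConstants.ALLOWED_PRIVS.contains(requestedPriv)) {
      throw new UnsupportedOperationException(
          SentryHiveConstants.PRIVILEGE_NOT_SUPPORTED + requestedPriv);
    }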

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
new file mode 100644
index 0000000..ca24531
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec;
+
+import static org.apache.hadoop.util.StringUtils.stringifyException;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHookBase;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.core.common.Subject;
+
+import com.google.common.base.Preconditions;
+
+public class SentryFilterDDLTask extends DDLTask {
+  private static final long serialVersionUID = 1L;
+  private static final Log LOG = LogFactory.getLog(SentryFilterDDLTask.class);
+
+  private HiveAuthzBinding hiveAuthzBinding;
+  private Subject subject;
+  private HiveOperation stmtOperation;
+
+  public SentryFilterDDLTask(HiveAuthzBinding hiveAuthzBinding, Subject subject,
+      HiveOperation stmtOperation) {
+    Preconditions.checkNotNull(hiveAuthzBinding);
+    Preconditions.checkNotNull(subject);
+    Preconditions.checkNotNull(stmtOperation);
+
+    this.hiveAuthzBinding = hiveAuthzBinding;
+    this.subject = subject;
+    this.stmtOperation = stmtOperation;
+  }
+
+  public HiveAuthzBinding getHiveAuthzBinding() {
+    return hiveAuthzBinding;
+  }
+
+  public Subject getSubject() {
+    return subject;
+  }
+
+  public HiveOperation getStmtOperation() {
+    return stmtOperation;
+  }
+
+  @Override
+  public int execute(DriverContext driverContext) {
+    // Currently the SentryFilterDDLTask only supports filtering the "show columns in table" command.
+    ShowColumnsDesc showCols = work.getShowColumnsDesc();
+    try {
+      if (showCols != null) {
+        return showFilterColumns(showCols);
+      }
+    } catch (Throwable e) {
+      failed(e);
+      return 1;
+    }
+
+    return super.execute(driverContext);
+  }
+
+  private void failed(Throwable e) {
+    while (e.getCause() != null && e.getClass() == RuntimeException.class) {
+      e = e.getCause();
+    }
+    setException(e);
+    LOG.error(stringifyException(e));
+  }
+
+  /**
+   * Filter the output of the "show columns in table" command.
+   */
+  private int showFilterColumns(ShowColumnsDesc showCols) throws HiveException {
+    Table table = Hive.get(conf).getTable(showCols.getTableName());
+
+    // write the results in the file
+    DataOutputStream outStream = null;
+    try {
+      Path resFile = new Path(showCols.getResFile());
+      FileSystem fs = resFile.getFileSystem(conf);
+      outStream = fs.create(resFile);
+
+      List<FieldSchema> cols = table.getCols();
+      cols.addAll(table.getPartCols());
+      // In case the query is served by HiveServer2, don't pad it with spaces,
+      // as HiveServer2 output is consumed by JDBC/ODBC clients.
+      boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
+      outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(
+          filterColumns(cols, table), false, isOutputPadded, null));
+      outStream.close();
+      outStream = null;
+    } catch (IOException e) {
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
+    } finally {
+      IOUtils.closeStream(outStream);
+    }
+    return 0;
+  }
+
+  private List<FieldSchema> filterColumns(List<FieldSchema> cols, Table table) throws HiveException {
+    // Keep only the columns the subject has privileges on.
+    return HiveAuthzBindingHookBase.filterShowColumns(getHiveAuthzBinding(),
+        cols, getStmtOperation(), getSubject().getName(), table.getTableName(), table.getDbName());
+  }
+}
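
A minimal sketch (the surrounding hook code is assumed, not shown in this commit) of how a
post-analyze hook can swap this task in for Hive's stock DDLTask so that SHOW COLUMNS
results get filtered:

    // Assumes rootTasks, hiveAuthzBinding, subject and stmtOperation are in scope.
    for (int i = 0; i < rootTasks.size(); i++) {
      Task<? extends Serializable> task = rootTasks.get(i);
      if (task instanceof DDLTask
          && ((DDLTask) task).getWork().getShowColumnsDesc() != null) {
        SentryFilterDDLTask filterTask =
            new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
        filterTask.setWork(((DDLTask) task).getWork());
        rootTasks.set(i, filterTask);  // execute() will now filter the columns
      }
    }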

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
new file mode 100644
index 0000000..4fa4221
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
+
+public class SentryHivePrivilegeObjectDesc extends PrivilegeObjectDesc {
+  private boolean isUri;
+  private boolean isServer;
+
+  public SentryHivePrivilegeObjectDesc() {
+    // reset table type which is on by default
+    super.setTable(false);
+  }
+
+  public boolean getUri() {
+    return isUri;
+  }
+
+  public void setUri(boolean isUri) {
+    this.isUri = isUri;
+  }
+
+  public boolean getServer() {
+    return isServer;
+  }
+
+  public void setServer(boolean isServer) {
+    this.isServer = isServer;
+  }
+
+  public boolean isSentryPrivObjectDesc() {
+    return isServer || isUri;
+  }
+
+}
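
A short usage sketch (hypothetical values; setObject() is assumed to be inherited from
Hive's PrivilegeObjectDesc):

    SentryHivePrivilegeObjectDesc desc = new SentryHivePrivilegeObjectDesc();
    desc.setUri(true);
    desc.setObject("hdfs://namenode:8020/data/ext");  // example URI, not from this commit
    // isSentryPrivObjectDesc() is true for URI- or server-scoped objects
    assert desc.isSentryPrivObjectDesc();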

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java
new file mode 100644
index 0000000..6df939f
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHookBase.java
@@ -0,0 +1,826 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive;
+
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
+import java.io.Serializable;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URL;
+import java.security.CodeSource;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.Entity.Type;
+import org.apache.hadoop.hive.ql.hooks.Hook;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.core.common.Subject;
+import org.apache.sentry.core.common.utils.PathUtils;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Column;
+import org.apache.sentry.core.model.db.DBModelAction;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Table;
+import org.apache.sentry.provider.cache.PrivilegeCache;
+import org.apache.sentry.provider.cache.SimplePrivilegeCache;
+import org.apache.sentry.provider.common.AuthorizationProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Splitter;
+import com.google.common.collect.ImmutableList;
+
+public abstract class HiveAuthzBindingHookBase extends AbstractSemanticAnalyzerHook {
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HiveAuthzBindingHookBase.class);
+  protected final HiveAuthzBinding hiveAuthzBinding;
+  protected final HiveAuthzConf authzConf;
+  protected Database currDB = Database.ALL;
+  protected Table currTab;
+  protected AccessURI udfURI;
+  protected AccessURI serdeURI;
+  protected AccessURI partitionURI;
+  protected Table currOutTab = null;
+  protected Database currOutDB = null;
+  protected final List<String> serdeWhiteList;
+  protected boolean serdeURIPrivilegesEnabled;
+
+  protected final static HiveAuthzPrivileges columnMetaDataPrivilege =
+      new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
+          .addInputObjectPriviledge(AuthorizableType.Column,
+              EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT))
+          .setOperationScope(HiveOperationScope.COLUMN).setOperationType(HiveOperationType.INFO)
+          .build();
+
+  // True if this is a basic DESCRIBE <table> operation. False for other DESCRIBE variants
+  // like DESCRIBE [FORMATTED|EXTENDED]. Required because Hive treats these stmts as the same
+  // HiveOperationType, but we want to enforce different privileges on each statement.
+  // Basic DESCRIBE <table> is allowed with only column-level privs, while the variants
+  // require table-level privileges.
+  protected boolean isDescTableBasic = false;
+
+  public HiveAuthzBindingHookBase() throws Exception {
+    SessionState session = SessionState.get();
+    if(session == null) {
+      throw new IllegalStateException("Session has not been started");
+    }
+    // HACK: set a random classname to force the Auth V2 in Hive
+    SessionState.get().setAuthorizer(null);
+
+    HiveConf hiveConf = session.getConf();
+    if(hiveConf == null) {
+      throw new IllegalStateException("Session HiveConf is null");
+    }
+    authzConf = loadAuthzConf(hiveConf);
+    hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);
+    String serdeWhiteLists =
+        authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST,
+            HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT);
+    serdeWhiteList = Arrays.asList(serdeWhiteLists.split(","));
+    serdeURIPrivilegesEnabled =
+        authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED,
+            HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT);
+
+    FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
+  }
+
+  public static HiveAuthzConf loadAuthzConf(HiveConf hiveConf) {
+    boolean deprecatedConfigFile = false;
+    HiveAuthzConf newAuthzConf = null;
+    String hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+    if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+      hiveAuthzConf = hiveConf.get(HiveAuthzConf.HIVE_ACCESS_CONF_URL);
+      deprecatedConfigFile = true;
+    }
+
+    if(hiveAuthzConf == null || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+      throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+          + " value '" + hiveAuthzConf + "' is invalid.");
+    }
+    try {
+      newAuthzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+    } catch (MalformedURLException e) {
+      if (deprecatedConfigFile) {
+        throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_ACCESS_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
+      } else {
+        throw new IllegalArgumentException("Configuration key " + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+            + " specifies a malformed URL '" + hiveAuthzConf + "'", e);
+      }
+    }
+    return newAuthzConf;
+  }
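+
+  // Illustrative usage (assumed caller, not part of this commit): point
+  // HIVE_SENTRY_CONF_URL at a sentry-site.xml before loading, e.g.
+  //   HiveConf hiveConf = new HiveConf();
+  //   hiveConf.set(HiveAuthzConf.HIVE_SENTRY_CONF_URL,
+  //       "file:///etc/sentry/conf/sentry-site.xml");  // assumed path
+  //   HiveAuthzConf authzConf = HiveAuthzBindingHookBase.loadAuthzConf(hiveConf);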
+
+  @Override
+  public abstract ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
+      throws SemanticException;
+
+  /**
+   * Post analyze hook that invokes hive auth bindings
+   */
+  @Override
+  public abstract void postAnalyze(HiveSemanticAnalyzerHookContext context,
+      List<Task<? extends Serializable>> rootTasks) throws SemanticException;
+
+  protected void executeOnFailureHooks(HiveSemanticAnalyzerHookContext context,
+      HiveOperation hiveOp, AuthorizationException e) {
+    SentryOnFailureHookContext hookCtx = new SentryOnFailureHookContextImpl(
+        context.getCommand(), context.getInputs(), context.getOutputs(),
+        hiveOp, currDB, currTab, udfURI, null, context.getUserName(),
+        context.getIpAddress(), e, context.getConf());
+    String csHooks = authzConf.get(
+        HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
+
+    try {
+      for (Hook aofh : getHooks(csHooks)) {
+        ((SentryOnFailureHook)aofh).run(hookCtx);
+      }
+    } catch (Exception ex) {
+      LOG.error("Error executing hook:", ex);
+    }
+  }
+
+  @VisibleForTesting
+  protected static AccessURI extractPartition(ASTNode ast) throws SemanticException {
+    for (int i = 0; i < ast.getChildCount(); i++) {
+      ASTNode child = (ASTNode)ast.getChild(i);
+      if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION &&
+          child.getChildCount() == 1) {
+        return parseURI(BaseSemanticAnalyzer.
+          unescapeSQLString(child.getChild(0).getText()));
+      }
+    }
+    return null;
+  }
+
+  @VisibleForTesting
+  protected static AccessURI parseURI(String uri) throws SemanticException {
+    return parseURI(uri, false);
+  }
+
+  @VisibleForTesting
+  protected static AccessURI parseURI(String uri, boolean isLocal)
+      throws SemanticException {
+    try {
+      HiveConf conf = SessionState.get().getConf();
+      String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
+      Path warehousePath = new Path(warehouseDir);
+      if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) {
+        FileSystem fs = FileSystem.get(conf);
+        warehouseDir = fs.makeQualified(warehousePath).toUri().toString();
+      }
+      return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
+    } catch (Exception e) {
+      throw new SemanticException("Error parsing URI " + uri + ": " +
+        e.getMessage(), e);
+    }
+  }
+
+  // Find the current database for the session
+  protected Database getCanonicalDb() {
+    return new Database(SessionState.get().getCurrentDatabase());
+  }
+
+  protected Database extractDatabase(ASTNode ast) throws SemanticException {
+    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
+    if (tableName.contains(".")) {
+      return new Database(tableName.split("\\.")[0]);
+    } else {
+      return getCanonicalDb();
+    }
+  }
+
+  protected Table extractTable(ASTNode ast) throws SemanticException {
+    String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
+    if (tableName.contains(".")) {
+      return new Table(tableName.split("\\.")[1]);
+    } else {
+      return new Table(tableName);
+    }
+  }
+
+  public static void runFailureHook(SentryOnFailureHookContext hookContext,
+      String csHooks) {
+    try {
+      for (Hook aofh : getHooks(csHooks)) {
+        ((SentryOnFailureHook) aofh).run(hookContext);
+      }
+    } catch (Exception ex) {
+      LOG.error("Error executing hook:", ex);
+    }
+  }
+  /**
+   * Convert the input/output entities into authorizables. Generate
+   * authorizables for cases like Database and metadata operations where the
+   * compiler doesn't capture entities. Invoke the hive binding to validate
+   * permissions.
+   *
+   * @param context
+   * @param stmtAuthObject
+   * @param stmtOperation
+   * @throws AuthorizationException
+   */
+  protected void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
+      HiveAuthzPrivileges stmtAuthObject, HiveOperation stmtOperation) throws  AuthorizationException {
+    Set<ReadEntity> inputs = context.getInputs();
+    Set<WriteEntity> outputs = context.getOutputs();
+    List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+    List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+
+    if(LOG.isDebugEnabled()) {
+      LOG.debug("stmtAuthObject.getOperationScope() = " + stmtAuthObject.getOperationScope());
+      LOG.debug("context.getInputs() = " + context.getInputs());
+      LOG.debug("context.getOutputs() = " + context.getOutputs());
+    }
+
+    // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges, while
+    // still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level.
+    // This is done by treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires column
+    // level privs.
+    if (isDescTableBasic) {
+      stmtAuthObject = columnMetaDataPrivilege;
+    }
+
+    switch (stmtAuthObject.getOperationScope()) {
+
+    case SERVER :
+      // Validate server-level privileges if applicable, e.g. CREATE UDF, register jar, etc.
+      List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
+      serverHierarchy.add(hiveAuthzBinding.getAuthServer());
+      inputHierarchy.add(serverHierarchy);
+      break;
+    case DATABASE:
+      // workaround for database scope statements (create/alter/drop db)
+      List<DBModelAuthorizable> dbHierarchy = new ArrayList<DBModelAuthorizable>();
+      dbHierarchy.add(hiveAuthzBinding.getAuthServer());
+      dbHierarchy.add(currDB);
+      inputHierarchy.add(dbHierarchy);
+      outputHierarchy.add(dbHierarchy);
+
+      getInputHierarchyFromInputs(inputHierarchy, inputs);
+
+      if (serdeURI != null) {
+        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
+        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+        serdeUriHierarchy.add(serdeURI);
+        outputHierarchy.add(serdeUriHierarchy);
+      }
+      break;
+    case TABLE:
+      // workaround for add partitions
+      if(partitionURI != null) {
+        inputHierarchy.add(ImmutableList.of(hiveAuthzBinding.getAuthServer(), partitionURI));
+      }
+
+      getInputHierarchyFromInputs(inputHierarchy, inputs);
+      for (WriteEntity writeEntity: outputs) {
+        if (filterWriteEntity(writeEntity)) {
+          continue;
+        }
+        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+        entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
+        outputHierarchy.add(entityHierarchy);
+      }
+      // workaround for metadata queries.
+      // Capture the table name in pre-analyze and include that in the input entity list
+      if (currTab != null) {
+        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+        externalAuthorizableHierarchy.add(currDB);
+        externalAuthorizableHierarchy.add(currTab);
+        inputHierarchy.add(externalAuthorizableHierarchy);
+      }
+
+      // workaround for DDL statements
+      // Capture the table name in pre-analyze and include that in the output entity list
+      if (currOutTab != null) {
+        List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+        externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+        externalAuthorizableHierarchy.add(currOutDB);
+        externalAuthorizableHierarchy.add(currOutTab);
+        outputHierarchy.add(externalAuthorizableHierarchy);
+      }
+
+      if (serdeURI != null) {
+        List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
+        serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+        serdeUriHierarchy.add(serdeURI);
+        outputHierarchy.add(serdeUriHierarchy);
+      }
+
+      break;
+    case FUNCTION:
+      /* The 'FUNCTION' privilege scope is currently used for
+       *  - CREATE TEMP FUNCTION
+       *  - DROP TEMP FUNCTION.
+       */
+      if (udfURI != null) {
+        List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
+        udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+        udfUriHierarchy.add(udfURI);
+        inputHierarchy.add(udfUriHierarchy);
+        for (WriteEntity writeEntity : outputs) {
+          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+          entityHierarchy.addAll(getAuthzHierarchyFromEntity(writeEntity));
+          outputHierarchy.add(entityHierarchy);
+        }
+      }
+      break;
+    case CONNECT:
+      /* 'CONNECT' is an implicit privilege scope currently used for
+       *  - USE <db>
+       *  It's allowed when the user has any privilege on the current database. For application
+       *  backward compatibility, we allow (optional) implicit connect permission on 'default' db.
+       */
+      List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
+      connectHierarchy.add(hiveAuthzBinding.getAuthServer());
+      // by default allow connect access to default db
+      Table currTbl = Table.ALL;
+      Column currCol = Column.ALL;
+      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName()) &&
+          "false".equalsIgnoreCase(authzConf.
+              get(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
+        currDB = Database.ALL;
+        currTbl = Table.SOME;
+      }
+
+      connectHierarchy.add(currDB);
+      connectHierarchy.add(currTbl);
+      connectHierarchy.add(currCol);
+
+      inputHierarchy.add(connectHierarchy);
+      outputHierarchy.add(connectHierarchy);
+      break;
+    case COLUMN:
+      for (ReadEntity readEntity: inputs) {
+        if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
+          addColumnHierarchy(inputHierarchy, readEntity);
+        } else {
+          List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+          entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+          entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
+          entityHierarchy.add(Column.ALL);
+          inputHierarchy.add(entityHierarchy);
+        }
+      }
+      break;
+    default:
+      throw new AuthorizationException("Unknown operation scope type " +
+          stmtAuthObject.getOperationScope().toString());
+    }
+
+    HiveAuthzBinding binding = null;
+    try {
+      binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName());
+    } catch (SemanticException e) {
+      // Will use the original hiveAuthzBinding
+      binding = hiveAuthzBinding;
+    }
+    // validate permission
+    binding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy,
+        outputHierarchy);
+  }
+
+  // Build the hierarchy of authorizable object for the given entity type.
+  private List<DBModelAuthorizable> getAuthzHierarchyFromEntity(Entity entity) {
+    List<DBModelAuthorizable> objectHierarchy = new ArrayList<DBModelAuthorizable>();
+    switch (entity.getType()) {
+    case TABLE:
+      objectHierarchy.add(new Database(entity.getTable().getDbName()));
+      objectHierarchy.add(new Table(entity.getTable().getTableName()));
+      break;
+    case PARTITION:
+    case DUMMYPARTITION:
+      objectHierarchy.add(new Database(entity.getPartition().getTable().getDbName()));
+      objectHierarchy.add(new Table(entity.getPartition().getTable().getTableName()));
+      break;
+    case DFS_DIR:
+    case LOCAL_DIR:
+      try {
+        objectHierarchy.add(parseURI(entity.toString(),
+            entity.getType().equals(Entity.Type.LOCAL_DIR)));
+      } catch (Exception e) {
+        throw new AuthorizationException("Failed to get File URI", e);
+      }
+      break;
+    case DATABASE:
+    case FUNCTION:
+      // TODO use database entities from compiler instead of capturing from AST
+      break;
+    default:
+      throw new UnsupportedOperationException("Unsupported entity type " +
+          entity.getType().name());
+    }
+    return objectHierarchy;
+  }
+
+  /**
+   * Add column-level hierarchy to inputHierarchy.
+   *
+   * @param inputHierarchy
+   * @param entity
+   */
+  protected void addColumnHierarchy(List<List<DBModelAuthorizable>> inputHierarchy,
+      ReadEntity entity) {
+    List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+    entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+    entityHierarchy.addAll(getAuthzHierarchyFromEntity(entity));
+
+    switch (entity.getType()) {
+    case TABLE:
+    case PARTITION:
+      List<String> cols = entity.getAccessedColumns();
+      for (String col : cols) {
+        List<DBModelAuthorizable> colHierarchy = new ArrayList<DBModelAuthorizable>(entityHierarchy);
+        colHierarchy.add(new Column(col));
+        inputHierarchy.add(colHierarchy);
+      }
+      break;
+    default:
+      inputHierarchy.add(entityHierarchy);
+    }
+  }
+
+  /**
+   * Get the authorizables from the inputs and put them into inputHierarchy.
+   *
+   * @param inputHierarchy
+   * @param inputs
+   */
+  protected void getInputHierarchyFromInputs(List<List<DBModelAuthorizable>> inputHierarchy,
+      Set<ReadEntity> inputs) {
+    for (ReadEntity readEntity: inputs) {
+      // skip the tables/view that are part of expanded view definition
+      // skip the Hive generated dummy entities created for queries like 'select <expr>'
+      if (isChildTabForView(readEntity) || isDummyEntity(readEntity)) {
+        continue;
+      }
+      if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
+        addColumnHierarchy(inputHierarchy, readEntity);
+      } else {
+        List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+        entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+        entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
+        inputHierarchy.add(entityHierarchy);
+      }
+    }
+  }
+
+  // Check if this write entity needs to be skipped
+  private boolean filterWriteEntity(WriteEntity writeEntity)
+      throws AuthorizationException {
+    // skip URI validation for session scratch file URIs
+    if (writeEntity.isTempURI()) {
+      return true;
+    }
+    try {
+      if (writeEntity.getTyp().equals(Type.DFS_DIR)
+          || writeEntity.getTyp().equals(Type.LOCAL_DIR)) {
+        HiveConf conf = SessionState.get().getConf();
+        String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
+        URI scratchURI = new URI(PathUtils.parseDFSURI(warehouseDir,
+          conf.getVar(HiveConf.ConfVars.SCRATCHDIR)));
+        URI requestURI = new URI(PathUtils.parseDFSURI(warehouseDir,
+          writeEntity.getLocation().getPath()));
+        LOG.debug("scratchURI = " + scratchURI + ", requestURI = " + requestURI);
+        if (PathUtils.impliesURI(scratchURI, requestURI)) {
+          return true;
+        }
+        URI localScratchURI = new URI(PathUtils.parseLocalURI(conf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR)));
+        URI localRequestURI = new URI(PathUtils.parseLocalURI(writeEntity.getLocation().getPath()));
+        LOG.debug("localScratchURI = " + localScratchURI + ", localRequestURI = " + localRequestURI);
+        if (PathUtils.impliesURI(localScratchURI, localRequestURI)) {
+          return true;
+        }
+      }
+    } catch (Exception e) {
+      throw new AuthorizationException("Failed to extract uri details", e);
+    }
+    return false;
+  }
+
+  public static List<String> filterShowTables(
+      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
+      HiveOperation operation, String userName, String dbName)
+          throws SemanticException {
+    List<String> filteredResult = new ArrayList<String>();
+    Subject subject = new Subject(userName);
+    HiveAuthzPrivileges tableMetaDataPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
+        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
+        setOperationScope(HiveOperationScope.TABLE).
+        setOperationType(HiveOperationType.INFO).
+        build();
+
+    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
+
+    for (String tableName : queryResult) {
+      // if user has privileges on table, add to filtered list, else discard
+      Table table = new Table(tableName);
+      Database database = new Database(dbName);
+
+      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+      externalAuthorizableHierarchy.add(database);
+      externalAuthorizableHierarchy.add(table);
+      externalAuthorizableHierarchy.add(Column.ALL);
+      inputHierarchy.add(externalAuthorizableHierarchy);
+
+      try {
+        // do the authorization by new HiveAuthzBinding with PrivilegeCache
+        hiveBindingWithPrivilegeCache.authorize(operation, tableMetaDataPrivilege, subject,
+            inputHierarchy, outputHierarchy);
+        filteredResult.add(table.getName());
+      } catch (AuthorizationException e) {
+        // Squash the exception: the user doesn't have privileges on the table,
+        // so it is not added to the filtered list.
+      }
+    }
+    return filteredResult;
+  }
+
+  public static List<FieldSchema> filterShowColumns(
+      HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols,
+      HiveOperation operation, String userName, String tableName, String dbName)
+          throws SemanticException {
+    List<FieldSchema> filteredResult = new ArrayList<FieldSchema>();
+    Subject subject = new Subject(userName);
+    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
+
+    Database database = new Database(dbName);
+    Table table = new Table(tableName);
+    for (FieldSchema col : cols) {
+      // if user has privileges on column, add to filtered list, else discard
+      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+      externalAuthorizableHierarchy.add(database);
+      externalAuthorizableHierarchy.add(table);
+      externalAuthorizableHierarchy.add(new Column(col.getName()));
+      inputHierarchy.add(externalAuthorizableHierarchy);
+
+      try {
+        // do the authorization by new HiveAuthzBinding with PrivilegeCache
+        hiveBindingWithPrivilegeCache.authorize(operation, columnMetaDataPrivilege, subject,
+            inputHierarchy, outputHierarchy);
+        filteredResult.add(col);
+      } catch (AuthorizationException e) {
+        // Squash the exception: the user doesn't have privileges on the column,
+        // so it is not added to the filtered list.
+      }
+    }
+    return filteredResult;
+  }
+
+  public static List<String> filterShowDatabases(
+      HiveAuthzBinding hiveAuthzBinding, List<String> queryResult,
+      HiveOperation operation, String userName) throws SemanticException {
+    List<String> filteredResult = new ArrayList<String>();
+    Subject subject = new Subject(userName);
+    HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
+
+    HiveAuthzPrivileges anyPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
+        addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
+        addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)).
+        setOperationScope(HiveOperationScope.CONNECT).
+        setOperationType(HiveOperationType.QUERY).
+        build();
+
+    for (String dbName:queryResult) {
+      // if user has privileges on database, add to filtered list, else discard
+
+      // if default is not restricted, continue
+      if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName) && "false".equalsIgnoreCase(
+        hiveAuthzBinding.getAuthzConf().get(
+              HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(),
+              "false"))) {
+        filteredResult.add(DEFAULT_DATABASE_NAME);
+        continue;
+      }
+
+      Database database = new Database(dbName);
+
+      List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+      List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+      externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+      externalAuthorizableHierarchy.add(database);
+      externalAuthorizableHierarchy.add(Table.ALL);
+      externalAuthorizableHierarchy.add(Column.ALL);
+      inputHierarchy.add(externalAuthorizableHierarchy);
+
+      try {
+        // do the authorization by new HiveAuthzBinding with PrivilegeCache
+        hiveBindingWithPrivilegeCache.authorize(operation, anyPrivilege, subject,
+            inputHierarchy, outputHierarchy);
+        filteredResult.add(database.getName());
+      } catch (AuthorizationException e) {
+        // Squash the exception: the user doesn't have privileges on the database,
+        // so it is not added to the filtered list.
+      }
+    }
+
+    return filteredResult;
+  }
+
+  /**
+   * Check if the given read entity is a table that has parents of type Table.
+   * The Hive compiler performs a query rewrite by replacing a view with its definition. In the
+   * process, it captures both the original view and the tables/views that it selects from.
+   * The access authorization is only interested in the top-level views and not the underlying tables.
+   * @param readEntity
+   * @return true if the entity is a table or partition that only appears as part of a view expansion
+   */
+  private boolean isChildTabForView(ReadEntity readEntity) {
+    // If this is a table added for view, then we need to skip that
+    if (!readEntity.getType().equals(Type.TABLE) && !readEntity.getType().equals(Type.PARTITION)) {
+      return false;
+    }
+    if (readEntity.getParents() != null && readEntity.getParents().size() > 0) {
+      for (ReadEntity parentEntity : readEntity.getParents()) {
+        if (!parentEntity.getType().equals(Type.TABLE)) {
+          return false;
+        }
+      }
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  /**
+   * Returns the hooks specified in a configuration variable. The hooks are returned in a list in
+   * the order they were specified in the configuration variable.
+   *
+   * @param csHooks     A comma-separated list of hook class names.
+   * @return            A list of the hooks, in the order they are listed in csHooks
+   * @throws Exception
+   */
+  private static <T extends Hook> List<T> getHooks(String csHooks) throws Exception {
+
+    List<T> hooks = new ArrayList<T>();
+    if (csHooks.isEmpty()) {
+      return hooks;
+    }
+    for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) {
+      try {
+        @SuppressWarnings("unchecked")
+        T hook =
+            (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance();
+        hooks.add(hook);
+      } catch (ClassNotFoundException e) {
+        LOG.error(hookClass + " class not found: " + e.getMessage());
+        throw e;
+      }
+    }
+
+    return hooks;
+  }
+
+  // Check if the given entity is identified as dummy by Hive compilers.
+  private boolean isDummyEntity(Entity entity) {
+    return entity.isDummy();
+  }
+
+  // create hiveBinding with PrivilegeCache
+  private static HiveAuthzBinding getHiveBindingWithPrivilegeCache(HiveAuthzBinding hiveAuthzBinding,
+      String userName) throws SemanticException {
+    // get the original HiveAuthzBinding, and get the user's privileges by AuthorizationProvider
+    AuthorizationProvider authProvider = hiveAuthzBinding.getCurrentAuthProvider();
+    Set<String> userPrivileges = authProvider.getPolicyEngine().getPrivileges(
+            authProvider.getGroupMapping().getGroups(userName), hiveAuthzBinding.getActiveRoleSet(),
+            hiveAuthzBinding.getAuthServer());
+
+    // create PrivilegeCache using user's privileges
+    PrivilegeCache privilegeCache = new SimplePrivilegeCache(userPrivileges);
+    try {
+      // create new instance of HiveAuthzBinding whose backend provider should be SimpleCacheProviderBackend
+      return new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveServer2, hiveAuthzBinding.getHiveConf(),
+              hiveAuthzBinding.getAuthzConf(), privilegeCache);
+    } catch (Exception e) {
+      LOG.error("Can not create HiveAuthzBinding with privilege cache.");
+      throw new SemanticException(e);
+    }
+  }
+
+  private static boolean hasPrefixMatch(List<String> prefixList, final String str) {
+    for (String prefix : prefixList) {
+      if (str.startsWith(prefix)) {
+        return true;
+      }
+    }
+
+    return false;
+  }
+
+  /**
+   * Set the Serde URI privileges. If the URI privileges are not set (serdeURI will be null),
+   * the URI authorization checks will be skipped.
+   */
+  protected void setSerdeURI(String serdeClassName) throws SemanticException {
+    if (!serdeURIPrivilegesEnabled) {
+      return;
+    }
+
+    // Whitelisted Serde jars can be used by any user. Whitelist checking is
+    // done by comparing the Java package name. The assumption is that the cluster
+    // admin will ensure there is no Java namespace collision.
+    // E.g. org.apache.hadoop.hive.serde2 is used by Hive, and the cluster admin should
+    // ensure no custom Serde class is introduced under the same namespace.
+    if (!hasPrefixMatch(serdeWhiteList, serdeClassName)) {
+      try {
+        CodeSource serdeSrc =
+            Class.forName(serdeClassName, true, Utilities.getSessionSpecifiedClassLoader())
+                .getProtectionDomain().getCodeSource();
+        if (serdeSrc == null) {
+          throw new SemanticException("Could not resolve the jar for Serde class " + serdeClassName);
+        }
+
+        String serdeJar = serdeSrc.getLocation().getPath();
+        if (serdeJar == null || serdeJar.isEmpty()) {
+          throw new SemanticException("Could not find the jar for Serde class " + serdeClassName
+              + "to validate privileges");
+        }
+
+        serdeURI = parseURI(serdeSrc.getLocation().toString(), true);
+      } catch (ClassNotFoundException e) {
+        throw new SemanticException("Error retrieving Serde class:" + e.getMessage(), e);
+      }
+    }
+  }
+
+  protected HiveOperation getCurrentHiveStmtOp() {
+    SessionState sessState = SessionState.get();
+    if (sessState == null) {
+      // TODO: Warn
+      return null;
+    }
+    return sessState.getHiveOperation();
+  }
+
+  protected Subject getCurrentSubject(HiveSemanticAnalyzerHookContext context) {
+    // Extract the username from the hook context
+    return new Subject(context.getUserName());
+  }
+
+}
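
The static filterShowDatabases/filterShowTables helpers above are the entry points other
components (e.g. the metastore filter hook) call to trim metadata results per user. A
hypothetical call site, where 'binding', 'allDatabases', 'allTables' and 'userName' are
assumed to be in scope:

    List<String> visibleDbs = HiveAuthzBindingHookBase.filterShowDatabases(
        binding, allDatabases, HiveOperation.SHOWDATABASES, userName);
    List<String> visibleTables = HiveAuthzBindingHookBase.filterShowTables(
        binding, allTables, HiveOperation.SHOWTABLES, userName, "default");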

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
new file mode 100644
index 0000000..630bef3
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sentry.policy.common.PolicyConstants;
+import org.apache.sentry.provider.common.PolicyFileConstants;
+import org.apache.sentry.provider.common.ProviderBackendContext;
+import org.apache.sentry.provider.file.SimpleFileProviderBackend;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Charsets;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.google.common.collect.Table;
+import com.google.common.io.Files;
+
+/**
+ * SentryIniPolicyFileFormatter parses Sentry mapping data from, and writes it to, files in
+ * ini format, e.g.:
+ * [groups]
+ * group1=role1
+ * [roles]
+ * role1=server=server1
+ */
+public class SentryIniPolicyFileFormatter implements SentryPolicyFileFormatter {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SentryIniPolicyFileFormatter.class);
+
+  private static final String NL = System.getProperty("line.separator", "\n");
+
+  /**
+   * Write the sentry mapping data to ini file.
+   *
+   * @param resourcePath
+   *        The path of the output file
+   * @param sentryMappingData
+   *        The map of sentry mapping data; e.g. for the following mapping data:
+   *        group1=role1,role2
+   *        group2=role2,role3
+   *        role1=server=server1->db=db1
+   *        role2=server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2
+   *        role3=server=server1->url=hdfs://localhost/path
+   *
+   *        The sentryMappingData will be passed in as:
+   *        {
+   *        groups={group1=[role1, role2], group2=[role2, role3]},
+   *        roles={role1=[server=server1->db=db1],
+   *        role2=[server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2],
+   *        role3=[server=server1->url=hdfs://localhost/path]
+   *        }
+   *        }
+   */
+  @Override
+  public void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
+      throws Exception {
+    File destFile = new File(resourcePath);
+    if (destFile.exists() && !destFile.delete()) {
+      throw new IllegalStateException("Unable to delete " + destFile);
+    }
+    String contents = Joiner
+        .on(NL)
+        .join(
+        generateSection(PolicyFileConstants.GROUPS,
+                sentryMappingData.get(PolicyFileConstants.GROUPS)),
+        generateSection(PolicyFileConstants.ROLES,
+                sentryMappingData.get(PolicyFileConstants.ROLES)),
+            "");
+    LOGGER.info("Writing policy file to " + destFile + ":\n" + contents);
+    Files.write(contents, destFile, Charsets.UTF_8);
+  }
+
+  /**
+   * Parse the ini file and return a map with all mapping data.
+   *
+   * @param resourcePath
+   *        The path of the input file
+   * @param conf
+   *        The configuration info
+   * @return the result of sentry mapping data in map structure.
+   */
+  @Override
+  public Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
+      throws Exception {
+    Map<String, Map<String, Set<String>>> resultMap = Maps.newHashMap();
+    // SimpleFileProviderBackend is used to parse the ini file
+    SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, resourcePath);
+    ProviderBackendContext context = new ProviderBackendContext();
+    context.setAllowPerDatabase(true);
+    // parse the ini file
+    policyFileBackend.initialize(context);
+
+    // SimpleFileProviderBackend has parsed the input file; the data comes back as a Table.
+    Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend
+        .getGroupRolePrivilegeTable();
+    Map<String, Set<String>> groupRolesMap = Maps.newHashMap();
+    Map<String, Set<String>> rolePrivilegesMap = Maps.newHashMap();
+    for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
+      for (String roleName : groupRolePrivilegeTable.columnKeySet()) {
+        // get the roles set for the current groupName
+        Set<String> tempRoles = groupRolesMap.get(groupName);
+        if (tempRoles == null) {
+          tempRoles = Sets.newHashSet();
+        }
+        Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
+        // only keep [group, role] pairs that carry privileges; if no privilege exists,
+        // the [group, role] info is discarded.
+        if (privileges != null) {
+          // update [group, role] mapping data
+          tempRoles.add(roleName);
+          groupRolesMap.put(groupName, tempRoles);
+          // update [role, privilege] mapping data
+          rolePrivilegesMap.put(roleName, privileges);
+        }
+      }
+    }
+    resultMap.put(PolicyFileConstants.GROUPS, groupRolesMap);
+    resultMap.put(PolicyFileConstants.ROLES, rolePrivilegesMap);
+    return resultMap;
+  }
+
+  // generate the ini section according to the mapping data.
+  private String generateSection(String name, Map<String, Set<String>> mappingData) {
+    if (mappingData.isEmpty()) {
+      return "";
+    }
+    List<String> lines = Lists.newArrayList();
+    lines.add("[" + name + "]");
+    for (Map.Entry<String, Set<String>> entry : mappingData.entrySet()) {
+      lines.add(PolicyConstants.KV_JOINER.join(entry.getKey(),
+          PolicyConstants.ROLE_JOINER.join(entry.getValue())));
+    }
+    return Joiner.on(NL).join(lines);
+  }
+
+}
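
As a usage illustration, the formatter round-trips the mapping structure described in the
javadoc above; a minimal sketch (the file path is made up, and the imports from the class
above are assumed):

    Map<String, Set<String>> groups = Maps.newHashMap();
    groups.put("group1", Sets.newHashSet("role1"));
    Map<String, Set<String>> roles = Maps.newHashMap();
    roles.put("role1", Sets.newHashSet("server=server1->db=db1"));

    Map<String, Map<String, Set<String>>> data = Maps.newHashMap();
    data.put(PolicyFileConstants.GROUPS, groups);
    data.put(PolicyFileConstants.ROLES, roles);

    SentryPolicyFileFormatter formatter = new SentryIniPolicyFileFormatter();
    formatter.write("/tmp/sentry-policy.ini", data);
    // parse() reads the file back through SimpleFileProviderBackend
    Map<String, Map<String, Set<String>>> parsed =
        formatter.parse("/tmp/sentry-policy.ini", new Configuration());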

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
new file mode 100644
index 0000000..45a2925
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHook.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import org.apache.hadoop.hive.ql.hooks.Hook;
+
+/**
+ *
+ * SentryOnFailureHook allows Sentry to be extended
+ * with custom logic to be executed upon authorization failure.
+ *
+ */
+public interface SentryOnFailureHook extends Hook {
+
+  /**
+   *
+   * @param context
+   *     The hook context passed to each hook.
+   * @throws Exception
+   */
+  void run(SentryOnFailureHookContext context) throws Exception;
+}
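
A deployment extends failure handling by implementing this interface and registering the
class with the binding's failure-hook configuration. A minimal audit-logging sketch (the
class name is illustrative; the context getters come from SentryOnFailureHookContext,
shown in the next file):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class AuditingOnFailureHook implements SentryOnFailureHook {
      private static final Logger LOG =
          LoggerFactory.getLogger(AuditingOnFailureHook.class);

      @Override
      public void run(SentryOnFailureHookContext context) throws Exception {
        // record who failed to do what, and from where
        LOG.warn("Authorization failure: user=" + context.getUserName()
            + " ip=" + context.getIpAddress()
            + " operation=" + context.getHiveOp()
            + " command=" + context.getCommand(), context.getException());
      }
    }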

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
new file mode 100644
index 0000000..c101a4f
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Table;
+
+/**
+ * Context information provided by Sentry to implementations
+ * of SentryOnFailureHook.
+ */
+public interface SentryOnFailureHookContext  {
+
+  /**
+   * @return the command attempted by user
+   */
+  String getCommand();
+
+  /**
+   * @return the set of read entities
+   */
+  Set<ReadEntity> getInputs();
+
+  /**
+   * @return the set of write entities
+   */
+  Set<WriteEntity> getOutputs();
+
+  /**
+   * @return the operation
+   */
+  HiveOperation getHiveOp();
+
+  /**
+   * @return the user name
+   */
+  String getUserName();
+
+  /**
+   * @return the ip address
+   */
+  String getIpAddress();
+
+  /**
+   * @return the database object
+   */
+  Database getDatabase();
+
+  /**
+   * @return the table object
+   */
+  Table getTable();
+
+  /**
+   * @return the udf URI
+   */
+  AccessURI getUdfURI();
+
+  /**
+   * @return the partition URI
+   */
+  AccessURI getPartitionURI();
+
+  /**
+   * @return the authorization failure exception
+   */
+  AuthorizationException getException();
+
+  /**
+   * @return the config
+   */
+  Configuration getConf();
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
new file mode 100644
index 0000000..f97d7f3
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContextImpl.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Table;
+
+public class SentryOnFailureHookContextImpl implements SentryOnFailureHookContext {
+
+  private final String command;
+  private final Set<ReadEntity> inputs;
+  private final Set<WriteEntity> outputs;
+  private final HiveOperation hiveOp;
+  private final String userName;
+  private final String ipAddress;
+  private final Database database;
+  private final Table table;
+  private final AccessURI udfURI;
+  private final AccessURI partitionURI;
+  private final AuthorizationException authException;
+  private final Configuration conf;
+
+  public SentryOnFailureHookContextImpl(String command,
+      Set<ReadEntity> inputs, Set<WriteEntity> outputs, HiveOperation hiveOp,
+      Database db, Table tab, AccessURI udfURI, AccessURI partitionURI,
+      String userName, String ipAddress, AuthorizationException e,
+      Configuration conf) {
+    this.command = command;
+    this.inputs = inputs;
+    this.outputs = outputs;
+    this.hiveOp = hiveOp;
+    this.userName = userName;
+    this.ipAddress = ipAddress;
+    this.database = db;
+    this.table = tab;
+    this.udfURI = udfURI;
+    this.partitionURI = partitionURI;
+    this.authException = e;
+    this.conf = conf;
+  }
+
+  @Override
+  public String getCommand() {
+    return command;
+  }
+
+  @Override
+  public Set<ReadEntity> getInputs() {
+    return inputs;
+  }
+
+  @Override
+  public Set<WriteEntity> getOutputs() {
+    return outputs;
+  }
+
+  @Override
+  public HiveOperation getHiveOp() {
+    return hiveOp;
+  }
+
+  @Override
+  public String getUserName() {
+    return userName;
+  }
+
+  @Override
+  public String getIpAddress() {
+    return ipAddress;
+  }
+
+  @Override
+  public Database getDatabase() {
+    return database;
+  }
+
+  @Override
+  public Table getTable() {
+    return table;
+  }
+
+  @Override
+  public AccessURI getUdfURI() {
+    return udfURI;
+  }
+
+  @Override
+  public AccessURI getPartitionURI() {
+    return partitionURI;
+  }
+
+  @Override
+  public AuthorizationException getException() {
+    return authException;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return conf;
+  }
+}
\ No newline at end of file
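
At failure time the binding builds one of these contexts and hands it to every configured
hook; a simplified sketch of that call site (the method and the configuredHooks parameter
are illustrative, not from the patch; imports as in the file above):

    void onAuthzFailure(String command, Set<ReadEntity> inputs, Set<WriteEntity> outputs,
        HiveOperation hiveOp, Database db, Table tab, AccessURI udfURI, AccessURI partURI,
        String userName, String ipAddress, AuthorizationException e, Configuration conf,
        List<SentryOnFailureHook> configuredHooks) throws Exception {
      SentryOnFailureHookContext ctx = new SentryOnFailureHookContextImpl(
          command, inputs, outputs, hiveOp, db, tab, udfURI, partURI,
          userName, ipAddress, e, conf);
      for (SentryOnFailureHook hook : configuredHooks) {
        hook.run(ctx); // every hook sees the same immutable context
      }
    }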

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
new file mode 100644
index 0000000..d2c6072
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatFactory.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.lang.reflect.Constructor;
+
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+
+/**
+ * SentryPolicyFileFormatFactory creates a SentryPolicyFileFormatter for the file type named in
+ * the configuration; the default formatter handles ini files.
+ */
+public class SentryPolicyFileFormatFactory {
+
+  public static SentryPolicyFileFormatter createFileFormatter(HiveAuthzConf conf) throws Exception {
+    // The default formatter is org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter,
+    // which handles ini files.
+    String policyFileFormatterName = conf.get(AuthzConfVars.AUTHZ_POLICY_FILE_FORMATTER.getVar());
+    // load the policy file formatter class
+    Constructor<?> policyFileFormatterConstructor = Class.forName(policyFileFormatterName)
+        .getDeclaredConstructor();
+    policyFileFormatterConstructor.setAccessible(true);
+    SentryPolicyFileFormatter sentryPolicyFileFormatter = (SentryPolicyFileFormatter) policyFileFormatterConstructor
+        .newInstance();
+    return sentryPolicyFileFormatter;
+  }
+}
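
As a usage illustration, the formatter class is selected through the
AuthzConfVars.AUTHZ_POLICY_FILE_FORMATTER variable; a short sketch (assuming an initialized
HiveAuthzConf named authzConf):

    // Setting the formatter explicitly; the default already points at the ini formatter.
    authzConf.set(AuthzConfVars.AUTHZ_POLICY_FILE_FORMATTER.getVar(),
        "org.apache.sentry.binding.hive.SentryIniPolicyFileFormatter");
    SentryPolicyFileFormatter formatter =
        SentryPolicyFileFormatFactory.createFileFormatter(authzConf);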

http://git-wip-us.apache.org/repos/asf/sentry/blob/7a30c819/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
new file mode 100644
index 0000000..4f465b3
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-common/src/main/java/org/apache/sentry/binding/hive/SentryPolicyFileFormatter.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * SentryPolicyFileFormatter parses Sentry mapping data from a file and writes it back out.
+ */
+public interface SentryPolicyFileFormatter {
+
+  // write the sentry mapping data to file
+  void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
+      throws Exception;
+
+  // parse the sentry mapping data from file
+  Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
+      throws Exception;
+
+}
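
Any alternative on-disk format can be supported by implementing this interface and pointing
AuthzConfVars.AUTHZ_POLICY_FILE_FORMATTER at the implementing class; a skeletal, compilable
sketch (the class name and format are illustrative; imports as in the interface above):

    public class JsonPolicyFileFormatter implements SentryPolicyFileFormatter {

      @Override
      public void write(String resourcePath, Map<String, Map<String, Set<String>>> data)
          throws Exception {
        // serialize the groups/roles maps to resourcePath in the custom format
      }

      @Override
      public Map<String, Map<String, Set<String>>> parse(String resourcePath,
          Configuration conf) throws Exception {
        // read resourcePath and rebuild the groups/roles maps
        return java.util.Collections.emptyMap();
      }
    }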

