atlas-commits mailing list archives

From: venkat...@apache.org
Subject: [38/58] [abbrv] incubator-atlas git commit: Refactor packages and scripts to Atlas (cherry picked from commit 414beba)
Date: Tue, 16 Jun 2015 23:05:12 GMT
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
new file mode 100755
index 0000000..c4db02a
--- /dev/null
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -0,0 +1,379 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.hive.hook;
+
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
+import org.apache.atlas.hive.model.HiveDataModelGenerator;
+import org.apache.atlas.hive.model.HiveDataTypes;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.util.Map;
+
+public class HiveHookIT {
+    public static final Logger LOG = org.slf4j.LoggerFactory.getLogger(HiveHookIT.class);
+
+    private static final String DGI_URL = "http://localhost:21000/";
+    private static final String CLUSTER_NAME = "test";
+    public static final String DEFAULT_DB = "default";
+    private Driver driver;
+    private MetadataServiceClient dgiClient;
+    private SessionState ss;
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        //Set-up hive session
+        HiveConf conf = getHiveConf();
+        driver = new Driver(conf);
+        ss = new SessionState(conf, System.getProperty("user.name"));
+        ss = SessionState.start(ss);
+        SessionState.setCurrentSessionState(ss);
+
+        dgiClient = new MetadataServiceClient(DGI_URL);
+    }
+
+    private HiveConf getHiveConf() {
+        HiveConf hiveConf = new HiveConf(this.getClass());
+        hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
+        hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
+        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
+        hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
+        hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
+        hiveConf.set("hive.hook.dgi.synchronous", "true");
+        hiveConf.set(HiveMetaStoreBridge.HIVE_CLUSTER_NAME, CLUSTER_NAME);
+        hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODE, true);  //to not use hdfs
+        hiveConf.setVar(HiveConf.ConfVars.HIVETESTMODEPREFIX, "");
+        hiveConf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
+        return hiveConf;
+    }
+
+    private void runCommand(String cmd) throws Exception {
+        ss.setCommandType(null);
+        driver.run(cmd);
+    }
+
+    @Test
+    public void testCreateDatabase() throws Exception {
+        String dbName = "db" + random();
+        runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1', 'p2'='v2')");
+        String dbId = assertDatabaseIsRegistered(dbName);
+        Referenceable definition = dgiClient.getEntity(dbId);
+        Map params = (Map) definition.get("parameters");
+        Assert.assertNotNull(params);
+        Assert.assertEquals(params.size(), 2);
+        Assert.assertEquals(params.get("p1"), "v1");
+
+        //There should be just one entity per dbname
+        runCommand("drop database " + dbName);
+        runCommand("create database " + dbName);
+        assertDatabaseIsRegistered(dbName);
+    }
+
+    private String dbName() {
+        return "db" + random();
+    }
+
+    private String createDatabase() throws Exception {
+        String dbName = dbName();
+        runCommand("create database " + dbName);
+        return dbName;
+    }
+
+    private String tableName() {
+        return "table" + random();
+    }
+
+    private String createTable() throws Exception {
+        return createTable(true);
+    }
+
+    private String createTable(boolean partition) throws Exception {
+        String tableName = tableName();
+        runCommand("create table " + tableName + "(id int, name string) comment 'table comment' "
+                + (partition ? " partitioned by(dt string)" : ""));
+        return tableName;
+    }
+
+    @Test
+    public void testCreateTable() throws Exception {
+        String tableName = tableName();
+        String dbName = createDatabase();
+        String colName = "col" + random();
+        runCommand("create table " + dbName + "." + tableName + "(" + colName + " int, name string)");
+        assertTableIsRegistered(dbName, tableName);
+        //there should be only one instance of the column registered
+        assertColumnIsRegistered(colName);
+
+        tableName = createTable();
+        String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
+        Referenceable tableRef = dgiClient.getEntity(tableId);
+        Assert.assertEquals(tableRef.get("tableType"), TableType.MANAGED_TABLE.name());
+        Assert.assertEquals(tableRef.get(HiveDataModelGenerator.COMMENT), "table comment");
+        String entityName = HiveMetaStoreBridge.getTableName(CLUSTER_NAME, DEFAULT_DB, tableName);
+        Assert.assertEquals(tableRef.get(HiveDataModelGenerator.NAME), entityName);
+
+        final Id sdId = (Id) tableRef.get("sd");
+        Referenceable sdRef = dgiClient.getEntity(sdId.id);
+        Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS), false);
+
+        //Creating a table in a database that isn't registered yet registers the database instance as well
+        assertDatabaseIsRegistered(DEFAULT_DB);
+    }
+
+    private String assertColumnIsRegistered(String colName) throws Exception {
+        LOG.debug("Searching for column {}", colName);
+        String query = String.format("%s where name = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
+        return assertEntityIsRegistered(query, true);
+
+    }
+
+    @Test
+    public void testCTAS() throws Exception {
+        String tableName = createTable();
+        String ctasTableName = "table" + random();
+        String query = "create table " + ctasTableName + " as select * from " + tableName;
+        runCommand(query);
+
+        assertTableIsRegistered(DEFAULT_DB, ctasTableName);
+        assertProcessIsRegistered(query);
+    }
+
+    @Test
+    public void testCreateView() throws Exception {
+        String tableName = createTable();
+        String viewName = tableName();
+        String query = "create view " + viewName + " as select * from " + tableName;
+        runCommand(query);
+
+        assertTableIsRegistered(DEFAULT_DB, viewName);
+        assertProcessIsRegistered(query);
+    }
+
+    @Test
+    public void testLoadData() throws Exception {
+        String tableName = createTable(false);
+
+        String loadFile = file("load");
+        String query = "load data local inpath 'file://" + loadFile + "' into table " + tableName;
+        runCommand(query);
+
+        assertProcessIsRegistered(query);
+    }
+
+    @Test
+    public void testInsert() throws Exception {
+        String tableName = createTable();
+        String insertTableName = createTable();
+        String query = "insert into " + insertTableName + " partition(dt = '2015-01-01') select id, name from "
+                + tableName + " where dt = '2015-01-01'";
+
+        runCommand(query);
+        assertProcessIsRegistered(query);
+        assertPartitionIsRegistered(DEFAULT_DB, insertTableName, "2015-01-01");
+    }
+
+    private String random() {
+        return RandomStringUtils.randomAlphanumeric(10);
+    }
+
+    private String file(String tag) throws Exception {
+        String filename = "./target/" + tag + "-data-" + random();
+        File file = new File(filename);
+        file.createNewFile();
+        return file.getAbsolutePath();
+    }
+
+    private String mkdir(String tag) throws Exception {
+        String filename = "./target/" + tag + "-data-" + random();
+        File file = new File(filename);
+        file.mkdirs();
+        return file.getAbsolutePath();
+    }
+
+    @Test
+    public void testExportImport() throws Exception {
+        String tableName = createTable(false);
+
+        String filename = "pfile://" + mkdir("export");
+        String query = "export table " + tableName + " to \"" + filename + "\"";
+        runCommand(query);
+        assertProcessIsRegistered(query);
+
+        tableName = createTable(false);
+
+        query = "import table " + tableName + " from '" + filename + "'";
+        runCommand(query);
+        assertProcessIsRegistered(query);
+    }
+
+    @Test
+    public void testSelect() throws Exception {
+        String tableName = createTable();
+        String query = "select * from " + tableName;
+        runCommand(query);
+        assertProcessIsRegistered(query);
+
+        //single entity per query
+        query = "SELECT * from " + tableName.toUpperCase();
+        runCommand(query);
+        assertProcessIsRegistered(query);
+    }
+
+    @Test
+    public void testAlterTable() throws Exception {
+        String tableName = createTable();
+        String newName = tableName();
+        String query = "alter table " + tableName + " rename to " + newName;
+        runCommand(query);
+
+        assertTableIsRegistered(DEFAULT_DB, newName);
+        assertTableIsNotRegistered(DEFAULT_DB, tableName);
+    }
+
+    @Test
+    public void testAlterView() throws Exception {
+        String tableName = createTable();
+        String viewName = tableName();
+        String newName = tableName();
+        String query = "create view " + viewName + " as select * from " + tableName;
+        runCommand(query);
+
+        query = "alter view " + viewName + " rename to " + newName;
+        runCommand(query);
+
+        assertTableIsRegistered(DEFAULT_DB, newName);
+        assertTableIsNotRegistered(DEFAULT_DB, viewName);
+    }
+
+    private void assertProcessIsRegistered(String queryStr) throws Exception {
+//        String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
+//                normalize(queryStr));
+//        assertEntityIsRegistered(dslQuery, true);
+        //todo replace with DSL
+        String typeName = HiveDataTypes.HIVE_PROCESS.getName();
+        String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
+                typeName, typeName, normalize(queryStr));
+        JSONObject response = dgiClient.searchByGremlin(gremlinQuery);
+        JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
+        Assert.assertEquals(results.length(), 1);
+    }
+
+    private String normalize(String str) {
+        if (StringUtils.isEmpty(str)) {
+            return null;
+        }
+        return StringEscapeUtils.escapeJava(str.toLowerCase());
+    }
+
+    private String assertTableIsRegistered(String dbName, String tableName) throws Exception {
+        return assertTableIsRegistered(dbName, tableName, true);
+    }
+
+    private String assertTableIsNotRegistered(String dbName, String tableName) throws Exception {
+        return assertTableIsRegistered(dbName, tableName, false);
+    }
+
+    private String assertTableIsRegistered(String dbName, String tableName, boolean registered) throws Exception {
+        LOG.debug("Searching for table {}.{}", dbName, tableName);
+        String query = String.format("%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'"
+                + " select t", HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(),
+                CLUSTER_NAME);
+        return assertEntityIsRegistered(query, registered);
+    }
+
+    private String assertDatabaseIsRegistered(String dbName) throws Exception {
+        LOG.debug("Searching for database {}", dbName);
+        String query = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
+                dbName.toLowerCase(), CLUSTER_NAME);
+        return assertEntityIsRegistered(query, true);
+    }
+
+    private void assertPartitionIsRegistered(String dbName, String tableName, String value) throws Exception {
+        String typeName = HiveDataTypes.HIVE_PARTITION.getName();
+        String dbType = HiveDataTypes.HIVE_DB.getName();
+        String tableType = HiveDataTypes.HIVE_TABLE.getName();
+
+        LOG.debug("Searching for partition of {}.{} with values {}", dbName, tableName, value);
+        //todo replace with DSL
+        String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.values', ['%s']).as('p')."
+                        + "out('__%s.table').has('%s.tableName', '%s').out('__%s.db').has('%s.name', '%s')"
+                        + ".has('%s.clusterName', '%s').back('p').toList()", typeName, typeName, value, typeName,
+                tableType, tableName.toLowerCase(), tableType, dbType, dbName.toLowerCase(), dbType, CLUSTER_NAME);
+        JSONObject response = dgiClient.searchByGremlin(gremlinQuery);
+        JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
+        Assert.assertEquals(results.length(), 1);
+    }
+
+    private String assertEntityIsRegistered(String dslQuery, boolean registered) throws Exception{
+        JSONArray results = dgiClient.searchByDSL(dslQuery);
+        if (registered) {
+            Assert.assertEquals(results.length(), 1);
+            JSONObject row = results.getJSONObject(0);
+            if (row.has("$id$")) {
+                return row.getJSONObject("$id$").getString("id");
+            } else {
+                return row.getJSONObject("_col_0").getString("id");
+            }
+        } else {
+            Assert.assertEquals(results.length(), 0);
+            return null;
+        }
+    }
+
+    @Test
+    public void testLineage() throws Exception {
+        String table1 = createTable(false);
+
+        String db2 = createDatabase();
+        String table2 = tableName();
+
+        String query = String.format("create table %s.%s as select * from %s", db2, table2, table1);
+        runCommand(query);
+        String table1Id = assertTableIsRegistered(DEFAULT_DB, table1);
+        String table2Id = assertTableIsRegistered(db2, table2);
+
+        String datasetName = HiveMetaStoreBridge.getTableName(CLUSTER_NAME, db2, table2);
+        JSONObject response = dgiClient.getInputGraph(datasetName);
+        JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");
+        Assert.assertTrue(vertices.has(table1Id));
+        Assert.assertTrue(vertices.has(table2Id));
+
+        datasetName = HiveMetaStoreBridge.getTableName(CLUSTER_NAME, DEFAULT_DB, table1);
+        response = dgiClient.getOutputGraph(datasetName);
+        vertices = response.getJSONObject("values").getJSONObject("vertices");
+        Assert.assertTrue(vertices.has(table1Id));
+        Assert.assertTrue(vertices.has(table2Id));
+    }
+}

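The assertions in HiveHookIT all go through MetadataServiceClient's search APIs. A rough standalone sketch of the same lookup outside TestNG, using only client calls that appear in the test (searchByDSL, getEntity); the literal type name 'hive_db' and the attribute names are assumptions here, since the test derives them from HiveDataTypes and the model constants:

    import org.apache.atlas.MetadataServiceClient;
    import org.apache.atlas.typesystem.Referenceable;
    import org.codehaus.jettison.json.JSONArray;

    public class QuickEntityLookup {
        public static void main(String[] args) throws Exception {
            // Same endpoint as the test's DGI_URL constant.
            MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000/");

            // Mirrors the DSL built in assertDatabaseIsRegistered(); 'hive_db' is
            // assumed to be what HiveDataTypes.HIVE_DB.getName() returns.
            JSONArray results = client.searchByDSL(
                    "hive_db where name = 'default' and clusterName = 'test'");

            if (results.length() == 1) {
                // As in assertEntityIsRegistered(), the guid comes from the '$id$' column.
                String guid = results.getJSONObject(0).getJSONObject("$id$").getString("id");
                Referenceable db = client.getEntity(guid);
                System.out.println("registered database: " + db.get("name"));
            }
        }
    }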
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
new file mode 100755
index 0000000..6caa4fc
--- /dev/null
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.hive.hook;
+
+import org.apache.atlas.PropertiesUtil;
+import org.apache.atlas.security.SecurityProperties;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
+import org.mortbay.jetty.webapp.WebAppContext;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.net.URL;
+import java.nio.file.Files;
+
+import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
+
+/**
+ * Performs all the necessary setup steps for client and server communication over SSL/Kerberos, but then doesn't
+ * establish a Kerberos user for the invocation. A separate test class is needed because the Jersey layer caches the
+ * URL connection handler, which indirectly caches the Kerberos delegation token.
+ */
+public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
+
+    private Driver driver;
+    private SessionState ss;
+    private TestSecureEmbeddedServer secureEmbeddedServer;
+    private String originalConf;
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        //Set-up hive session
+        HiveConf conf = getHiveConf();
+        driver = new Driver(conf);
+        ss = new SessionState(conf, System.getProperty("user.name"));
+        ss = SessionState.start(ss);
+        SessionState.setCurrentSessionState(ss);
+
+        jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
+        providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
+
+        String persistDir = null;
+        URL resource = NegativeSSLAndKerberosHiveHookIT.class.getResource("/");
+        if (resource != null) {
+            persistDir = resource.toURI().getPath();
+        }
+        // delete prior ssl-client.xml file
+        resource = NegativeSSLAndKerberosHiveHookIT.class.getResource("/" + SecurityProperties.SSL_CLIENT_PROPERTIES);
+        if (resource != null) {
+            File sslClientFile = new File(persistDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
+            if (sslClientFile != null && sslClientFile.exists()) {
+                sslClientFile.delete();
+            }
+        }
+        setupKDCAndPrincipals();
+        setupCredentials();
+
+        // the client will actually only leverage a subset of these properties
+        final PropertiesConfiguration configuration = new PropertiesConfiguration();
+        configuration.setProperty(TLS_ENABLED, true);
+        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
+        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
+        configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
+        configuration.setProperty("atlas.http.authentication.type", "kerberos");
+        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
+
+        configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
+
+        String confLocation = System.getProperty("atlas.conf");
+        URL url;
+        if (confLocation == null) {
+            url = PropertiesUtil.class.getResource("/application.properties");
+        } else {
+            url = new File(confLocation, "application.properties").toURI().toURL();
+        }
+        configuration.load(url);
+        configuration.setProperty(TLS_ENABLED, true);
+        configuration.setProperty("atlas.http.authentication.enabled", "true");
+        configuration.setProperty("atlas.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
+        configuration.setProperty("atlas.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
+        configuration.setProperty("atlas.http.authentication.kerberos.name.rules",
+                "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
+
+        configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
+
+        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/apache-atlas") {
+            @Override
+            public PropertiesConfiguration getConfiguration() {
+                return configuration;
+            }
+        };
+        WebAppContext webapp = new WebAppContext();
+        webapp.setContextPath("/");
+        webapp.setWar(System.getProperty("user.dir") + getWarPath());
+        secureEmbeddedServer.getServer().setHandler(webapp);
+
+        // save original setting
+        originalConf = System.getProperty("atlas.conf");
+        System.setProperty("atlas.conf", persistDir);
+        secureEmbeddedServer.getServer().start();
+
+    }
+
+    @AfterClass
+    public void tearDown() throws Exception {
+        if (secureEmbeddedServer != null) {
+            secureEmbeddedServer.getServer().stop();
+        }
+
+        if (kdc != null) {
+            kdc.stop();
+        }
+
+        if (originalConf != null) {
+            System.setProperty("atlas.conf", originalConf);
+        }
+    }
+
+    private void runCommand(final String cmd) throws Exception {
+        ss.setCommandType(null);
+        driver.run(cmd);
+        Assert.assertNotNull(driver.getErrorMsg());
+        Assert.assertTrue(driver.getErrorMsg().contains("Mechanism level: Failed to find any Kerberos tgt"));
+    }
+
+    @Test
+    public void testUnsecuredCreateDatabase() throws Exception {
+        String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
+        runCommand("create database " + dbName);
+    }
+
+}

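The setup above registers the auth-to-local rule "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT". A minimal sketch of what that rule does, using Hadoop's KerberosName utility; that this class is directly usable here is an assumption, though the hadoop-auth machinery is already in play in these tests:

    import org.apache.hadoop.security.authentication.util.KerberosName;

    public class NameRuleCheck {
        public static void main(String[] args) throws Exception {
            // The same rule string the test writes into application.properties.
            KerberosName.setRules("RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");

            // Principals in EXAMPLE.COM have their realm stripped: prints "testuser".
            System.out.println(new KerberosName("testuser@EXAMPLE.COM").getShortName());
        }
    }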
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
new file mode 100755
index 0000000..3ea2d9c
--- /dev/null
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
@@ -0,0 +1,229 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.hive.hook;
+
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.PropertiesUtil;
+import org.apache.atlas.hive.model.HiveDataTypes;
+import org.apache.atlas.security.SecurityProperties;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
+import org.codehaus.jettison.json.JSONArray;
+import org.mortbay.jetty.webapp.WebAppContext;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import javax.security.auth.Subject;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.NameCallback;
+import javax.security.auth.callback.PasswordCallback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.security.PrivilegedExceptionAction;
+
+import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
+
+public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
+    public static final String TEST_USER_JAAS_SECTION = "TestUser";
+    public static final String TESTUSER = "testuser";
+    public static final String TESTPASS = "testpass";
+
+    private static final String DGI_URL = "https://localhost:21443/";
+    private Driver driver;
+    private MetadataServiceClient dgiClient;
+    private SessionState ss;
+    private TestSecureEmbeddedServer secureEmbeddedServer;
+    private Subject subject;
+    private String originalConf;
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        //Set-up hive session
+        HiveConf conf = getHiveConf();
+        driver = new Driver(conf);
+        ss = new SessionState(conf, System.getProperty("user.name"));
+        ss = SessionState.start(ss);
+        SessionState.setCurrentSessionState(ss);
+
+        jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
+        providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
+
+        String persistDir = null;
+        URL resource = SSLAndKerberosHiveHookIT.class.getResource("/");
+        if (resource != null) {
+            persistDir = resource.toURI().getPath();
+        }
+        // delete prior ssl-client.xml file
+        resource = SSLAndKerberosHiveHookIT.class.getResource("/" + SecurityProperties.SSL_CLIENT_PROPERTIES);
+        if (resource != null) {
+            File sslClientFile = new File(persistDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
+            if (sslClientFile != null && sslClientFile.exists()) {
+                sslClientFile.delete();
+            }
+        }
+        setupKDCAndPrincipals();
+        setupCredentials();
+
+        // the client will actually only leverage a subset of these properties
+        final PropertiesConfiguration configuration = new PropertiesConfiguration();
+        configuration.setProperty(TLS_ENABLED, true);
+        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
+        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
+        configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
+        configuration.setProperty("atlas.http.authentication.type", "kerberos");
+        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
+
+        configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
+
+        String confLocation = System.getProperty("atlas.conf");
+        URL url;
+        if (confLocation == null) {
+            url = PropertiesUtil.class.getResource("/application.properties");
+        } else {
+            url = new File(confLocation, "application.properties").toURI().toURL();
+        }
+        configuration.load(url);
+        configuration.setProperty(TLS_ENABLED, true);
+        configuration.setProperty("atlas.http.authentication.enabled", "true");
+        configuration.setProperty("atlas.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
+        configuration.setProperty("atlas.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
+        configuration.setProperty("atlas.http.authentication.kerberos.name.rules",
+                "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
+
+        configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
+
+        dgiClient = new MetadataServiceClient(DGI_URL) {
+            @Override
+            protected PropertiesConfiguration getClientProperties() throws MetadataException {
+                return configuration;
+            }
+        };
+
+        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/apache-atlas") {
+            @Override
+            public PropertiesConfiguration getConfiguration() {
+                return configuration;
+            }
+        };
+        WebAppContext webapp = new WebAppContext();
+        webapp.setContextPath("/");
+        webapp.setWar(System.getProperty("user.dir") + getWarPath());
+        secureEmbeddedServer.getServer().setHandler(webapp);
+
+        // save original setting
+        originalConf = System.getProperty("atlas.conf");
+        System.setProperty("atlas.conf", persistDir);
+        secureEmbeddedServer.getServer().start();
+
+        subject = loginTestUser();
+    }
+
+    @AfterClass
+    public void tearDown() throws Exception {
+        if (secureEmbeddedServer != null) {
+            secureEmbeddedServer.getServer().stop();
+        }
+
+        if (kdc != null) {
+            kdc.stop();
+        }
+
+        if (originalConf != null) {
+            System.setProperty("atlas.conf", originalConf);
+        }
+    }
+
+    protected Subject loginTestUser() throws LoginException, IOException {
+        LoginContext lc = new LoginContext(TEST_USER_JAAS_SECTION, new CallbackHandler() {
+
+            @Override
+            public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+                for (int i = 0; i < callbacks.length; i++) {
+                    if (callbacks[i] instanceof PasswordCallback) {
+                        PasswordCallback passwordCallback = (PasswordCallback) callbacks[i];
+                        passwordCallback.setPassword(TESTPASS.toCharArray());
+                    }
+                    if (callbacks[i] instanceof NameCallback) {
+                        NameCallback nameCallback = (NameCallback) callbacks[i];
+                        nameCallback.setName(TESTUSER);
+                    }
+                }
+            }
+        });
+        // attempt authentication
+        lc.login();
+        return lc.getSubject();
+    }
+
+    private void runCommand(final String cmd) throws Exception {
+        ss.setCommandType(null);
+        Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
+            @Override
+            public Object run() throws Exception {
+                driver.run(cmd);
+
+                return null;
+            }
+        });
+    }
+
+    @Test
+    public void testCreateDatabase() throws Exception {
+        String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
+        runCommand("create database " + dbName);
+
+        assertDatabaseIsRegistered(dbName);
+    }
+
+    private void assertDatabaseIsRegistered(String dbName) throws Exception {
+        assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
+    }
+
+    private void assertInstanceIsRegistered(final String typeName, final String colName, final String colValue) throws Exception {
+        Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
+            @Override
+            public Object run() throws Exception {
+                JSONArray results = dgiClient.rawSearch(typeName, colName, colValue);
+                Assert.assertEquals(results.length(), 1);
+
+                return null;
+            }
+        });
+    }
+}

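loginTestUser() resolves the TEST_USER_JAAS_SECTION ("TestUser") from the JVM's JAAS configuration, which setupKDCAndPrincipals() in the base class writes out and binds (see the deleted copy of BaseSSLAndKerberosTest later in this commit). A minimal sketch of that wiring, assuming the standard java.security.auth.login.config property is how bindJVMtoJAASFile() points the JVM at the file; the temp-file handling is purely illustrative:

    import java.io.File;
    import java.io.FileWriter;

    public class JaasWiring {
        public static void main(String[] args) throws Exception {
            // Section name matches TEST_USER_JAAS_SECTION; the module line mirrors
            // what BaseSSLAndKerberosTest builds in setupKDCAndPrincipals().
            String jaas = "TestUser {\n"
                    + "    com.sun.security.auth.module.Krb5LoginModule required\n"
                    + "    useTicketCache=true;\n"
                    + "};\n";
            File jaasFile = File.createTempFile("jaas", ".conf");
            try (FileWriter writer = new FileWriter(jaasFile)) {
                writer.write(jaas);
            }
            // LoginContext("TestUser", ...) resolves its section through this property.
            System.setProperty("java.security.auth.login.config", jaasFile.getAbsolutePath());
        }
    }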
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
new file mode 100755
index 0000000..cac313c
--- /dev/null
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
@@ -0,0 +1,224 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.hive.hook;
+
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
+import org.apache.atlas.hive.model.HiveDataTypes;
+import org.apache.atlas.security.SecurityProperties;
+import org.apache.atlas.web.service.SecureEmbeddedServer;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
+import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
+import org.codehaus.jettison.json.JSONArray;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.webapp.WebAppContext;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.net.URL;
+import java.nio.file.Files;
+
+import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_PASSWORD_KEY;
+import static org.apache.atlas.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY;
+import static org.apache.atlas.security.SecurityProperties.TLS_ENABLED;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;
+
+public class SSLHiveHookIT {
+    private static final String DGI_URL = "https://localhost:21443/";
+    private Driver driver;
+    private MetadataServiceClient dgiClient;
+    private SessionState ss;
+    private Path jksPath;
+    private String providerUrl;
+    private TestSecureEmbeddedServer secureEmbeddedServer;
+
+    class TestSecureEmbeddedServer extends SecureEmbeddedServer {
+
+        public TestSecureEmbeddedServer(int port, String path) throws IOException {
+            super(port, path);
+        }
+
+        public Server getServer() { return server; }
+
+        @Override
+        public PropertiesConfiguration getConfiguration() {
+            return super.getConfiguration();
+        }
+    }
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        //Set-up hive session
+        HiveConf conf = getHiveConf();
+        driver = new Driver(conf);
+        ss = new SessionState(conf, System.getProperty("user.name"));
+        ss = SessionState.start(ss);
+        SessionState.setCurrentSessionState(ss);
+
+        jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
+        providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
+
+        String persistDir = null;
+        URL resource = SSLHiveHookIT.class.getResource("/");
+        if (resource != null) {
+            persistDir = resource.toURI().getPath();
+        }
+        // delete prior ssl-client.xml file
+        resource = SSLHiveHookIT.class.getResource("/" + SecurityProperties.SSL_CLIENT_PROPERTIES);
+        if (resource != null) {
+            File sslClientFile = new File(persistDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
+            if (sslClientFile != null && sslClientFile.exists()) {
+                sslClientFile.delete();
+            }
+        }
+        setupCredentials();
+
+        final PropertiesConfiguration configuration = new PropertiesConfiguration();
+        configuration.setProperty(TLS_ENABLED, true);
+        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
+        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
+        configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
+        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
+
+        configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
+
+        dgiClient = new MetadataServiceClient(DGI_URL) {
+            @Override
+            protected PropertiesConfiguration getClientProperties() throws MetadataException {
+                return configuration;
+            }
+        };
+
+        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/apache-atlas") {
+            @Override
+            public PropertiesConfiguration getConfiguration() {
+                return configuration;
+            }
+        };
+        WebAppContext webapp = new WebAppContext();
+        webapp.setContextPath("/");
+        webapp.setWar(System.getProperty("user.dir") + getWarPath());
+        secureEmbeddedServer.getServer().setHandler(webapp);
+
+        secureEmbeddedServer.getServer().start();
+
+    }
+
+    @AfterClass
+    public void tearDown() throws Exception {
+        if (secureEmbeddedServer != null) {
+            secureEmbeddedServer.getServer().stop();
+        }
+    }
+
+    protected void setupCredentials() throws Exception {
+        Configuration conf = new Configuration(false);
+
+        File file = new File(jksPath.toUri().getPath());
+        file.delete();
+        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
+
+        CredentialProvider provider =
+                CredentialProviderFactory.getProviders(conf).get(0);
+
+        // create new aliases
+        try {
+
+            char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
+            provider.createCredentialEntry(
+                    KEYSTORE_PASSWORD_KEY, storepass);
+
+            char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
+            provider.createCredentialEntry(
+                    TRUSTSTORE_PASSWORD_KEY, trustpass);
+
+            char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
+            provider.createCredentialEntry(
+                    "ssl.client.truststore.password", trustpass2);
+
+            char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
+            provider.createCredentialEntry(
+                    SERVER_CERT_PASSWORD_KEY, certpass);
+
+            // write out so that it can be found in checks
+            provider.flush();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
+
+    protected String getWarPath() {
+        return String.format("/../../webapp/target/atlas-webapp-%s",
+                System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
+    }
+
+    private HiveConf getHiveConf() {
+        HiveConf hiveConf = new HiveConf(this.getClass());
+        hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
+        hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
+        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/atlas");
+        hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
+        hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
+        hiveConf.set("hive.hook.dgi.synchronous", "true");
+        return hiveConf;
+    }
+
+    private void runCommand(String cmd) throws Exception {
+        ss.setCommandType(null);
+        driver.run(cmd);
+    }
+
+    @Test
+    public void testCreateDatabase() throws Exception {
+        String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
+        runCommand("create database " + dbName);
+
+        assertDatabaseIsRegistered(dbName);
+    }
+
+    private void assertDatabaseIsRegistered(String dbName) throws Exception {
+        assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
+    }
+
+    private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
+        JSONArray results = dgiClient.rawSearch(typeName, colName, colValue);
+        Assert.assertEquals(results.length(), 1);
+    }
+}

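setupCredentials() stores the keystore, truststore, and certificate passwords as aliases in a Hadoop JavaKeyStoreProvider rather than in plain configuration. A hedged sketch of how such aliases are read back on the consuming side, assuming Hadoop's Configuration.getPassword() (available from Hadoop 2.6) and assuming the literal alias "keystore.password" matches the KEYSTORE_PASSWORD_KEY constant used above; the jceks path is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.alias.CredentialProviderFactory;

    public class CredentialReadback {
        public static void main(String[] args) throws Exception {
            // Point at the same jceks store the test populates in setupCredentials().
            Configuration conf = new Configuration(false);
            conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
                    "jceks://file/tmp/test.jks");

            // getPassword() consults the provider path first, so aliases written
            // via provider.createCredentialEntry(...) resolve here.
            char[] keystorePass = conf.getPassword("keystore.password");
            System.out.println(keystorePass != null ? "alias resolved" : "alias missing");
        }
    }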
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/BaseSSLAndKerberosTest.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/BaseSSLAndKerberosTest.java b/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/BaseSSLAndKerberosTest.java
deleted file mode 100644
index 75c23bc..0000000
--- a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/BaseSSLAndKerberosTest.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metadata.hive.hook;
-
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
-import org.apache.hadoop.metadata.security.BaseSecurityTest;
-import org.apache.hadoop.metadata.web.service.SecureEmbeddedServer;
-import org.apache.hadoop.security.alias.CredentialProvider;
-import org.apache.hadoop.security.alias.CredentialProviderFactory;
-import org.mortbay.jetty.Server;
-
-import java.io.File;
-import java.io.IOException;
-
-import static org.apache.hadoop.metadata.security.SecurityProperties.KEYSTORE_PASSWORD_KEY;
-import static org.apache.hadoop.metadata.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY;
-import static org.apache.hadoop.metadata.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;
-
-/**
- *
- */
-public class BaseSSLAndKerberosTest extends BaseSecurityTest {
-    public static final String TESTUSER = "testuser";
-    public static final String TESTPASS = "testpass";
-    protected static final String DGI_URL = "https://localhost:21443/";
-    protected Path jksPath;
-    protected String providerUrl;
-    protected File httpKeytabFile;
-    private File userKeytabFile;
-
-    class TestSecureEmbeddedServer extends SecureEmbeddedServer {
-
-        public TestSecureEmbeddedServer(int port, String path) throws IOException {
-            super(port, path);
-        }
-
-        public Server getServer() {
-            return server;
-        }
-
-        @Override
-        public PropertiesConfiguration getConfiguration() {
-            return super.getConfiguration();
-        }
-    }
-
-    protected void setupCredentials() throws Exception {
-        Configuration conf = new Configuration(false);
-
-        File file = new File(jksPath.toUri().getPath());
-        file.delete();
-        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
-
-        CredentialProvider provider =
-                CredentialProviderFactory.getProviders(conf).get(0);
-
-        // create new aliases
-        try {
-
-            char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    KEYSTORE_PASSWORD_KEY, storepass);
-
-            char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    TRUSTSTORE_PASSWORD_KEY, trustpass);
-
-            char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    "ssl.client.truststore.password", trustpass2);
-
-            char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    SERVER_CERT_PASSWORD_KEY, certpass);
-
-            // write out so that it can be found in checks
-            provider.flush();
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw e;
-        }
-    }
-
-    public void setupKDCAndPrincipals() throws Exception {
-        // set up the KDC
-        File kdcWorkDir = startKDC();
-
-        userKeytabFile = createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab");
-        httpKeytabFile = createKeytab(kdc, kdcWorkDir, "HTTP", "spnego.service.keytab");
-
-        // create a test user principal
-        kdc.createPrincipal(TESTUSER, TESTPASS);
-
-        StringBuilder jaas = new StringBuilder(1024);
-        jaas.append("TestUser {\n" +
-                "    com.sun.security.auth.module.Krb5LoginModule required\nuseTicketCache=true;\n" +
-                "};\n");
-        jaas.append(createJAASEntry("Client", "dgi", userKeytabFile));
-        jaas.append(createJAASEntry("Server", "HTTP", httpKeytabFile));
-
-        File jaasFile = new File(kdcWorkDir, "jaas.txt");
-        FileUtils.write(jaasFile, jaas.toString());
-        bindJVMtoJAASFile(jaasFile);
-    }
-
-    protected String getWarPath() {
-        return String.format("/../../webapp/target/metadata-webapp-%s",
-                System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
-    }
-
-    protected HiveConf getHiveConf() {
-        HiveConf hiveConf = new HiveConf(this.getClass());
-        hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
-        hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
-        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
-        hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
-        hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
-        hiveConf.set("hive.hook.dgi.synchronous", "true");
-        return hiveConf;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java
deleted file mode 100755
index 33ebfde..0000000
--- a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/HiveHookIT.java
+++ /dev/null
@@ -1,379 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.hive.hook;
-
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.commons.lang.StringEscapeUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.metadata.MetadataServiceClient;
-import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
-import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
-import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.persistence.Id;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
-import org.slf4j.Logger;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.util.Map;
-
-public class HiveHookIT {
-    public static final Logger LOG = org.slf4j.LoggerFactory.getLogger(HiveHookIT.class);
-
-    private static final String DGI_URL = "http://localhost:21000/";
-    private static final String CLUSTER_NAME = "test";
-    public static final String DEFAULT_DB = "default";
-    private Driver driver;
-    private MetadataServiceClient dgiCLient;
-    private SessionState ss;
-
-    @BeforeClass
-    public void setUp() throws Exception {
-        //Set-up hive session
-        HiveConf conf = getHiveConf();
-        driver = new Driver(conf);
-        ss = new SessionState(conf, System.getProperty("user.name"));
-        ss = SessionState.start(ss);
-        SessionState.setCurrentSessionState(ss);
-
-        dgiCLient = new MetadataServiceClient(DGI_URL);
-    }
-
-    private HiveConf getHiveConf() {
-        HiveConf hiveConf = new HiveConf(this.getClass());
-        hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
-        hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
-        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
-        hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
-        hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
-        hiveConf.set("hive.hook.dgi.synchronous", "true");
-        hiveConf.set(HiveMetaStoreBridge.HIVE_CLUSTER_NAME, CLUSTER_NAME);
-        hiveConf.setBoolVar(HiveConf.ConfVars.HIVETESTMODE, true);  //to not use hdfs
-        hiveConf.setVar(HiveConf.ConfVars.HIVETESTMODEPREFIX, "");
-        hiveConf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
-        return hiveConf;
-    }
-
-    private void runCommand(String cmd) throws Exception {
-        ss.setCommandType(null);
-        driver.run(cmd);
-    }
-
-    @Test
-    public void testCreateDatabase() throws Exception {
-        String dbName = "db" + random();
-        runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1', 'p2'='v2')");
-        String dbId = assertDatabaseIsRegistered(dbName);
-        Referenceable definition = dgiCLient.getEntity(dbId);
-        Map params = (Map) definition.get("parameters");
-        Assert.assertNotNull(params);
-        Assert.assertEquals(params.size(), 2);
-        Assert.assertEquals(params.get("p1"), "v1");
-
-        //There should be just one entity per dbname
-        runCommand("drop database " + dbName);
-        runCommand("create database " + dbName);
-        assertDatabaseIsRegistered(dbName);
-    }
-
-    private String dbName() {
-        return "db" + random();
-    }
-
-    private String createDatabase() throws Exception {
-        String dbName = dbName();
-        runCommand("create database " + dbName);
-        return dbName;
-    }
-
-    private String tableName() {
-        return "table" + random();
-    }
-
-    private String createTable() throws Exception {
-        return createTable(true);
-    }
-
-    private String createTable(boolean partition) throws Exception {
-        String tableName = tableName();
-        runCommand("create table " + tableName + "(id int, name string) comment 'table comment' "
-                + (partition ? " partitioned by(dt string)" : ""));
-        return tableName;
-    }
-
-    @Test
-    public void testCreateTable() throws Exception {
-        String tableName = tableName();
-        String dbName = createDatabase();
-        String colName = "col" + random();
-        runCommand("create table " + dbName + "." + tableName + "(" + colName + " int, name string)");
-        assertTableIsRegistered(dbName, tableName);
-        //there should be only one registered instance of the column
-        assertColumnIsRegistered(colName);
-
-        tableName = createTable();
-        String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
-        Referenceable tableRef = dgiCLient.getEntity(tableId);
-        Assert.assertEquals(tableRef.get("tableType"), TableType.MANAGED_TABLE.name());
-        Assert.assertEquals(tableRef.get(HiveDataModelGenerator.COMMENT), "table comment");
-        String entityName = HiveMetaStoreBridge.getTableName(CLUSTER_NAME, DEFAULT_DB, tableName);
-        Assert.assertEquals(tableRef.get(HiveDataModelGenerator.NAME), entityName);
-
-        final Id sdId = (Id) tableRef.get("sd");
-        Referenceable sdRef = dgiCLient.getEntity(sdId.id);
-        Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS), false);
-
-        //Creating a table also registers the database instance if it doesn't exist yet
-        assertDatabaseIsRegistered(DEFAULT_DB);
-    }
-
-    private String assertColumnIsRegistered(String colName) throws Exception {
-        LOG.debug("Searching for column {}", colName);
-        String query = String.format("%s where name = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
-        return assertEntityIsRegistered(query, true);
-    }
-
-    @Test
-    public void testCTAS() throws Exception {
-        String tableName = createTable();
-        String ctasTableName = "table" + random();
-        String query = "create table " + ctasTableName + " as select * from " + tableName;
-        runCommand(query);
-
-        assertTableIsRegistered(DEFAULT_DB, ctasTableName);
-        assertProcessIsRegistered(query);
-    }
-
-    @Test
-    public void testCreateView() throws Exception {
-        String tableName = createTable();
-        String viewName = tableName();
-        String query = "create view " + viewName + " as select * from " + tableName;
-        runCommand(query);
-
-        assertTableIsRegistered(DEFAULT_DB, viewName);
-        assertProcessIsRegistered(query);
-    }
-
-    @Test
-    public void testLoadData() throws Exception {
-        String tableName = createTable(false);
-
-        String loadFile = file("load");
-        String query = "load data local inpath 'file://" + loadFile + "' into table " + tableName;
-        runCommand(query);
-
-        assertProcessIsRegistered(query);
-    }
-
-    @Test
-    public void testInsert() throws Exception {
-        String tableName = createTable();
-        String insertTableName = createTable();
-        String query = "insert into " + insertTableName + " partition(dt = '2015-01-01') select id, name from "
-                + tableName + " where dt = '2015-01-01'";
-
-        runCommand(query);
-        assertProcessIsRegistered(query);
-        assertPartitionIsRegistered(DEFAULT_DB, insertTableName, "2015-01-01");
-    }
-
-    private String random() {
-        return RandomStringUtils.randomAlphanumeric(10);
-    }
-
-    private String file(String tag) throws Exception {
-        String filename = "./target/" + tag + "-data-" + random();
-        File file = new File(filename);
-        file.createNewFile();
-        return file.getAbsolutePath();
-    }
-
-    private String mkdir(String tag) throws Exception {
-        String filename = "./target/" + tag + "-data-" + random();
-        File file = new File(filename);
-        file.mkdirs();
-        return file.getAbsolutePath();
-    }
-
-    @Test
-    public void testExportImport() throws Exception {
-        String tableName = createTable(false);
-
-        String filename = "pfile://" + mkdir("export");
-        String query = "export table " + tableName + " to \"" + filename + "\"";
-        runCommand(query);
-        assertProcessIsRegistered(query);
-
-        tableName = createTable(false);
-
-        query = "import table " + tableName + " from '" + filename + "'";
-        runCommand(query);
-        assertProcessIsRegistered(query);
-    }
-
-    @Test
-    public void testSelect() throws Exception {
-        String tableName = createTable();
-        String query = "select * from " + tableName;
-        runCommand(query);
-        assertProcessIsRegistered(query);
-
-        //the same query text should map to a single process entity, regardless of case
-        query = "SELECT * from " + tableName.toUpperCase();
-        runCommand(query);
-        assertProcessIsRegistered(query);
-    }
-
-    @Test
-    public void testAlterTable() throws Exception {
-        String tableName = createTable();
-        String newName = tableName();
-        String query = "alter table " + tableName + " rename to " + newName;
-        runCommand(query);
-
-        assertTableIsRegistered(DEFAULT_DB, newName);
-        assertTableIsNotRegistered(DEFAULT_DB, tableName);
-    }
-
-    @Test
-    public void testAlterView() throws Exception {
-        String tableName = createTable();
-        String viewName = tableName();
-        String newName = tableName();
-        String query = "create view " + viewName + " as select * from " + tableName;
-        runCommand(query);
-
-        query = "alter view " + viewName + " rename to " + newName;
-        runCommand(query);
-
-        assertTableIsRegistered(DEFAULT_DB, newName);
-        assertTableIsNotRegistered(DEFAULT_DB, viewName);
-    }
-
-    private void assertProcessIsRegistered(String queryStr) throws Exception {
-//        String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
-//                normalize(queryStr));
-//        assertEntityIsRegistered(dslQuery, true);
-        //todo replace with DSL
-        String typeName = HiveDataTypes.HIVE_PROCESS.getName();
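-        // Gremlin: match vertices of the hive_process type whose queryText property
-        // equals the normalized (lower-cased, escaped) query string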
-        String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
-                typeName, typeName, normalize(queryStr));
-        JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
-        JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
-        Assert.assertEquals(results.length(), 1);
-    }
-
-    private String normalize(String str) {
-        if (StringUtils.isEmpty(str)) {
-            return null;
-        }
-        return StringEscapeUtils.escapeJava(str.toLowerCase());
-    }
-
-    private String assertTableIsRegistered(String dbName, String tableName) throws Exception {
-        return assertTableIsRegistered(dbName, tableName, true);
-    }
-
-    private String assertTableIsNotRegistered(String dbName, String tableName) throws Exception {
-        return assertTableIsRegistered(dbName, tableName, false);
-    }
-
-    private String assertTableIsRegistered(String dbName, String tableName, boolean registered) throws Exception {
-        LOG.debug("Searching for table {}.{}", dbName, tableName);
-        String query = String.format("%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'"
-                + " select t", HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(),
-                CLUSTER_NAME);
-        return assertEntityIsRegistered(query, registered);
-    }
-
-    private String assertDatabaseIsRegistered(String dbName) throws Exception {
-        LOG.debug("Searching for database {}", dbName);
-        String query = String.format("%s where name = '%s' and clusterName = '%s'", HiveDataTypes.HIVE_DB.getName(),
-                dbName.toLowerCase(), CLUSTER_NAME);
-        return assertEntityIsRegistered(query, true);
-    }
-
-    private void assertPartitionIsRegistered(String dbName, String tableName, String value) throws Exception {
-        String typeName = HiveDataTypes.HIVE_PARTITION.getName();
-        String dbType = HiveDataTypes.HIVE_DB.getName();
-        String tableType = HiveDataTypes.HIVE_TABLE.getName();
-
-        LOG.debug("Searching for partition of {}.{} with values {}", dbName, tableName, value);
-        //todo replace with DSL
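-        // Gremlin: start at partition vertices with the given values, walk the table edge
-        // (filtering on tableName), then the db edge (filtering on db name and clusterName),
-        // and step back to the matched partition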
-        String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.values', ['%s']).as('p')."
-                        + "out('__%s.table').has('%s.tableName', '%s').out('__%s.db').has('%s.name', '%s')"
-                        + ".has('%s.clusterName', '%s').back('p').toList()", typeName, typeName, value, typeName,
-                tableType, tableName.toLowerCase(), tableType, dbType, dbName.toLowerCase(), dbType, CLUSTER_NAME);
-        JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
-        JSONArray results = response.getJSONArray(MetadataServiceClient.RESULTS);
-        Assert.assertEquals(results.length(), 1);
-    }
-
-    private String assertEntityIsRegistered(String dslQuery, boolean registered) throws Exception {
-        JSONArray results = dgiCLient.searchByDSL(dslQuery);
-        if (registered) {
-            Assert.assertEquals(results.length(), 1);
-            JSONObject row = results.getJSONObject(0);
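-            // plain DSL queries return the id under '$id$'; queries ending in a select
-            // clause appear to return it under the '_col_0' column alias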
-            if (row.has("$id$")) {
-                return row.getJSONObject("$id$").getString("id");
-            } else {
-                return row.getJSONObject("_col_0").getString("id");
-            }
-        } else {
-            Assert.assertEquals(results.length(), 0);
-            return null;
-        }
-    }
-
-    @Test
-    public void testLineage() throws Exception {
-        String table1 = createTable(false);
-
-        String db2 = createDatabase();
-        String table2 = tableName();
-
-        String query = String.format("create table %s.%s as select * from %s", db2, table2, table1);
-        runCommand(query);
-        String table1Id = assertTableIsRegistered(DEFAULT_DB, table1);
-        String table2Id = assertTableIsRegistered(db2, table2);
-
-        String datasetName = HiveMetaStoreBridge.getTableName(CLUSTER_NAME, db2, table2);
-        JSONObject response = dgiCLient.getInputGraph(datasetName);
-        JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");
-        Assert.assertTrue(vertices.has(table1Id));
-        Assert.assertTrue(vertices.has(table2Id));
-
-        datasetName = HiveMetaStoreBridge.getTableName(CLUSTER_NAME, DEFAULT_DB, table1);
-        response = dgiCLient.getOutputGraph(datasetName);
-        vertices = response.getJSONObject("values").getJSONObject("vertices");
-        Assert.assertTrue(vertices.has(table1Id));
-        Assert.assertTrue(vertices.has(table2Id));
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/NegativeSSLAndKerberosHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
deleted file mode 100755
index af073f5..0000000
--- a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.hive.hook;
-
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.metadata.PropertiesUtil;
-import org.apache.hadoop.metadata.security.SecurityProperties;
-import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
-import org.mortbay.jetty.webapp.WebAppContext;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.net.URL;
-import java.nio.file.Files;
-
-import static org.apache.hadoop.metadata.security.SecurityProperties.*;
-
-/**
- * Performs all the necessary setup steps for client and server communication over SSL/Kerberos, but then doesn't
- * establish a Kerberos user for the invocation. A separate test class is needed because the Jersey layer caches the
- * URL connection handler, which indirectly caches the Kerberos delegation token.
- */
-public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
-
-    private Driver driver;
-    private SessionState ss;
-    private TestSecureEmbeddedServer secureEmbeddedServer;
-    private String originalConf;
-
-    @BeforeClass
-    public void setUp() throws Exception {
-        //Set up the hive session
-        HiveConf conf = getHiveConf();
-        driver = new Driver(conf);
-        ss = new SessionState(conf, System.getProperty("user.name"));
-        ss = SessionState.start(ss);
-        SessionState.setCurrentSessionState(ss);
-
-        jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
-        providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
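-        // hadoop credential provider backed by a temp JKS file; wired into the config
-        // below via CERT_STORES_CREDENTIAL_PROVIDER_PATH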
-
-        String persistDir = null;
-        URL resource = NegativeSSLAndKerberosHiveHookIT.class.getResource("/");
-        if (resource != null) {
-            persistDir = resource.toURI().getPath();
-        }
-        // delete prior ssl-client.xml file
-        resource = NegativeSSLAndKerberosHiveHookIT.class.getResource("/" + SecurityProperties.SSL_CLIENT_PROPERTIES);
-        if (resource != null) {
-            File sslClientFile = new File(persistDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
-            if (sslClientFile.exists()) {
-                sslClientFile.delete();
-            }
-        }
-        setupKDCAndPrincipals();
-        setupCredentials();
-
-        // the client will actually only leverage a subset of these properties
-        final PropertiesConfiguration configuration = new PropertiesConfiguration();
-        configuration.setProperty(TLS_ENABLED, true);
-        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
-        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
-        configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
-        configuration.setProperty("metadata.http.authentication.type", "kerberos");
-        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
-
-        configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
-
-        String confLocation = System.getProperty("metadata.conf");
-        URL url;
-        if (confLocation == null) {
-            url = PropertiesUtil.class.getResource("/application.properties");
-        } else {
-            url = new File(confLocation, "application.properties").toURI().toURL();
-        }
-        configuration.load(url);
-        configuration.setProperty(TLS_ENABLED, true);
-        configuration.setProperty("metadata.http.authentication.enabled", "true");
-        configuration.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
-        configuration.setProperty("metadata.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
-        configuration.setProperty("metadata.http.authentication.kerberos.name.rules",
-                "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
-
-        configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
-
-        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
-            @Override
-            public PropertiesConfiguration getConfiguration() {
-                return configuration;
-            }
-        };
-        WebAppContext webapp = new WebAppContext();
-        webapp.setContextPath("/");
-        webapp.setWar(System.getProperty("user.dir") + getWarPath());
-        secureEmbeddedServer.getServer().setHandler(webapp);
-
-        // save original setting
-        originalConf = System.getProperty("metadata.conf");
-        System.setProperty("metadata.conf", persistDir);
-        secureEmbeddedServer.getServer().start();
-    }
-
-    @AfterClass
-    public void tearDown() throws Exception {
-        if (secureEmbeddedServer != null) {
-            secureEmbeddedServer.getServer().stop();
-        }
-
-        if (kdc != null) {
-            kdc.stop();
-        }
-
-        if (originalConf != null) {
-            System.setProperty("metadata.conf", originalConf);
-        }
-    }
-
-    private void runCommand(final String cmd) throws Exception {
-        ss.setCommandType(null);
-        driver.run(cmd);
-        Assert.assertNotNull(driver.getErrorMsg());
-        Assert.assertTrue(driver.getErrorMsg().contains("Mechanism level: Failed to find any Kerberos tgt"));
-    }
-
-    @Test
-    public void testUnsecuredCreateDatabase() throws Exception {
-        String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create database " + dbName);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/SSLAndKerberosHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/SSLAndKerberosHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/SSLAndKerberosHiveHookIT.java
deleted file mode 100755
index 62c8a69..0000000
--- a/addons/hive-bridge/src/test/java/org/apache/hadoop/metadata/hive/hook/SSLAndKerberosHiveHookIT.java
+++ /dev/null
@@ -1,223 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.hive.hook;
-
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.MetadataServiceClient;
-import org.apache.hadoop.metadata.PropertiesUtil;
-import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
-import org.apache.hadoop.metadata.security.SecurityProperties;
-import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
-import org.apache.hadoop.security.ssl.SSLFactory;
-import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
-import org.mortbay.jetty.webapp.WebAppContext;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import javax.security.auth.Subject;
-import javax.security.auth.callback.*;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.net.URL;
-import java.nio.file.Files;
-import java.security.PrivilegedExceptionAction;
-
-import static org.apache.hadoop.metadata.security.SecurityProperties.*;
-
-public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
-    public static final String TEST_USER_JAAS_SECTION = "TestUser";
-    public static final String TESTUSER = "testuser";
-    public static final String TESTPASS = "testpass";
-
-    private static final String DGI_URL = "https://localhost:21443/";
-    private Driver driver;
-    private MetadataServiceClient dgiCLient;
-    private SessionState ss;
-    private TestSecureEmbeddedServer secureEmbeddedServer;
-    private Subject subject;
-    private String originalConf;
-
-    @BeforeClass
-    public void setUp() throws Exception {
-        //Set up the hive session
-        HiveConf conf = getHiveConf();
-        driver = new Driver(conf);
-        ss = new SessionState(conf, System.getProperty("user.name"));
-        ss = SessionState.start(ss);
-        SessionState.setCurrentSessionState(ss);
-
-        jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
-        providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
-
-        String persistDir = null;
-        URL resource = SSLAndKerberosHiveHookIT.class.getResource("/");
-        if (resource != null) {
-            persistDir = resource.toURI().getPath();
-        }
-        // delete prior ssl-client.xml file
-        resource = SSLAndKerberosHiveHookIT.class.getResource("/" + SecurityProperties.SSL_CLIENT_PROPERTIES);
-        if (resource != null) {
-            File sslClientFile = new File(persistDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
-            if (sslClientFile.exists()) {
-                sslClientFile.delete();
-            }
-        }
-        setupKDCAndPrincipals();
-        setupCredentials();
-
-        // the client will actually only leverage a subset of these properties
-        final PropertiesConfiguration configuration = new PropertiesConfiguration();
-        configuration.setProperty(TLS_ENABLED, true);
-        configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
-        configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
-        configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
-        configuration.setProperty("metadata.http.authentication.type", "kerberos");
-        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
-
-        configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
-
-        String confLocation = System.getProperty("metadata.conf");
-        URL url;
-        if (confLocation == null) {
-            url = PropertiesUtil.class.getResource("/application.properties");
-        } else {
-            url = new File(confLocation, "application.properties").toURI().toURL();
-        }
-        configuration.load(url);
-        configuration.setProperty(TLS_ENABLED, true);
-        configuration.setProperty("metadata.http.authentication.enabled", "true");
-        configuration.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
-        configuration.setProperty("metadata.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
-        configuration.setProperty("metadata.http.authentication.kerberos.name.rules",
-                "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
-
-        configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
-
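-        // override getClientProperties so the secure client picks up the in-memory
-        // test configuration (TLS, keystores, kerberos settings) built above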
-        dgiCLient = new MetadataServiceClient(DGI_URL) {
-            @Override
-            protected PropertiesConfiguration getClientProperties() throws MetadataException {
-                return configuration;
-            }
-        };
-
-        secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
-            @Override
-            public PropertiesConfiguration getConfiguration() {
-                return configuration;
-            }
-        };
-        WebAppContext webapp = new WebAppContext();
-        webapp.setContextPath("/");
-        webapp.setWar(System.getProperty("user.dir") + getWarPath());
-        secureEmbeddedServer.getServer().setHandler(webapp);
-
-        // save original setting
-        originalConf = System.getProperty("metadata.conf");
-        System.setProperty("metadata.conf", persistDir);
-        secureEmbeddedServer.getServer().start();
-
-        subject = loginTestUser();
-    }
-
-    @AfterClass
-    public void tearDown() throws Exception {
-        if (secureEmbeddedServer != null) {
-            secureEmbeddedServer.getServer().stop();
-        }
-
-        if (kdc != null) {
-            kdc.stop();
-        }
-
-        if (originalConf != null) {
-            System.setProperty("metadata.conf", originalConf);
-        }
-    }
-
-    protected Subject loginTestUser() throws LoginException, IOException {
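-        // JAAS login against the TestUser section, answering the name/password
-        // callbacks with the fixed test credentials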
-        LoginContext lc = new LoginContext(TEST_USER_JAAS_SECTION, new CallbackHandler() {
-
-            @Override
-            public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
-                for (int i = 0; i < callbacks.length; i++) {
-                    if (callbacks[i] instanceof PasswordCallback) {
-                        PasswordCallback passwordCallback = (PasswordCallback) callbacks[i];
-                        passwordCallback.setPassword(TESTPASS.toCharArray());
-                    }
-                    if (callbacks[i] instanceof NameCallback) {
-                        NameCallback nameCallback = (NameCallback) callbacks[i];
-                        nameCallback.setName(TESTUSER);
-                    }
-                }
-            }
-        });
-        // attempt authentication
-        lc.login();
-        return lc.getSubject();
-    }
-
-    private void runCommand(final String cmd) throws Exception {
-        ss.setCommandType(null);
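-        // execute the Hive command within the authenticated test user's security context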
-        Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
-            @Override
-            public Object run() throws Exception {
-                driver.run(cmd);
-
-                return null;
-            }
-        });
-    }
-
-    @Test
-    public void testCreateDatabase() throws Exception {
-        String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
-        runCommand("create database " + dbName);
-
-        assertDatabaseIsRegistered(dbName);
-    }
-
-    private void assertDatabaseIsRegistered(String dbName) throws Exception {
-        assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
-    }
-
-    private void assertInstanceIsRegistered(final String typeName, final String colName, final String colValue) throws Exception {
-        Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
-            @Override
-            public Object run() throws Exception {
-                JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
-                Assert.assertEquals(results.length(), 1);
-
-                return null;
-            }
-        });
-    }
-}

