atlas-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From suma...@apache.org
Subject incubator-atlas git commit: ATLAS-529 support drop database (sumasai)
Date Sat, 09 Apr 2016 17:28:44 GMT
Repository: incubator-atlas
Updated Branches:
  refs/heads/master 755e59c08 -> 1a390f011


ATLAS-529 support drop database (sumasai)


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/1a390f01
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/1a390f01
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/1a390f01

Branch: refs/heads/master
Commit: 1a390f011236b645404e025aa0f6134c4cb60994
Parents: 755e59c
Author: Suma Shivaprasad <sumasai.shivaprasad@gmail.com>
Authored: Sat Apr 9 10:28:34 2016 -0700
Committer: Suma Shivaprasad <sumasai.shivaprasad@gmail.com>
Committed: Sat Apr 9 10:28:34 2016 -0700

----------------------------------------------------------------------
 .../org/apache/atlas/hive/hook/HiveHook.java    | 40 ++++++++++--
 .../org/apache/atlas/hive/hook/HiveHookIT.java  | 68 +++++++++++++++++++-
 release-log.txt                                 |  1 +
 3 files changed, 102 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/1a390f01/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
index 505e5e7..b947a8c 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
@@ -20,6 +20,7 @@ package org.apache.atlas.hive.hook;
 
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.atlas.AtlasClient;
 import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
 import org.apache.atlas.hive.model.HiveDataModelGenerator;
 import org.apache.atlas.hive.model.HiveDataTypes;
@@ -347,6 +348,10 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext
{
             deleteTable(dgiBridge, event);
             break;
 
+        case DROPDATABASE:
+            deleteDatabase(dgiBridge, event);
+            break;
+
         default:
         }
 
@@ -354,15 +359,38 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext
{
     }
 
     private void deleteTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event) {
-        for (Entity output : event.outputs) {
+        for (WriteEntity output : event.outputs) {
+            if (Type.TABLE.equals(output.getType())) {
+                deleteTable(dgiBridge, event, output);
+            }
+        }
+    }
+
+    private void deleteTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event, WriteEntity
output) {
+        final String tblQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
output.getTable().getDbName(), output.getTable().getTableName());
+        LOG.info("Deleting table {} ", tblQualifiedName);
+        messages.add(
+            new HookNotification.EntityDeleteRequest(event.getUser(),
+                HiveDataTypes.HIVE_TABLE.getName(),
+                HiveDataModelGenerator.NAME,
+                tblQualifiedName));
+    }
+
+    private void deleteDatabase(HiveMetaStoreBridge dgiBridge, HiveEventContext event) {
+        if (event.outputs.size() > 1) {
+            LOG.info("Starting deletion of tables and databases with cascade {} " , event.queryStr);
+        }
+
+        for (WriteEntity output : event.outputs) {
             if (Type.TABLE.equals(output.getType())) {
-                final String tblQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
output.getTable().getDbName(), output.getTable().getTableName());
-                LOG.info("Deleting table {} ", tblQualifiedName);
+                deleteTable(dgiBridge, event, output);
+            } else if (Type.DATABASE.equals(output.getType())) {
+                final String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(dgiBridge.getClusterName(),
output.getDatabase().getName());
                 messages.add(
                     new HookNotification.EntityDeleteRequest(event.getUser(),
-                        HiveDataTypes.HIVE_TABLE.getName(),
-                        HiveDataModelGenerator.NAME,
-                        tblQualifiedName));
+                        HiveDataTypes.HIVE_DB.getName(),
+                        AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                        dbQualifiedName));
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/1a390f01/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index 7a09b47..2f0c71f 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -877,7 +877,6 @@ public class HiveHookIT {
         assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME,
DEFAULT_DB, tableName), "name"));
 
         final String query = String.format("drop table %s ", tableName);
-
         runCommand(query);
         assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME,
DEFAULT_DB, tableName), "id"));
         assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME,
DEFAULT_DB, tableName), "name"));
@@ -885,6 +884,65 @@ public class HiveHookIT {
     }
 
     @Test
+    public void testDropDatabaseWithCascade() throws Exception {
+        //Test Deletion of database and its corresponding tables
+        String dbName = "db" + random();
+        runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1')");
+
+        final int numTables = 10;
+        String[] tableNames = new String[numTables];
+        for(int i = 0; i < numTables; i++) {
+            tableNames[i] = createTable(true, true, false);
+        }
+
+        final String query = String.format("drop database %s cascade", dbName);
+        runCommand(query);
+
+        //Verify columns are not registered for one of the tables
+        assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME,
dbName, tableNames[0]), "id"));
+        assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME,
dbName, tableNames[0]), "name"));
+
+        for(int i = 0; i < numTables; i++) {
+            assertTableIsNotRegistered(dbName, tableNames[i]);
+        }
+        assertDBIsNotRegistered(dbName);
+    }
+
+    @Test
+    public void testDropDatabaseWithoutCascade() throws Exception {
+        //Test Deletion of database and its corresponding tables
+        String dbName = "db" + random();
+        runCommand("create database " + dbName + " WITH DBPROPERTIES ('p1'='v1')");
+
+        final int numTables = 10;
+        String[] tableNames = new String[numTables];
+        for(int i = 0; i < numTables; i++) {
+            tableNames[i] = createTable(true, true, false);
+            String query = String.format("drop table %s", tableNames[i]);
+            runCommand(query);
+            assertTableIsNotRegistered(dbName, tableNames[i]);
+        }
+
+        final String query = String.format("drop database %s", dbName);
+        runCommand(query);
+
+        assertDBIsNotRegistered(dbName);
+    }
+
+    @Test
+    public void testDropNonExistingDB() throws Exception {
+        //Test Deletion of a non existing DB
+        final String dbName = "nonexistingdb";
+        assertDBIsNotRegistered(dbName);
+        final String query = String.format("drop database if exists %s cascade", dbName);
+        runCommand(query);
+
+        //Should have no effect
+        assertDBIsNotRegistered(dbName);
+        assertProcessIsNotRegistered(query);
+    }
+
+    @Test
     public void testDropNonExistingTable() throws Exception {
         //Test Deletion of a non existing table
         final String tableName = "nonexistingtable";
@@ -1095,6 +1153,14 @@ public class HiveHookIT {
         assertEntityIsNotRegistered(QUERY_TYPE.DSL, query);
     }
 
+    private void assertDBIsNotRegistered(String dbName) throws Exception {
+        LOG.debug("Searching for database {}.{}", dbName);
+        String query = String.format(
+            "%s as d where name = '%s' and clusterName = '%s'" + " select d",
+            HiveDataTypes.HIVE_DB.getName(), dbName.toLowerCase(), CLUSTER_NAME);
+        assertEntityIsNotRegistered(QUERY_TYPE.DSL, query);
+    }
+
     private String assertTableIsRegistered(String dbName, String tableName) throws Exception
{
         LOG.debug("Searching for table {}.{}", dbName, tableName);
         String query = String.format(

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/1a390f01/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 2125f87..7c0a124 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -13,6 +13,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file
(dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via
shwethags)
 
 ALL CHANGES:
+ATLAS-529 support drop database (sumasai)
 ATLAS-528 Support drop table,view (sumasai)
 ATLAS-603 Document High Availability of Atlas (yhemanth via sumasai)
 ATLAS-498 Support Embedded HBase (tbeerbower via sumasai)


Mime
View raw message