atlas-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From suma...@apache.org
Subject incubator-atlas git commit: ATLAS-524 Support alter database (sumasai via shwethags)
Date Wed, 16 Mar 2016 18:14:35 GMT
Repository: incubator-atlas
Updated Branches:
  refs/heads/master ad0d764dd -> cd392fd74


ATLAS-524 Support alter database (sumasai via shwethags)


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/cd392fd7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/cd392fd7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/cd392fd7

Branch: refs/heads/master
Commit: cd392fd7488b6e49484f98b6c729e25ce34b07e4
Parents: ad0d764
Author: Suma Shivaprasad <sumasai.shivaprasad@gmail.com>
Authored: Wed Mar 16 10:11:40 2016 -0700
Committer: Suma Shivaprasad <sumasai.shivaprasad@gmail.com>
Committed: Wed Mar 16 10:11:40 2016 -0700

----------------------------------------------------------------------
 .../atlas/hive/bridge/HiveMetaStoreBridge.java  |   2 +-
 .../org/apache/atlas/hive/hook/HiveHook.java    |  16 +++
 .../hive/model/HiveDataModelGenerator.java      |  12 +--
 .../org/apache/atlas/hive/hook/HiveHookIT.java  | 100 ++++++++++++-------
 docs/src/site/twiki/Bridge-Hive.twiki           |   2 +-
 release-log.txt                                 |   1 +
 6 files changed, 89 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/cd392fd7/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
index 5317a9c..ea38750 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
@@ -166,7 +166,7 @@ public class HiveMetaStoreBridge {
         dbRef.set(DESCRIPTION_ATTR, hiveDB.getDescription());
         dbRef.set("locationUri", hiveDB.getLocationUri());
         dbRef.set(HiveDataModelGenerator.PARAMETERS, hiveDB.getParameters());
-        dbRef.set("ownerName", hiveDB.getOwnerName());
+        dbRef.set(HiveDataModelGenerator.OWNER, hiveDB.getOwnerName());
         if (hiveDB.getOwnerType() != null) {
             dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
         }

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/cd392fd7/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
index fc2a57a..16ed452 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
@@ -241,12 +241,28 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext
{
             alterTable(dgiBridge, event);
             break;
 
+        case ALTERDATABASE:
+        case ALTERDATABASE_OWNER:
+            alterDatabase(dgiBridge, event);
+            break;
+
         default:
         }
 
         notifyEntities(messages);
     }
 
+    private void alterDatabase(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception
{
+        assert event.outputs != null && event.outputs.size() > 0;
+
+        for (WriteEntity writeEntity : event.outputs) {
+            if (writeEntity.getType() == Type.DATABASE) {
+                //Create/update the database entity
+                createOrUpdateEntities(dgiBridge, writeEntity);
+            }
+        }
+    }
+
     private void alterTable(HiveMetaStoreBridge dgiBridge, HiveEvent event) throws Exception
{
         assert event.inputs != null && event.inputs.size() == 1;
         assert event.outputs != null && event.outputs.size() > 0;

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/cd392fd7/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
index c123719..2ca5953 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
@@ -42,11 +42,6 @@ import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
@@ -81,6 +76,7 @@ public class HiveDataModelGenerator {
     public static final String STORAGE_DESC = "sd";
     public static final String STORAGE_DESC_INPUT_FMT = "inputFormat";
     public static final String STORAGE_DESC_OUTPUT_FMT = "outputFormat";
+    public static final String OWNER = "owner";
 
     public HiveDataModelGenerator() {
         classTypeDefinitions = new HashMap<>();
@@ -242,7 +238,7 @@ public class HiveDataModelGenerator {
                 new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
false,
                         null),
                 new AttributeDefinition(HiveDataModelGenerator.PARAMETERS, STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
false,
+                new AttributeDefinition(OWNER, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
false,
                         null),
                 new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(),
Multiplicity.OPTIONAL,
                         false, null),};
@@ -307,7 +303,7 @@ public class HiveDataModelGenerator {
                 new AttributeDefinition(TABLE_NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED,
false,
                         null),
                 new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(), Multiplicity.REQUIRED,
false, null),
-                new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
false, null),
+                new AttributeDefinition(OWNER, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
false, null),
                 new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL,
false,
                         null),
                 new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(),
Multiplicity.OPTIONAL, false,
@@ -342,7 +338,7 @@ public class HiveDataModelGenerator {
                         null),
                 new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED,
false,
                         null),
-                new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED,
false,
+                new AttributeDefinition(OWNER, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED,
false,
                         null),};
         HierarchicalTypeDefinition<ClassType> definition =
                 new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_ROLE.getName(),
null, null,

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/cd392fd7/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index f2084b0..07ee139 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -36,6 +36,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.codehaus.jettison.json.JSONArray;
@@ -236,7 +237,6 @@ public class HiveHookIT {
         String table2Id = assertTableIsRegistered(DEFAULT_DB, table2Name);
         Assert.assertEquals(assertTableIsRegistered(DEFAULT_DB, viewName), viewId);
 
-        //Check lineage which includes both table1 and table2
         datasetName = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB,
viewName);
         response = dgiCLient.getInputGraph(datasetName);
         vertices = response.getJSONObject("values").getJSONObject("vertices");
@@ -469,7 +469,7 @@ public class HiveHookIT {
         Struct serdeInfo = (Struct) sdRef.get("serdeInfo");
         Assert.assertEquals(serdeInfo.get("serializationLib"), "org.apache.hadoop.hive.ql.io.orc.OrcSerde");
         Assert.assertNotNull(serdeInfo.get(HiveDataModelGenerator.PARAMETERS));
-        Assert.assertEquals(((Map<String, String>) serdeInfo.get(HiveDataModelGenerator.PARAMETERS)).get("serialization.format"),
"1");
+        Assert.assertEquals(((Map<String, String>)serdeInfo.get(HiveDataModelGenerator.PARAMETERS)).get("serialization.format"),
"1");
 
 
         /**
@@ -499,7 +499,6 @@ public class HiveHookIT {
         final String fmtQuery = "alter table %s CLUSTERED BY (%s) SORTED BY (%s) INTO %s
BUCKETS";
         String query = String.format(fmtQuery, tableName, stripListBrackets(bucketCols.toString()),
stripListBrackets(sortCols.toString()), numBuckets);
         runCommand(query);
-
         verifyBucketSortingProperties(tableName, numBuckets, bucketCols, sortCols);
     }
 
@@ -568,14 +567,35 @@ public class HiveHookIT {
     }
 
     @Test
+    public void testAlterDBOwner() throws Exception {
+        String dbName = createDatabase();
+        final String owner = "testOwner";
+        String dbId = assertDatabaseIsRegistered(dbName);
+        final String fmtQuery = "alter database %s set OWNER %s %s";
+        String query = String.format(fmtQuery, dbName, "USER", owner);
+
+        runCommand(query);
+
+        assertDatabaseIsRegistered(dbName);
+        Referenceable entity = dgiCLient.getEntity(dbId);
+        Assert.assertEquals(entity.get(HiveDataModelGenerator.OWNER), owner);
+    }
+
+    @Test
+    public void testAlterDBProperties() throws Exception {
+        String dbName = createDatabase();
+        final String fmtQuery = "alter database %s %s DBPROPERTIES (%s)";
+        testAlterProperties(Entity.Type.DATABASE, dbName, fmtQuery);
+    }
+
+    @Test
     public void testAlterTableProperties() throws Exception {
         String tableName = createTable();
         final String fmtQuery = "alter table %s %s TBLPROPERTIES (%s)";
-        testAlterProperties(tableName, fmtQuery);
+        testAlterProperties(Entity.Type.TABLE, tableName, fmtQuery);
     }
 
-    private void testAlterProperties(String tableName, String fmtQuery) throws Exception
{
-
+    private void testAlterProperties(Entity.Type entityType, String entityName, String fmtQuery)
throws Exception {
         final String SET_OP = "set";
         final String UNSET_OP = "unset";
 
@@ -584,24 +604,25 @@ public class HiveHookIT {
             put("comment", "test comment");
         }};
 
-        String query = String.format(fmtQuery, tableName, SET_OP, getSerializedProps(expectedProps));
+        String query = String.format(fmtQuery, entityName, SET_OP, getSerializedProps(expectedProps));
         runCommand(query);
-
-        verifyTableProperties(tableName, expectedProps, false);
+        verifyEntityProperties(entityType, entityName, expectedProps, false);
 
         expectedProps.put("testPropKey2", "testPropValue2");
         //Add another property
-        query = String.format(fmtQuery, tableName, SET_OP, getSerializedProps(expectedProps));
+        query = String.format(fmtQuery, entityName, SET_OP, getSerializedProps(expectedProps));
         runCommand(query);
+        verifyEntityProperties(entityType, entityName, expectedProps, false);
 
-        verifyTableProperties(tableName, expectedProps, false);
+        if (entityType != Entity.Type.DATABASE) {
+            //Unsetting database properties does not work - "alter database %s unset DBPROPERTIES"
is not supported, so skip the unset verification for databases
+            //Unset all the props
+            StringBuilder sb = new StringBuilder("'");
+            query = String.format(fmtQuery, entityName, UNSET_OP, Joiner.on("','").skipNulls().appendTo(sb,
expectedProps.keySet()).append('\''));
+            runCommand(query);
 
-        //Unset all the props
-        StringBuilder sb = new StringBuilder("'");
-        query = String.format(fmtQuery, tableName, UNSET_OP, Joiner.on("','").skipNulls().appendTo(sb,
expectedProps.keySet()).append('\''));
-        runCommand(query);
-
-        verifyTableProperties(tableName, expectedProps, true);
+            verifyEntityProperties(entityType, entityName, expectedProps, true);
+        }
     }
 
     @Test
@@ -612,13 +633,38 @@ public class HiveHookIT {
         runCommand(query);
 
         final String fmtQuery = "alter view %s %s TBLPROPERTIES (%s)";
-        testAlterProperties(viewName, fmtQuery);
+        testAlterProperties(Entity.Type.TABLE, viewName, fmtQuery);
+    }
+
+    private void verifyEntityProperties(Entity.Type type, String entityName, Map<String,
String> expectedProps, boolean checkIfNotExists) throws Exception {
+
+        String entityId = null;
+
+        switch(type) {
+        case TABLE:
+            entityId = assertTableIsRegistered(DEFAULT_DB, entityName);
+            break;
+        case DATABASE:
+            entityId = assertDatabaseIsRegistered(entityName);
+            break;
+        }
+
+        Referenceable ref = dgiCLient.getEntity(entityId);
+        verifyProperties(ref, expectedProps, checkIfNotExists);
     }
 
-    private void verifyTableProperties(String tableName, Map<String, String> expectedProps,
boolean checkIfNotExists) throws Exception {
+    private void verifyTableSdProperties(String tableName, String serdeLib, Map<String,
String> expectedProps) throws Exception {
         String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
         Referenceable tableRef = dgiCLient.getEntity(tableId);
-        Map<String, String> parameters = (Map<String, String>) tableRef.get(HiveDataModelGenerator.PARAMETERS);
+        Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
+        Struct serdeInfo = (Struct) sdRef.get("serdeInfo");
+        Assert.assertEquals(serdeInfo.get("serializationLib"), serdeLib);
+        verifyProperties(serdeInfo, expectedProps, false);
+    }
+
+    private void verifyProperties(Struct referenceable, Map<String, String> expectedProps,
boolean checkIfNotExists) {
+        Map<String, String> parameters = (Map<String, String>) referenceable.get(HiveDataModelGenerator.PARAMETERS);
+
         if (checkIfNotExists == false) {
             //Check if properties exist
             Assert.assertNotNull(parameters);
@@ -635,20 +681,6 @@ public class HiveHookIT {
         }
     }
 
-    private void verifyTableSdProperties(String tableName, String serdeLib, Map<String,
String> expectedProps) throws Exception {
-        String tableId = assertTableIsRegistered(DEFAULT_DB, tableName);
-        Referenceable tableRef = dgiCLient.getEntity(tableId);
-        Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
-        Struct serdeInfo = (Struct) sdRef.get("serdeInfo");
-        Assert.assertEquals(serdeInfo.get("serializationLib"), serdeLib);
-        Map<String, String> parameters = (Map<String, String>) serdeInfo.get(HiveDataModelGenerator.PARAMETERS);
-        Assert.assertNotNull(parameters);
-        //Comment should exist since SET TBLPROPERTIES only adds properties. Does not remove
existing ones
-        for (String propKey : expectedProps.keySet()) {
-            Assert.assertEquals(parameters.get(propKey), expectedProps.get(propKey));
-        }
-    }
-
     private String assertProcessIsRegistered(String queryStr) throws Exception {
         //        String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
         //                normalize(queryStr));

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/cd392fd7/docs/src/site/twiki/Bridge-Hive.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/Bridge-Hive.twiki b/docs/src/site/twiki/Bridge-Hive.twiki
index 3887871..c52de98 100644
--- a/docs/src/site/twiki/Bridge-Hive.twiki
+++ b/docs/src/site/twiki/Bridge-Hive.twiki
@@ -79,4 +79,4 @@ Refer [[Configuration][Configuration]] for notification related configurations
 
 ---++ Limitations
    * Since database name, table name and column names are case insensitive in hive, the corresponding
names in entities are lowercase. So, any search APIs should use lowercase while querying on
the entity names
-   * Only the following hive operations are captured by hive hook currently - create database,
create table, create view, CTAS, load, import, export, query, alter table rename and alter
view rename
+   * Only the following hive operations are captured by hive hook currently - create database,
create table, create view, CTAS, load, import, export, query, alter database, alter table(except
alter table replace columns and alter table change column position), alter view (except replacing
and changing column position)

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/cd392fd7/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 86e5c5d..f640488 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -13,6 +13,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file
(dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via
shwethags)
 
 ALL CHANGES:
+ATLAS-524 Support alter database (sumasai via shwethags)
 ATLAS-539 Store for entity audit events (shwethags)
 ATLAS-523 Support alter view (sumasai via shwethags)
 ATLAS-555 Tag creation from UI fails due to missing description attribute (guptaneeru via
shwethags)


Mime
View raw message