atlas-commits mailing list archives

From suma...@apache.org
Subject incubator-atlas git commit: ATLAS-538 Rename table should retain traits/tags assigned to columns/storage descriptors (sumasai)
Date Fri, 22 Apr 2016 00:23:09 GMT
Repository: incubator-atlas
Updated Branches:
  refs/heads/master 81a0c6ffe -> f147d3fff


ATLAS-538 Rename table should retain traits/tags assigned to columns/storage descriptors (sumasai)


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/f147d3ff
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/f147d3ff
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/f147d3ff

Branch: refs/heads/master
Commit: f147d3fff28e6a26395de48c6a0b964e79fc7d48
Parents: 81a0c6f
Author: Suma Shivaprasad <sumasai.shivaprasad@gmail.com>
Authored: Thu Apr 21 17:22:58 2016 -0700
Committer: Suma Shivaprasad <sumasai.shivaprasad@gmail.com>
Committed: Thu Apr 21 17:22:58 2016 -0700

----------------------------------------------------------------------
 .../atlas/hive/bridge/HiveMetaStoreBridge.java  |   2 +-
 .../org/apache/atlas/hive/hook/HiveHook.java    |  95 ++++++++++---
 .../hive/model/HiveDataModelGenerator.java      |   3 +-
 .../org/apache/atlas/hive/hook/HiveHookIT.java  | 141 +++++++++++++------
 .../main/java/org/apache/atlas/AtlasClient.java |  62 ++++++++
 release-log.txt                                 |   1 +
 6 files changed, 245 insertions(+), 59 deletions(-)
----------------------------------------------------------------------
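
For illustration (an editor's sketch, not part of this commit): the fix hinges on Atlas partial-update notifications. Instead of registering fresh column/storage-descriptor entities under the renamed table, which orphaned the old ones along with their traits, the hook now sends EntityPartialUpdateRequest messages keyed on each entity's old qualifiedName and rewriting only that attribute, so the existing entities keep their GUIDs and traits. A minimal sketch of one such message; the import paths and qualified-name values are assumptions, not taken from this diff:

    import org.apache.atlas.AtlasClient;
    import org.apache.atlas.hive.model.HiveDataTypes;
    import org.apache.atlas.notification.hook.HookNotification;
    import org.apache.atlas.typesystem.Referenceable;

    public class PartialUpdateSketch {
        // Only qualifiedName is set on the replacement entity, so Atlas rewrites that
        // one attribute on the existing hive_column; its GUID and traits are untouched.
        static HookNotification.EntityPartialUpdateRequest renameColumn(String user) {
            String oldColumnQFName = "default.t1.id@primary"; // illustrative pre-rename name
            String newColumnQFName = "newdb.t2.id@primary";   // illustrative post-rename name
            Referenceable newColEntity = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
            newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName);
            return new HookNotification.EntityPartialUpdateRequest(user,
                    HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
                    oldColumnQFName, newColEntity);
        }
    }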


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f147d3ff/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
index a28b4ac..630b49b 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
@@ -353,7 +353,7 @@ public class HiveMetaStoreBridge {
         return tableReference;
     }
 
-    private String getStorageDescQFName(String entityQualifiedName) {
+    public static String getStorageDescQFName(String entityQualifiedName) {
         return entityQualifiedName + "_storage";
     }
 
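For reference (a sketch, not part of this change): getStorageDescQFName is made public static so HiveHook can derive a storage descriptor's qualified name from its table's. Per the one-line body above, the convention is a plain "_storage" suffix; the table name below is illustrative:

    import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;

    public class SDNameSketch {
        public static void main(String[] args) {
            String tableQFName = "default.t1@primary"; // illustrative table qualified name
            // Appends "_storage" to the owning table's qualified name:
            System.out.println(HiveMetaStoreBridge.getStorageDescQFName(tableQFName));
            // prints: default.t1@primary_storage
        }
    }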

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f147d3ff/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
index 749294f..813177f 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -299,7 +300,9 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 
         case CREATETABLE:
             List<Pair<? extends Entity, Referenceable>> tablesCreated = handleEventOutputs(dgiBridge, event, Type.TABLE);
-            handleExternalTables(dgiBridge, event, tablesCreated.get(0).getLeft(), tablesCreated.get(0).getRight());
+            if (tablesCreated.size() > 0) {
+                handleExternalTables(dgiBridge, event, tablesCreated.get(0).getLeft(), tablesCreated.get(0).getRight());
+            }
             break;
 
         case CREATETABLE_AS_SELECT:
@@ -409,28 +412,87 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
                 Table newTable = writeEntity.getTable();
                 //Hive sends with both old and new table names in the outputs which is weird. So skipping that with the below check
                 if (!newTable.getDbName().equals(oldTable.getDbName()) || !newTable.getTableName().equals(oldTable.getTableName())) {
-                    //Create/update old table entity - create new entity with oldQFNme and tableName
-                    Referenceable tableEntity = createOrUpdateEntities(dgiBridge, event.getUser(), writeEntity);
-                    String oldQualifiedName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
+                    final String oldQualifiedName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
                         oldTable.getDbName(), oldTable.getTableName());
-                    tableEntity.set(HiveDataModelGenerator.NAME, oldQualifiedName);
-                    tableEntity.set(HiveDataModelGenerator.TABLE_NAME, oldTable.getTableName().toLowerCase());
-
-                    String newQualifiedName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
+                    final String newQualifiedName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
                         newTable.getDbName(), newTable.getTableName());
 
-                    //Replace entity with new name
-                    Referenceable newEntity = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
-                    newEntity.set(HiveDataModelGenerator.NAME, newQualifiedName);
-                    newEntity.set(HiveDataModelGenerator.TABLE_NAME, newTable.getTableName().toLowerCase());
-                    messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
-                        HiveDataTypes.HIVE_TABLE.getName(), HiveDataModelGenerator.NAME,
-                        oldQualifiedName, newEntity));
+                    //Create/update old table entity - create entity with oldQFName and old tableName if it doesn't exist. If it exists, it will update
+                    //We always use the new entity while creating the table since some flags, attributes of the table are not set in inputEntity and Hive.getTable(oldTableName) also fails since the table doesn't exist in Hive anymore
+                    final Referenceable tableEntity = createOrUpdateEntities(dgiBridge, event.getUser(), writeEntity);
+
+                    //Reset regular column QF Name to old Name and create a new partial notification request to replace old column QFName to newName to retain any existing traits
+                    replaceColumnQFName(event, (List<Referenceable>) tableEntity.get(HiveDataModelGenerator.COLUMNS), oldQualifiedName, newQualifiedName);
+
+                    //Reset partition key column QF Name to old Name and create a new partial notification request to replace old column QFName to newName to retain any existing traits
+                    replaceColumnQFName(event, (List<Referenceable>) tableEntity.get(HiveDataModelGenerator.PART_COLS), oldQualifiedName, newQualifiedName);
+
+                    //Reset SD QF Name to old Name and create a new partial notification request to replace old SD QFName to newName to retain any existing traits
+                    replaceSDQFName(event, tableEntity, oldQualifiedName, newQualifiedName);
+
+                    //Reset Table QF Name to old Name and create a new partial notification request to replace old Table QFName to newName
+                    replaceTableQFName(dgiBridge, event, oldTable, newTable, tableEntity, oldQualifiedName, newQualifiedName);
                 }
             }
         }
     }
 
+    private Referenceable replaceTableQFName(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Table oldTable, Table newTable, final Referenceable tableEntity, final String oldTableQFName, final String newTableQFName) throws HiveException {
+        tableEntity.set(HiveDataModelGenerator.NAME, oldTableQFName);
+        tableEntity.set(HiveDataModelGenerator.TABLE_NAME, oldTable.getTableName().toLowerCase());
+        final Referenceable newDbInstance = (Referenceable) tableEntity.get(HiveDataModelGenerator.DB);
+        tableEntity.set(HiveDataModelGenerator.DB, dgiBridge.createDBInstance(dgiBridge.hiveClient.getDatabase(oldTable.getDbName())));
+
+        //Replace table entity with new name
+        final Referenceable newEntity = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
+        newEntity.set(HiveDataModelGenerator.NAME, newTableQFName);
+        newEntity.set(HiveDataModelGenerator.TABLE_NAME, newTable.getTableName().toLowerCase());
+        newEntity.set(HiveDataModelGenerator.DB, newDbInstance);
+
+        messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+            HiveDataTypes.HIVE_TABLE.getName(), HiveDataModelGenerator.NAME,
+            oldTableQFName, newEntity));
+
+        return newEntity;
+    }
+
+    private List<Referenceable> replaceColumnQFName(final HiveEventContext event, final List<Referenceable> cols, final String oldTableQFName, final String newTableQFName) {
+        List<Referenceable> newColEntities = new ArrayList<>();
+        for (Referenceable col : cols) {
+            final String colName = (String) col.get(HiveDataModelGenerator.NAME);
+            String oldColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(oldTableQFName, colName);
+            String newColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newTableQFName, colName);
+            col.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldColumnQFName);
+
+            Referenceable newColEntity = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
+            ///Only QF Name changes
+            newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName);
+            messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+                HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                oldColumnQFName, newColEntity));
+            newColEntities.add(newColEntity);
+        }
+        return newColEntities;
+    }
+
+    private Referenceable replaceSDQFName(final HiveEventContext event, Referenceable tableEntity, final String oldTblQFName, final String newTblQFName) {
+        //Reset storage desc QF Name to old Name
+        final Referenceable sdRef = ((Referenceable) tableEntity.get(HiveDataModelGenerator.STORAGE_DESC));
+        sdRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getStorageDescQFName(oldTblQFName));
+
+        //Replace SD QF name first to retain tags
+        final String oldSDQFName = HiveMetaStoreBridge.getStorageDescQFName(oldTblQFName);
+        final String newSDQFName = HiveMetaStoreBridge.getStorageDescQFName(newTblQFName);
+
+        final Referenceable newSDEntity = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
+        newSDEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newSDQFName);
+        messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+            HiveDataTypes.HIVE_STORAGEDESC.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+            oldSDQFName, newSDEntity));
+
+        return newSDEntity;
+    }
+
     private Referenceable createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, String user, Entity entity) throws Exception {
         Database db = null;
         Table table = null;
@@ -459,6 +521,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         entities.add(dbEntity);
 
         Referenceable tableEntity = null;
+
         if (table != null) {
             table = dgiBridge.hiveClient.getTable(table.getDbName(), table.getTableName());
             tableEntity = dgiBridge.createTableInstance(dbEntity, table);
@@ -508,7 +571,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 
         boolean isSelectQuery = isSelectQuery(event);
 
-        // Also filter out select queries which do not modify data
+        // filter out select queries which do not modify data
         if (!isSelectQuery) {
             for (ReadEntity readEntity : event.getInputs()) {
                 processHiveEntity(dgiBridge, event, readEntity, source);
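
Summarizing the rename path above (editor's sketch): the hook first re-targets the rebuilt entities at their pre-rename qualified names, then emits partial updates in dependency order, with the storage descriptor renamed before the table to retain its tags (per the comment in replaceSDQFName). For "alter table default.t1 rename to newdb.t2" the notification sequence comes out roughly as below; the qualified-name formats are assumptions:

    public class RenameSequenceSketch {
        public static void main(String[] args) {
            String[] emitted = {
                "update   hive_table        default.t1@primary            (full entity, old qualified name)",
                "partial  hive_column       default.t1.id@primary      -> newdb.t2.id@primary",
                "partial  hive_column       default.t1.dt@primary      -> newdb.t2.dt@primary   (partition key)",
                "partial  hive_storagedesc  default.t1@primary_storage -> newdb.t2@primary_storage",
                "partial  hive_table        default.t1@primary         -> newdb.t2@primary",
            };
            for (String m : emitted) {
                System.out.println(m); // one line per message the hook sends
            }
        }
    }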

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f147d3ff/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
index f099e39..1541944 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
@@ -64,6 +64,7 @@ public class HiveDataModelGenerator {
     public static final String COMMENT = "comment";
     public static final String PARAMETERS = "parameters";
     public static final String COLUMNS = "columns";
+    public static final String PART_COLS = "partitionKeys";
 
     public static final String STORAGE_NUM_BUCKETS = "numBuckets";
     public static final String STORAGE_IS_STORED_AS_SUB_DIRS = "storedAsSubDirectories";
@@ -244,7 +245,7 @@ public class HiveDataModelGenerator {
                 new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition(STORAGE_DESC, HiveDataTypes.HIVE_STORAGEDESC.getName(), Multiplicity.OPTIONAL, true,
                         null),
-                new AttributeDefinition("partitionKeys", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
+                new AttributeDefinition(PART_COLS, DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
                         Multiplicity.OPTIONAL, true, null),
                 new AttributeDefinition("columns", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
                         Multiplicity.OPTIONAL, true, null),
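
Side note (editor's sketch): the new PART_COLS constant names the existing "partitionKeys" attribute, letting the hook read partition-key columns off a table entity the same way it reads regular columns; both are arrays of hive_column references, which is what makes the uniform rename handling above possible. The cast mirrors the hook code:

    import java.util.List;

    import org.apache.atlas.hive.model.HiveDataModelGenerator;
    import org.apache.atlas.typesystem.Referenceable;

    public class PartColsSketch {
        @SuppressWarnings("unchecked")
        static void printColumnCounts(Referenceable tableEntity) {
            List<Referenceable> cols = (List<Referenceable>) tableEntity.get(HiveDataModelGenerator.COLUMNS);
            List<Referenceable> partCols = (List<Referenceable>) tableEntity.get(HiveDataModelGenerator.PART_COLS);
            System.out.println(cols.size() + " columns, " + partCols.size() + " partition keys");
        }
    }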

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f147d3ff/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index 4c7ac70..5a6dba3 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -20,6 +20,7 @@ package org.apache.atlas.hive.hook;
 
 import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
 import com.sun.jersey.api.client.ClientResponse;
 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasClient;
@@ -30,7 +31,12 @@ import org.apache.atlas.hive.model.HiveDataModelGenerator;
 import org.apache.atlas.hive.model.HiveDataTypes;
 import org.apache.atlas.typesystem.Referenceable;
 import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.json.InstanceSerialization;
+import org.apache.atlas.typesystem.json.TypesSerialization$;
 import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
 import org.apache.atlas.typesystem.types.TypeSystem;
 import org.apache.atlas.utils.ParamChecker;
 import org.apache.commons.configuration.Configuration;
@@ -45,12 +51,17 @@ import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
+import javax.ws.rs.HttpMethod;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
 import java.io.File;
 import java.text.ParseException;
 import java.util.Date;
@@ -268,6 +279,12 @@ public class HiveHookIT {
                 colName, assertPredicate);
     }
 
+    private String assertSDIsRegistered(String sdQFName, AssertPredicate assertPredicate) throws Exception {
+        LOG.debug("Searching for sd {}", sdQFName.toLowerCase());
+        return assertEntityIsRegistered(HiveDataTypes.HIVE_STORAGEDESC.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+            sdQFName.toLowerCase(), assertPredicate);
+    }
+
     private void assertColumnIsNotRegistered(String colName) throws Exception {
         LOG.debug("Searching for column {}", colName);
         assertEntityIsNotRegistered(HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
@@ -476,6 +493,7 @@ public class HiveHookIT {
     public void testInsertIntoTempTable() throws Exception {
         String tableName = createTable();
         String insertTableName = createTable(false, false, true);
+
         String query =
             "insert into " + insertTableName + " select id, name from " + tableName;
 
@@ -590,17 +608,42 @@ public class HiveHookIT {
 
     @Test
     public void testAlterTableRename() throws Exception {
-        String tableName = createTable();
+        String tableName = createTable(true);
         final String newDBName = createDatabase();
+
         assertTableIsRegistered(DEFAULT_DB, tableName);
+        String columnGuid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), HiveDataModelGenerator.NAME));
+        String sdGuid = assertSDIsRegistered(HiveMetaStoreBridge.getStorageDescQFName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName)), null);
         assertDatabaseIsRegistered(newDBName);
 
+        //Add trait to column
+        String colTraitDetails = createTrait(columnGuid);
+
+        //Add trait to sd
+        String sdTraitDetails = createTrait(sdGuid);
+
+        String partColumnGuid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "dt"));
+        //Add trait to part col keys
+        String partColTraitDetails = createTrait(partColumnGuid);
+
         String newTableName = tableName();
         String query = String.format("alter table %s rename to %s", DEFAULT_DB + "." + tableName, newDBName + "." + newTableName);
         runCommand(query);
 
-        assertTableIsRegistered(newDBName, newTableName);
+        String newColGuid = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, newDBName, newTableName), HiveDataModelGenerator.NAME));
+        Assert.assertEquals(newColGuid, columnGuid);
+
+        assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, newDBName, tableName), HiveDataModelGenerator.NAME));
+
+        assertTrait(columnGuid, colTraitDetails);
+        String newSdGuid = assertSDIsRegistered(HiveMetaStoreBridge.getStorageDescQFName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, newDBName, newTableName)), null);
+        Assert.assertEquals(newSdGuid, sdGuid);
+
+        assertTrait(sdGuid, sdTraitDetails);
+        assertTrait(partColumnGuid, partColTraitDetails);
+
         assertTableIsNotRegistered(DEFAULT_DB, tableName);
+        assertTableIsRegistered(newDBName, newTableName);
     }
 
     private List<Referenceable> getColumns(String dbName, String tableName) throws Exception {
@@ -609,6 +652,22 @@ public class HiveHookIT {
         return ((List<Referenceable>)tableRef.get(HiveDataModelGenerator.COLUMNS));
     }
 
+
+    private String createTrait(String guid) throws AtlasServiceException, JSONException {
+        //add trait
+        String traitName = "PII_Trait" + RandomStringUtils.random(10);
+        atlasClient.createTraitType(traitName);
+
+        Struct traitInstance = new Struct(traitName);
+        atlasClient.addTrait(guid, traitInstance);
+        return traitName;
+    }
+
+    private void assertTrait(String guid, String traitName) throws AtlasServiceException, JSONException {
+        List<String> traits = atlasClient.listTraits(guid);
+        Assert.assertEquals(traits.get(0), traitName);
+    }
+
     @Test
     public void testAlterTableAddColumn() throws Exception {
         String tableName = createTable();
@@ -618,7 +677,7 @@ public class HiveHookIT {
 
         assertColumnIsRegistered(HiveMetaStoreBridge
                 .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
-                        column));
+                    column));
 
         //Verify the number of columns present in the table
         final List<Referenceable> columns = getColumns(DEFAULT_DB, tableName);
@@ -633,8 +692,8 @@ public class HiveHookIT {
         runCommand(query);
 
         assertColumnIsNotRegistered(HiveMetaStoreBridge
-                .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
-                        colDropped));
+            .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
+                colDropped));
 
         //Verify the number of columns present in the table
         assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
@@ -642,7 +701,7 @@ public class HiveHookIT {
             public void assertOnEntity(Referenceable tableRef) throws Exception {
                 List<Referenceable> columns = (List<Referenceable>) tableRef.get(HiveDataModelGenerator.COLUMNS);
                 Assert.assertEquals(columns.size(), 1);
-                Assert.assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), "name");
+                Assert.assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), HiveDataModelGenerator.NAME);
 
             }
         });
@@ -651,7 +710,7 @@ public class HiveHookIT {
     @Test
     public void testAlterTableChangeColumn() throws Exception {
         //Change name
-        String oldColName = "name";
+        String oldColName = HiveDataModelGenerator.NAME;
         String newColName = "name1";
         String tableName = createTable();
         String query = String.format("alter table %s change %s %s string", tableName, oldColName, newColName);
@@ -691,14 +750,14 @@ public class HiveHookIT {
         newColName = "name3";
         final String comment = "added comment";
         query = String.format("alter table %s change column %s %s %s COMMENT '%s' after id", tableName, oldColName,
-                newColName, newColType, comment);
+            newColName, newColType, comment);
         runCommand(query);
 
         columns = getColumns(DEFAULT_DB, tableName);
         Assert.assertEquals(columns.size(), 2);
 
         assertColumnIsNotRegistered(HiveMetaStoreBridge.getColumnQualifiedName(
-                HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
+            HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), oldColName));
         newColQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(
                 HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), newColName);
 
@@ -728,13 +787,13 @@ public class HiveHookIT {
 
         final String finalNewColName = newColName;
         assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
-                    @Override
-                    public void assertOnEntity(Referenceable entity) throws Exception {
-                        List<Referenceable> columns = (List<Referenceable>) entity.get(HiveDataModelGenerator.COLUMNS);
-                        assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), finalNewColName);
-                        assertEquals(columns.get(1).get(HiveDataModelGenerator.NAME), "id");
-                    }
+                @Override
+                public void assertOnEntity(Referenceable entity) throws Exception {
+                    List<Referenceable> columns = (List<Referenceable>) entity.get(HiveDataModelGenerator.COLUMNS);
+                    assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), finalNewColName);
+                    assertEquals(columns.get(1).get(HiveDataModelGenerator.NAME), "id");
                 }
+            }
         );
 
         //Change col position again
@@ -756,13 +815,13 @@ public class HiveHookIT {
         //Check col position
         final String finalNewColName2 = newColName;
         assertTableIsRegistered(DEFAULT_DB, tableName, new AssertPredicate() {
-                    @Override
-                    public void assertOnEntity(Referenceable entity) throws Exception {
-                        List<Referenceable> columns = (List<Referenceable>) entity.get(HiveDataModelGenerator.COLUMNS);
-                        assertEquals(columns.get(1).get(HiveDataModelGenerator.NAME), finalNewColName2);
-                        assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), "id");
-                    }
+                @Override
+                public void assertOnEntity(Referenceable entity) throws Exception {
+                    List<Referenceable> columns = (List<Referenceable>) entity.get(HiveDataModelGenerator.COLUMNS);
+                    assertEquals(columns.get(1).get(HiveDataModelGenerator.NAME), finalNewColName2);
+                    assertEquals(columns.get(0).get(HiveDataModelGenerator.NAME), "id");
                 }
+            }
         );
     }
 
@@ -791,7 +850,7 @@ public class HiveHookIT {
         runCommand(query);
 
         String colQualifiedName = HiveMetaStoreBridge.getColumnQualifiedName(
-                HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "dt");
+            HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "dt");
         final String dtColId = assertColumnIsRegistered(colQualifiedName, new AssertPredicate() {
             @Override
             public void assertOnEntity(Referenceable column) throws Exception {
@@ -837,7 +896,7 @@ public class HiveHookIT {
             @Override
             public void assertOnEntity(Referenceable tableRef) throws Exception {
                 Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
-                Assert.assertEquals(sdRef.get("location"), testPath);
+                Assert.assertEquals(new Path((String)sdRef.get("location")).toString(), new Path(testPath).toString());
             }
         });
 
@@ -856,7 +915,7 @@ public class HiveHookIT {
 
         Referenceable hdfsPathRef = atlasClient.getEntity(hdfsPathId);
         Assert.assertEquals(hdfsPathRef.get("path"), testPathNormed);
-        Assert.assertEquals(hdfsPathRef.get("name"), testPathNormed);
+        Assert.assertEquals(hdfsPathRef.get(HiveDataModelGenerator.NAME), testPathNormed);
 //        Assert.assertEquals(hdfsPathRef.get("name"), new Path(testPath).getName());
         Assert.assertEquals(hdfsPathRef.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), testPathNormed);
 
@@ -865,7 +924,7 @@ public class HiveHookIT {
 
     private String assertHDFSPathIsRegistered(String path) throws Exception {
         LOG.debug("Searching for hdfs path {}", path);
-        return assertEntityIsRegistered(FSDataTypes.HDFS_PATH().toString(), "name", path, null);
+        return assertEntityIsRegistered(FSDataTypes.HDFS_PATH().toString(), HiveDataModelGenerator.NAME, path, null);
     }
 
     @Test
@@ -880,18 +939,18 @@ public class HiveHookIT {
             public void assertOnEntity(Referenceable tableRef) throws Exception {
                 Referenceable sdRef = (Referenceable) tableRef.get(HiveDataModelGenerator.STORAGE_DESC);
                 Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_DESC_INPUT_FMT),
-                        "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat");
+                    "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat");
                 Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_DESC_OUTPUT_FMT),
-                        "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
+                    "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
                 Assert.assertNotNull(sdRef.get("serdeInfo"));
 
                 Struct serdeInfo = (Struct) sdRef.get("serdeInfo");
                 Assert.assertEquals(serdeInfo.get("serializationLib"), "org.apache.hadoop.hive.ql.io.orc.OrcSerde");
                 Assert.assertNotNull(serdeInfo.get(HiveDataModelGenerator.PARAMETERS));
                 Assert.assertEquals(
-                        ((Map<String, String>) serdeInfo.get(HiveDataModelGenerator.PARAMETERS))
-                                .get("serialization.format"),
-                        "1");
+                    ((Map<String, String>) serdeInfo.get(HiveDataModelGenerator.PARAMETERS))
+                        .get("serialization.format"),
+                    "1");
             }
         });
 
@@ -915,7 +974,7 @@ public class HiveHookIT {
         ImmutableList<String> cols = ImmutableList.of("id");
         runBucketSortQuery(tableName, 5, cols, cols);
 
-        cols = ImmutableList.of("id", "name");
+        cols = ImmutableList.of("id", HiveDataModelGenerator.NAME);
         runBucketSortQuery(tableName, 2, cols, cols);
     }
 
@@ -978,16 +1037,16 @@ public class HiveHookIT {
 
         assertTableIsRegistered(DEFAULT_DB, tableName);
         assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "id"));
-        assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), "name"));
+        assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName), HiveDataModelGenerator.NAME));
 
         final String query = String.format("drop table %s ", tableName);
         runCommand(query);
         assertColumnIsNotRegistered(HiveMetaStoreBridge
                 .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
-                        "id"));
+                    "id"));
         assertColumnIsNotRegistered(HiveMetaStoreBridge
                 .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, tableName),
-                        "name"));
+                    HiveDataModelGenerator.NAME));
         assertTableIsNotRegistered(DEFAULT_DB, tableName);
     }
 
@@ -1011,7 +1070,7 @@ public class HiveHookIT {
                 HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]), "id"));
         assertColumnIsNotRegistered(HiveMetaStoreBridge
                 .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, dbName, tableNames[0]),
-                        "name"));
+                    HiveDataModelGenerator.NAME));
 
         for(int i = 0; i < numTables; i++) {
             assertTableIsNotRegistered(dbName, tableNames[i]);
@@ -1076,17 +1135,17 @@ public class HiveHookIT {
 
         assertTableIsRegistered(DEFAULT_DB, viewName);
         assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "id"));
-        assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), "name"));
+        assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName), HiveDataModelGenerator.NAME));
 
         query = String.format("drop view %s ", viewName);
 
         runCommand(query);
         assertColumnIsNotRegistered(HiveMetaStoreBridge
                 .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName),
-                        "id"));
+                    "id"));
         assertColumnIsNotRegistered(HiveMetaStoreBridge
                 .getColumnQualifiedName(HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, viewName),
-                        "name"));
+                    HiveDataModelGenerator.NAME));
         assertTableIsNotRegistered(DEFAULT_DB, viewName);
     }
 
@@ -1308,12 +1367,12 @@ public class HiveHookIT {
                 try {
                     atlasClient.getEntity(typeName, property, value);
                 } catch (AtlasServiceException e) {
-                    if(e.getStatus() == ClientResponse.Status.NOT_FOUND) {
+                    if (e.getStatus() == ClientResponse.Status.NOT_FOUND) {
                         return;
                     }
                 }
                 fail(String.format("Entity was not supposed to exist for typeName = %s, attributeName = %s, "
-                        + "attributeValue = %s", typeName, property, value));
+                    + "attributeValue = %s", typeName, property, value));
             }
         });
     }
@@ -1383,7 +1442,7 @@ public class HiveHookIT {
                     fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e);
                 }
                 LOG.debug("Waiting up to " + (mustEnd - System.currentTimeMillis()) + " msec as assertion failed", e);
-                Thread.sleep(300);
+                Thread.sleep(400);
             }
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f147d3ff/client/src/main/java/org/apache/atlas/AtlasClient.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/atlas/AtlasClient.java b/client/src/main/java/org/apache/atlas/AtlasClient.java
index 22a1726..be34802 100755
--- a/client/src/main/java/org/apache/atlas/AtlasClient.java
+++ b/client/src/main/java/org/apache/atlas/AtlasClient.java
@@ -19,6 +19,7 @@
 package org.apache.atlas;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableSet;
 import com.sun.jersey.api.client.Client;
 import com.sun.jersey.api.client.ClientHandlerException;
 import com.sun.jersey.api.client.ClientResponse;
@@ -27,9 +28,15 @@ import com.sun.jersey.api.client.config.DefaultClientConfig;
 import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
 import org.apache.atlas.security.SecureClientUtils;
 import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
 import org.apache.atlas.typesystem.TypesDef;
 import org.apache.atlas.typesystem.json.InstanceSerialization;
 import org.apache.atlas.typesystem.json.TypesSerialization;
+import org.apache.atlas.typesystem.json.TypesSerialization$;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
 import org.apache.commons.configuration.Configuration;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -84,6 +91,7 @@ public class AtlasClient {
     public static final String URI_ENTITY_AUDIT = "audit";
     public static final String URI_SEARCH = "discovery/search";
     public static final String URI_LINEAGE = "lineage/hive/table";
+    public static final String URI_TRAITS = "traits";
 
     public static final String QUERY = "query";
     public static final String QUERY_TYPE = "queryType";
@@ -294,6 +302,10 @@ public class AtlasClient {
         }
     }
 
+    public WebResource getResource() {
+        return service;
+    }
+
     /**
      * Return status of the service instance the client is pointing to.
      *
@@ -425,6 +437,33 @@ public class AtlasClient {
     }
 
     /**
+     * Creates trait type with specifiedName, superTraits and attributes
+     * @param traitName the name of the trait type
+     * @param superTraits the list of super traits from which this trait type inherits attributes
+     * @param attributeDefinitions the list of attributes of the trait type
+     * @return the list of types created
+     * @throws AtlasServiceException
+     */
+    public List<String> createTraitType(String traitName, ImmutableSet<String> superTraits, AttributeDefinition... attributeDefinitions) throws AtlasServiceException {
+        HierarchicalTypeDefinition<TraitType> piiTrait =
+            TypesUtil.createTraitTypeDef(traitName, superTraits, attributeDefinitions);
+
+        String traitDefinitionAsJSON = TypesSerialization.toJson(piiTrait, true);
+        LOG.debug("Creating trait type {} {}" , traitName, traitDefinitionAsJSON);
+        return createType(traitDefinitionAsJSON);
+    }
+
+    /**
+     * Creates simple trait type with specifiedName with no superTraits or attributes
+     * @param traitName the name of the trait type
+     * @return the list of types created
+     * @throws AtlasServiceException
+     */
+    public List<String> createTraitType(String traitName) throws AtlasServiceException {
+        return createTraitType(traitName, null);
+    }
+
+    /**
      * Register the given type(meta model)
      * @param typeAsJson type definition a jaon
      * @return result json object
@@ -589,6 +628,18 @@ public class AtlasClient {
     }
 
     /**
+     * Associate trait to an entity
+     *
+     * @param guid      guid
+     * @param traitDefinition trait definition
+     */
+    public void addTrait(String guid, Struct traitDefinition) throws AtlasServiceException {
+        String traitJson = InstanceSerialization.toJson(traitDefinition, true);
+        LOG.debug("Adding trait to entity with id {} {}", guid, traitJson);
+        callAPI(API.ADD_TRAITS, traitJson, guid, URI_TRAITS);
+    }
+
+    /**
      * Supports Partial updates
      * Updates properties set in the definition for the entity corresponding to guid
      * @param entityType Type of the entity being updated
@@ -727,6 +778,17 @@ public class AtlasClient {
         return extractResults(jsonResponse, AtlasClient.RESULTS, new ExtractOperation<String, String>());
     }
 
+    /**
+     * List traits for a given entity identified by its GUID
+     * @param guid GUID of the entity
+     * @return List<String> - traitnames associated with entity
+     * @throws AtlasServiceException
+     */
+    public List<String> listTraits(final String guid) throws AtlasServiceException {
+        JSONObject jsonResponse = callAPI(API.LIST_TRAITS, null, guid, URI_TRAITS);
+        return extractResults(jsonResponse, AtlasClient.RESULTS, new ExtractOperation<String, String>());
+    }
+
     private class ExtractOperation<T, U> {
         T extractElement(U element) throws JSONException {
             return (T) element;
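
Usage sketch for the client additions above (createTraitType, addTrait, listTraits); the endpoint, the GUID, and the single-argument AtlasClient constructor are assumptions for illustration:

    import org.apache.atlas.AtlasClient;
    import org.apache.atlas.typesystem.Struct;

    public class TraitClientSketch {
        public static void main(String[] args) throws Exception {
            AtlasClient client = new AtlasClient("http://localhost:21000"); // assumed endpoint/constructor
            String guid = "<entity-guid>";                                  // placeholder

            client.createTraitType("PII");                // simple trait type: no super traits/attributes
            client.addTrait(guid, new Struct("PII"));     // attach an instance to the entity
            System.out.println(client.listTraits(guid));  // trait names for the entity, e.g. [PII]
        }
    }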

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f147d3ff/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 7ae9547..f0ad4a6 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -17,6 +17,7 @@ ATLAS-409 Atlas will not import avro tables with schema read from a file (dosset
 ATLAS-379 Create sqoop and falcon metadata addons (venkatnrangan,bvellanki,sowmyaramesh via shwethags)
 
 ALL CHANGES:
+ATLAS-538 Rename table should retain traits/tags assigned to columns/storage descriptors (sumasai)
 ATLAS-628 Starting two Atlas instances at the same time causes exceptions in HA mode (yhemanth via sumasai)
 ATLAS-594 alter table rename doesnt work across databases (sumasai via shwethags)
 ATLAS-586 While updating the multiple attributes, Atlas returns the response with escape characters (dkantor via shwethags)


