atlas-commits mailing list archives

From mad...@apache.org
Subject incubator-atlas git commit: ATLAS-1666: updated exception handling to avoid use of generic exceptions
Date Sat, 18 Mar 2017 00:32:46 GMT
Repository: incubator-atlas
Updated Branches:
  refs/heads/master 1612b3058 -> f379c9ffd


ATLAS-1666: updated exception handling to avoid use of generic exceptions

Signed-off-by: Madhan Neethiraj <madhan@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/f379c9ff
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/f379c9ff
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/f379c9ff

Branch: refs/heads/master
Commit: f379c9ffd08dbdafd27d163cd155f9b9f5c2a695
Parents: 1612b30
Author: ashutoshm <amestry@hortonworks.com>
Authored: Fri Mar 17 16:30:13 2017 -0700
Committer: Madhan Neethiraj <madhan@apache.org>
Committed: Fri Mar 17 17:26:47 2017 -0700

----------------------------------------------------------------------
 .../atlas/falcon/bridge/FalconBridge.java       |  10 +-
 .../atlas/hive/bridge/HiveMetaStoreBridge.java  |  62 +--
 .../org/apache/atlas/hive/hook/HiveHook.java    | 438 ++++++++++---------
 .../org/apache/atlas/sqoop/hook/SqoopHook.java  |  53 ++-
 .../apache/atlas/storm/hook/StormAtlasHook.java |   1 +
 .../atlas/storm/hook/StormTopologyUtil.java     |   2 +-
 .../java/org/apache/atlas/AtlasErrorCode.java   |  15 +-
 .../apache/atlas/hook/AtlasHookException.java   |  43 ++
 .../org/apache/atlas/examples/QuickStart.java   | 154 ++++---
 .../apache/atlas/web/params/AbstractParam.java  |   4 +-
 .../apache/atlas/web/params/BooleanParam.java   |   7 +-
 .../apache/atlas/web/params/DateTimeParam.java  |   5 +-
 .../AtlasLdapAuthenticationProvider.java        |  13 +-
 .../atlas/web/service/ActiveInstanceState.java  |  38 +-
 .../atlas/web/service/EmbeddedServer.java       |  12 +-
 .../org/apache/atlas/web/setup/SetupSteps.java  |  10 +-
 16 files changed, 510 insertions(+), 357 deletions(-)
----------------------------------------------------------------------
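
This commit applies one pattern throughout: methods that declared "throws Exception" either drop the clause entirely (where the body cannot raise a checked exception) or narrow it to a dedicated checked type that wraps the original failure. A minimal sketch of the wrap-and-rethrow idiom, using hypothetical names (ExampleBridge, doRegister) rather than code from this commit:

    // Sketch only: narrow the declared exception type while keeping the cause chain.
    public class ExampleBridge {
        // Before: public void register() throws Exception
        public void register() throws AtlasHookException {
            try {
                doRegister(); // may fail with any checked or unchecked exception
            } catch (Exception e) {
                // Wrap and rethrow; getCause() still reaches the root failure.
                throw new AtlasHookException("ExampleBridge.register() failed.", e);
            }
        }

        private void doRegister() throws Exception { /* ... */ }
    }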


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
index d1a7e87..349da66 100644
--- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
+++ b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
@@ -73,8 +73,7 @@ public class FalconBridge {
      * @param cluster ClusterEntity
      * @return cluster instance reference
      */
-    public static Referenceable createClusterEntity(final org.apache.falcon.entity.v0.cluster.Cluster cluster)
-            throws Exception {
+    public static Referenceable createClusterEntity(final org.apache.falcon.entity.v0.cluster.Cluster cluster) {
         LOG.info("Creating cluster Entity : {}", cluster.getName());
 
         Referenceable clusterRef = new Referenceable(FalconDataTypes.FALCON_CLUSTER.getName());
@@ -97,7 +96,7 @@ public class FalconBridge {
         return clusterRef;
     }
 
-    private static Referenceable createFeedEntity(Feed feed, Referenceable clusterReferenceable) throws Exception {
+    private static Referenceable createFeedEntity(Feed feed, Referenceable clusterReferenceable) {
         LOG.info("Creating feed dataset: {}", feed.getName());
 
         Referenceable feedEntity = new Referenceable(FalconDataTypes.FALCON_FEED.getName());
@@ -338,8 +337,7 @@ public class FalconBridge {
         return entities;
     }
 
-    private static Referenceable createHiveDatabaseInstance(String clusterName, String dbName)
-            throws Exception {
+    private static Referenceable createHiveDatabaseInstance(String clusterName, String dbName) {
         Referenceable dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
         dbRef.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName);
         dbRef.set(AtlasClient.NAME, dbName);
@@ -349,7 +347,7 @@ public class FalconBridge {
     }
 
     private static List<Referenceable> createHiveTableInstance(String clusterName, String dbName,
-                                                               String tableName) throws Exception {
+                                                               String tableName) {
         List<Referenceable> entities = new ArrayList<>();
         Referenceable dbRef = createHiveDatabaseInstance(clusterName, dbName);
         entities.add(dbRef);
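
The FalconBridge methods changed above only assemble Referenceable objects and call no throwing APIs, so their "throws Exception" clauses could simply be dropped. A hypothetical caller sketch (the cluster variable is assumed to be in scope):

    // Call sites no longer need a try/catch or their own "throws Exception":
    Referenceable clusterRef = FalconBridge.createClusterEntity(cluster);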

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
index dce55c4..f27c301 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
@@ -18,16 +18,15 @@
 
 package org.apache.atlas.hive.bridge;
 
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-
+import com.google.common.annotations.VisibleForTesting;
+import com.sun.jersey.api.client.ClientResponse;
 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasClient;
 import org.apache.atlas.AtlasConstants;
 import org.apache.atlas.AtlasServiceException;
 import org.apache.atlas.hive.hook.HiveHook;
 import org.apache.atlas.hive.model.HiveDataTypes;
+import org.apache.atlas.hook.AtlasHookException;
 import org.apache.atlas.typesystem.Referenceable;
 import org.apache.atlas.typesystem.Struct;
 import org.apache.atlas.typesystem.json.InstanceSerialization;
@@ -55,8 +54,9 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.sun.jersey.api.client.ClientResponse;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
 
 /**
  * A Bridge Utility that imports metadata from the Hive Meta Store
@@ -420,7 +420,7 @@ public class HiveMetaStoreBridge {
      * @throws Exception
      */
     public Referenceable createTableInstance(Referenceable dbReference, Table hiveTable)
-            throws Exception {
+            throws AtlasHookException {
         return createOrUpdateTableInstance(dbReference, null, hiveTable);
     }
 
@@ -429,7 +429,7 @@ public class HiveMetaStoreBridge {
     }
 
     private Referenceable createOrUpdateTableInstance(Referenceable dbReference, Referenceable tableReference,
-                                                      final Table hiveTable) throws Exception {
+                                                      final Table hiveTable) throws AtlasHookException {
         LOG.info("Importing objects from {}.{}", hiveTable.getDbName(), hiveTable.getTableName());
 
         if (tableReference == null) {
@@ -494,22 +494,26 @@ public class HiveMetaStoreBridge {
         return entityQualifiedName + "_storage";
     }
 
-    private Referenceable registerTable(Referenceable dbReference, Table table) throws Exception {
-        String dbName = table.getDbName();
-        String tableName = table.getTableName();
-        LOG.info("Attempting to register table [{}]", tableName);
-        Referenceable tableReference = getTableReference(table);
-        LOG.info("Found result {}", tableReference);
-        if (tableReference == null) {
-            tableReference = createTableInstance(dbReference, table);
-            tableReference = registerInstance(tableReference);
-        } else {
-            LOG.info("Table {}.{} is already registered with id {}. Updating entity.", dbName, tableName,
-                    tableReference.getId().id);
-            tableReference = createOrUpdateTableInstance(dbReference, tableReference, table);
-            updateInstance(tableReference);
+    private Referenceable registerTable(Referenceable dbReference, Table table) throws AtlasHookException {
+        try {
+            String dbName = table.getDbName();
+            String tableName = table.getTableName();
+            LOG.info("Attempting to register table [{}]", tableName);
+            Referenceable tableReference = getTableReference(table);
+            LOG.info("Found result {}", tableReference);
+            if (tableReference == null) {
+                tableReference = createTableInstance(dbReference, table);
+                tableReference = registerInstance(tableReference);
+            } else {
+                LOG.info("Table {}.{} is already registered with id {}. Updating entity.", dbName, tableName,
+                        tableReference.getId().id);
+                tableReference = createOrUpdateTableInstance(dbReference, tableReference, table);
+                updateInstance(tableReference);
+            }
+            return tableReference;
+        } catch (Exception e) {
+            throw new AtlasHookException("HiveMetaStoreBridge.getStorageDescQFName() failed.", e);
         }
-        return tableReference;
     }
 
     private void updateInstance(Referenceable referenceable) throws AtlasServiceException {
@@ -523,7 +527,7 @@ public class HiveMetaStoreBridge {
     }
 
     public Referenceable fillStorageDesc(StorageDescriptor storageDesc, String tableQualifiedName,
-        String sdQualifiedName, Id tableId) throws Exception {
+        String sdQualifiedName, Id tableId) throws AtlasHookException {
         LOG.debug("Filling storage descriptor information for {}", storageDesc);
 
         Referenceable sdReferenceable = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
@@ -590,7 +594,7 @@ public class HiveMetaStoreBridge {
         return String.format("%s.%s@%s", tableName, colName.toLowerCase(), clusterName);
     }
 
-    public List<Referenceable> getColumns(List<FieldSchema> schemaList, Referenceable tableReference) throws Exception {
+    public List<Referenceable> getColumns(List<FieldSchema> schemaList, Referenceable tableReference) throws AtlasHookException {
         List<Referenceable> colList = new ArrayList<>();
         int columnPosition = 0;
         for (FieldSchema fs : schemaList) {
@@ -612,8 +616,8 @@ public class HiveMetaStoreBridge {
     }
 
 
-    public static void main(String[] args) throws Exception {
-
+    public static void main(String[] args) throws AtlasHookException {
+        try {
         Configuration atlasConf = ApplicationProperties.get();
         String[] atlasEndpoint = atlasConf.getStringArray(ATLAS_ENDPOINT);
         if (atlasEndpoint == null || atlasEndpoint.length == 0){
@@ -640,5 +644,9 @@ public class HiveMetaStoreBridge {
 
         HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(atlasConf, new HiveConf(), atlasClient);
         hiveMetaStoreBridge.importHiveMetadata(failOnError);
+        }
+        catch(Exception e) {
+            throw new AtlasHookException("HiveMetaStoreBridge.main() failed.", e);
+        }
     }
 }
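
With createTableInstance() and its callees now declaring AtlasHookException, callers can catch the hook-specific type instead of java.lang.Exception. A hypothetical caller sketch (bridge, dbRef, hiveTable and LOG are assumed to be in scope):

    try {
        Referenceable tableRef = bridge.createTableInstance(dbRef, hiveTable);
        // ... register or update tableRef ...
    } catch (AtlasHookException e) {
        // Only hook failures land here; unrelated checked exceptions no longer leak through.
        LOG.error("Failed to register table", e);
    }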

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
index 16835cf..7dc2e2f 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
@@ -23,10 +23,11 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.atlas.AtlasClient;
 import org.apache.atlas.AtlasConstants;
-import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
 import org.apache.atlas.hive.bridge.ColumnLineageUtils;
+import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
 import org.apache.atlas.hive.model.HiveDataTypes;
 import org.apache.atlas.hook.AtlasHook;
+import org.apache.atlas.hook.AtlasHookException;
 import org.apache.atlas.notification.hook.HookNotification;
 import org.apache.atlas.typesystem.Referenceable;
 import org.apache.commons.lang.StringUtils;
@@ -36,8 +37,13 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.hooks.*;
+import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
+import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
+import org.apache.hadoop.hive.ql.hooks.HookContext;
+import org.apache.hadoop.hive.ql.hooks.LineageInfo;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -49,7 +55,6 @@ import org.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.security.PrivilegedExceptionAction;
@@ -67,7 +72,6 @@ import java.util.SortedMap;
 import java.util.SortedSet;
 import java.util.TreeMap;
 import java.util.TreeSet;
-import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
@@ -378,77 +382,87 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         return Pair.of(changedColStringOldName, changedColStringNewName);
     }
 
-    private void renameColumn(HiveMetaStoreBridge dgiBridge, HiveEventContext event) throws Exception {
-        assert event.getInputs() != null && event.getInputs().size() == 1;
-        assert event.getOutputs() != null && event.getOutputs().size() > 0;
-
-        Table oldTable = event.getInputs().iterator().next().getTable();
-        List<FieldSchema> oldColList = oldTable.getAllCols();
-        Table outputTbl = event.getOutputs().iterator().next().getTable();
-        outputTbl = dgiBridge.hiveClient.getTable(outputTbl.getDbName(), outputTbl.getTableName());
-        List<FieldSchema> newColList = outputTbl.getAllCols();
-        assert oldColList.size() == newColList.size();
-
-        Pair<String, String> changedColNamePair = findChangedColNames(oldColList, newColList);
-        String oldColName = changedColNamePair.getLeft();
-        String newColName = changedColNamePair.getRight();
-        for (WriteEntity writeEntity : event.getOutputs()) {
-            if (writeEntity.getType() == Type.TABLE) {
-                Table newTable = writeEntity.getTable();
-                createOrUpdateEntities(dgiBridge, event, writeEntity, true, oldTable);
-                final String newQualifiedTableName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
-                    newTable);
-                String oldColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newQualifiedTableName, oldColName);
-                String newColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newQualifiedTableName, newColName);
-                Referenceable newColEntity = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
-                newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName);
-
-                event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
-                    HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
-                    oldColumnQFName, newColEntity));
+    private void renameColumn(HiveMetaStoreBridge dgiBridge, HiveEventContext event) throws AtlasHookException {
+        try {
+            assert event.getInputs() != null && event.getInputs().size() == 1;
+            assert event.getOutputs() != null && event.getOutputs().size() > 0;
+
+            Table oldTable = event.getInputs().iterator().next().getTable();
+            List<FieldSchema> oldColList = oldTable.getAllCols();
+            Table outputTbl = event.getOutputs().iterator().next().getTable();
+            outputTbl = dgiBridge.hiveClient.getTable(outputTbl.getDbName(), outputTbl.getTableName());
+            List<FieldSchema> newColList = outputTbl.getAllCols();
+            assert oldColList.size() == newColList.size();
+
+            Pair<String, String> changedColNamePair = findChangedColNames(oldColList, newColList);
+            String oldColName = changedColNamePair.getLeft();
+            String newColName = changedColNamePair.getRight();
+            for (WriteEntity writeEntity : event.getOutputs()) {
+                if (writeEntity.getType() == Type.TABLE) {
+                    Table newTable = writeEntity.getTable();
+                    createOrUpdateEntities(dgiBridge, event, writeEntity, true, oldTable);
+                    final String newQualifiedTableName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
+                            newTable);
+                    String oldColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newQualifiedTableName, oldColName);
+                    String newColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newQualifiedTableName, newColName);
+                    Referenceable newColEntity = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
+                    newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName);
+
+                    event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+                            HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                            oldColumnQFName, newColEntity));
+                }
             }
+            handleEventOutputs(dgiBridge, event, Type.TABLE);
+        }
+        catch(Exception e) {
+            throw new AtlasHookException("HiveHook.renameColumn() failed.", e);
         }
-        handleEventOutputs(dgiBridge, event, Type.TABLE);
     }
 
-    private void renameTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event) throws Exception {
-        //crappy, no easy of getting new name
-        assert event.getInputs() != null && event.getInputs().size() == 1;
-        assert event.getOutputs() != null && event.getOutputs().size() > 0;
-
-        //Update entity if not exists
-        ReadEntity oldEntity = event.getInputs().iterator().next();
-        Table oldTable = oldEntity.getTable();
-
-        for (WriteEntity writeEntity : event.getOutputs()) {
-            if (writeEntity.getType() == Entity.Type.TABLE) {
-                Table newTable = writeEntity.getTable();
-                //Hive sends with both old and new table names in the outputs which is weird. So skipping that with the below check
-                if (!newTable.getDbName().equals(oldTable.getDbName()) || !newTable.getTableName().equals(oldTable.getTableName())) {
-                    final String oldQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
-                        oldTable);
-                    final String newQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
-                        newTable);
-
-                    //Create/update old table entity - create entity with oldQFNme and old tableName if it doesnt exist. If exists, will update
-                    //We always use the new entity while creating the table since some flags, attributes of the table are not set in inputEntity and Hive.getTable(oldTableName) also fails since the table doesnt exist in hive anymore
-                    final LinkedHashMap<Type, Referenceable> tables = createOrUpdateEntities(dgiBridge, event, writeEntity, true);
-                    Referenceable tableEntity = tables.get(Type.TABLE);
-
-                    //Reset regular column QF Name to old Name and create a new partial notification request to replace old column QFName to newName to retain any existing traits
-                    replaceColumnQFName(event, (List<Referenceable>) tableEntity.get(HiveMetaStoreBridge.COLUMNS), oldQualifiedName, newQualifiedName);
-
-                    //Reset partition key column QF Name to old Name and create a new partial notification request to replace old column QFName to newName to retain any existing traits
-                    replaceColumnQFName(event, (List<Referenceable>) tableEntity.get(HiveMetaStoreBridge.PART_COLS), oldQualifiedName, newQualifiedName);
-
-                    //Reset SD QF Name to old Name and create a new partial notification request to replace old SD QFName to newName to retain any existing traits
-                    replaceSDQFName(event, tableEntity, oldQualifiedName, newQualifiedName);
-
-                    //Reset Table QF Name to old Name and create a new partial notification request to replace old Table QFName to newName
-                    replaceTableQFName(event, oldTable, newTable, tableEntity, oldQualifiedName, newQualifiedName);
+    private void renameTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event) throws AtlasHookException {
+        try {
+            //crappy, no easy way of getting the new name
+            assert event.getInputs() != null && event.getInputs().size() == 1;
+            assert event.getOutputs() != null && event.getOutputs().size() > 0;
+
+            //Update entity if not exists
+            ReadEntity oldEntity = event.getInputs().iterator().next();
+            Table oldTable = oldEntity.getTable();
+
+            for (WriteEntity writeEntity : event.getOutputs()) {
+                if (writeEntity.getType() == Entity.Type.TABLE) {
+                    Table newTable = writeEntity.getTable();
+                    //Hive sends both the old and new table names in the outputs, which is odd; skip the duplicate with the check below
+                    if (!newTable.getDbName().equals(oldTable.getDbName()) || !newTable.getTableName().equals(oldTable.getTableName())) {
+                        final String oldQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
+                                oldTable);
+                        final String newQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(),
+                                newTable);
+
+                        //Create/update old table entity - create entity with oldQFName and old tableName if it doesn't exist. If it exists, it will be updated
+                        //We always use the new entity while creating the table since some flags/attributes of the table are not set in inputEntity, and Hive.getTable(oldTableName) also fails since the table doesn't exist in Hive anymore
+                        final LinkedHashMap<Type, Referenceable> tables = createOrUpdateEntities(dgiBridge, event, writeEntity, true);
+                        Referenceable tableEntity = tables.get(Type.TABLE);
+
+                        //Reset regular column QF Name to old Name and create a new partial notification request to replace old column QFName to newName to retain any existing traits
+                        replaceColumnQFName(event, (List<Referenceable>) tableEntity.get(HiveMetaStoreBridge.COLUMNS), oldQualifiedName, newQualifiedName);
+
+                        //Reset partition key column QF Name to old Name and create a new partial notification request to replace old column QFName to newName to retain any existing traits
+                        replaceColumnQFName(event, (List<Referenceable>) tableEntity.get(HiveMetaStoreBridge.PART_COLS), oldQualifiedName, newQualifiedName);
+
+                        //Reset SD QF Name to old Name and create a new partial notification request to replace old SD QFName to newName to retain any existing traits
+                        replaceSDQFName(event, tableEntity, oldQualifiedName, newQualifiedName);
+
+                        //Reset Table QF Name to old Name and create a new partial notification request to replace old Table QFName to newName
+                        replaceTableQFName(event, oldTable, newTable, tableEntity, oldQualifiedName, newQualifiedName);
+                    }
                 }
             }
         }
+        catch(Exception e) {
+            throw new AtlasHookException("HiveHook.renameTable() failed.", e);
+        }
     }
 
     private Referenceable replaceTableQFName(HiveEventContext event, Table oldTable, Table newTable, final Referenceable tableEntity, final String oldTableQFName, final String newTableQFName) throws HiveException {
@@ -494,7 +508,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         final Referenceable sdRef = ((Referenceable) tableEntity.get(HiveMetaStoreBridge.STORAGE_DESC));
         sdRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getStorageDescQFName(oldTblQFName));
 
         //Replace SD QF name first to retain tags
         final String oldSDQFName = HiveMetaStoreBridge.getStorageDescQFName(oldTblQFName);
         final String newSDQFName = HiveMetaStoreBridge.getStorageDescQFName(newTblQFName);
 
@@ -507,81 +521,95 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         return newSDEntity;
     }
 
-    private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Entity entity, boolean skipTempTables, Table existTable) throws Exception {
-        Database db = null;
-        Table table = null;
-        Partition partition = null;
-        LinkedHashMap<Type, Referenceable> result = new LinkedHashMap<>();
-        List<Referenceable> entities = new ArrayList<>();
-
-        switch (entity.getType()) {
-        case DATABASE:
-            db = entity.getDatabase();
-            break;
-
-        case TABLE:
-            table = entity.getTable();
-            db = dgiBridge.hiveClient.getDatabase(table.getDbName());
-            break;
-
-        case PARTITION:
-            partition = entity.getPartition();
-            table = partition.getTable();
-            db = dgiBridge.hiveClient.getDatabase(table.getDbName());
-            break;
-
-        default:
-            LOG.info("{}: entity-type not handled by Atlas hook. Ignored", entity.getType());
-        }
+    private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Entity entity, boolean skipTempTables, Table existTable) throws AtlasHookException {
+        try {
+            Database db = null;
+            Table table = null;
+            Partition partition = null;
+            LinkedHashMap<Type, Referenceable> result = new LinkedHashMap<>();
+            List<Referenceable> entities = new ArrayList<>();
+
+            switch (entity.getType()) {
+                case DATABASE:
+                    db = entity.getDatabase();
+                    break;
+
+                case TABLE:
+                    table = entity.getTable();
+                    db = dgiBridge.hiveClient.getDatabase(table.getDbName());
+                    break;
+
+                case PARTITION:
+                    partition = entity.getPartition();
+                    table = partition.getTable();
+                    db = dgiBridge.hiveClient.getDatabase(table.getDbName());
+                    break;
+
+                default:
+                    LOG.info("{}: entity-type not handled by Atlas hook. Ignored", entity.getType());
+            }
 
-        if (db != null) {
-            db = dgiBridge.hiveClient.getDatabase(db.getName());
-        }
+            if (db != null) {
+                db = dgiBridge.hiveClient.getDatabase(db.getName());
+            }
 
-        if (db != null) {
-            Referenceable dbEntity = dgiBridge.createDBInstance(db);
+            if (db != null) {
+                Referenceable dbEntity = dgiBridge.createDBInstance(db);
 
-            entities.add(dbEntity);
-            result.put(Type.DATABASE, dbEntity);
+                entities.add(dbEntity);
+                result.put(Type.DATABASE, dbEntity);
 
-            Referenceable tableEntity = null;
+                Referenceable tableEntity = null;
 
-            if (table != null) {
-                if (existTable != null) {
-                    table = existTable;
-                } else {
-                    table = dgiBridge.hiveClient.getTable(table.getDbName(), table.getTableName());
+                if (table != null) {
+                    if (existTable != null) {
+                        table = existTable;
+                    } else {
+                        table = dgiBridge.hiveClient.getTable(table.getDbName(), table.getTableName());
+                    }
+                    //If its an external table, even though the temp table skip flag is on,
+                    // we create the table since we need the HDFS path to temp table lineage.
+                    if (skipTempTables &&
+                            table.isTemporary() &&
+                            !TableType.EXTERNAL_TABLE.equals(table.getTableType())) {
+                        LOG.debug("Skipping temporary table registration {} since it is not an external table {} ", table.getTableName(), table.getTableType().name());
+
+                    } else {
+                        tableEntity = dgiBridge.createTableInstance(dbEntity, table);
+                        entities.add(tableEntity);
+                        result.put(Type.TABLE, tableEntity);
+                    }
                 }
-                //If its an external table, even though the temp table skip flag is on,
-                // we create the table since we need the HDFS path to temp table lineage.
-                if (skipTempTables &&
-                        table.isTemporary() &&
-                        !TableType.EXTERNAL_TABLE.equals(table.getTableType())) {
-                    LOG.debug("Skipping temporary table registration {} since it is not an external table {} ", table.getTableName(), table.getTableType().name());
 
-                } else {
-                    tableEntity = dgiBridge.createTableInstance(dbEntity, table);
-                    entities.add(tableEntity);
-                    result.put(Type.TABLE, tableEntity);
-                }
+                event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
             }
-
-            event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
+            return result;
+        }
+        catch(Exception e) {
+            throw new AtlasHookException("HiveHook.createOrUpdateEntities() failed.", e);
         }
-        return result;
     }
 
-    private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Entity entity, boolean skipTempTables) throws Exception{
-        return createOrUpdateEntities(dgiBridge, event, entity, skipTempTables, null);
+    private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Entity entity, boolean skipTempTables) throws AtlasHookException {
+        try {
+            return createOrUpdateEntities(dgiBridge, event, entity, skipTempTables, null);
+        } catch (Exception e) {
+            throw new AtlasHookException("HiveHook.createOrUpdateEntities() failed.", e);
+        }
     }
 
-    private LinkedHashMap<Type, Referenceable> handleEventOutputs(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Type entityType) throws Exception {
-        for (Entity entity : event.getOutputs()) {
-            if (entity.getType() == entityType) {
-                return createOrUpdateEntities(dgiBridge, event, entity, true);
+    private LinkedHashMap<Type, Referenceable> handleEventOutputs(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Type entityType) throws AtlasHookException {
+        try {
+            for (Entity entity : event.getOutputs()) {
+                if (entity.getType() == entityType) {
+                    return createOrUpdateEntities(dgiBridge, event, entity, true);
+                }
             }
+            return null;
+        }
+        catch(Exception e) {
+            throw new AtlasHookException("HiveHook.handleEventOutputs() failed.", e);
         }
-        return null;
     }
 
     private static Entity getEntityByType(Set<? extends Entity> entities, Type entityType) {
@@ -600,98 +628,108 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         return str.toLowerCase().trim();
     }
 
-    private void registerProcess(HiveMetaStoreBridge dgiBridge, HiveEventContext event) throws Exception {
-        Set<ReadEntity> inputs = event.getInputs();
-        Set<WriteEntity> outputs = event.getOutputs();
+    private void registerProcess(HiveMetaStoreBridge dgiBridge, HiveEventContext event) throws AtlasHookException {
+        try {
+            Set<ReadEntity> inputs = event.getInputs();
+            Set<WriteEntity> outputs = event.getOutputs();
 
-        //Even explain CTAS has operation name as CREATETABLE_AS_SELECT
-        if (inputs.isEmpty() && outputs.isEmpty()) {
-            LOG.info("Explain statement. Skipping...");
-            return;
-        }
+            //Even explain CTAS has operation name as CREATETABLE_AS_SELECT
+            if (inputs.isEmpty() && outputs.isEmpty()) {
+                LOG.info("Explain statement. Skipping...");
+                return;
+            }
 
-        if (event.getQueryId() == null) {
-            LOG.info("Query id/plan is missing for {}", event.getQueryStr());
-        }
+            if (event.getQueryId() == null) {
+                LOG.info("Query id/plan is missing for {}", event.getQueryStr());
+            }
 
-        final SortedMap<ReadEntity, Referenceable> source = new TreeMap<>(entityComparator);
-        final SortedMap<WriteEntity, Referenceable> target = new TreeMap<>(entityComparator);
+            final SortedMap<ReadEntity, Referenceable> source = new TreeMap<>(entityComparator);
+            final SortedMap<WriteEntity, Referenceable> target = new TreeMap<>(entityComparator);
 
-        final Set<String> dataSets = new HashSet<>();
-        final Set<Referenceable> entities = new LinkedHashSet<>();
+            final Set<String> dataSets = new HashSet<>();
+            final Set<Referenceable> entities = new LinkedHashSet<>();
 
-        boolean isSelectQuery = isSelectQuery(event);
+            boolean isSelectQuery = isSelectQuery(event);
 
-        // filter out select queries which do not modify data
-        if (!isSelectQuery) {
+            // filter out select queries which do not modify data
+            if (!isSelectQuery) {
 
-            SortedSet<ReadEntity> sortedHiveInputs = new TreeSet<>(entityComparator);
-            if ( event.getInputs() != null) {
-                sortedHiveInputs.addAll(event.getInputs());
-            }
+                SortedSet<ReadEntity> sortedHiveInputs = new TreeSet<>(entityComparator);
+                if (event.getInputs() != null) {
+                    sortedHiveInputs.addAll(event.getInputs());
+                }
 
-            SortedSet<WriteEntity> sortedHiveOutputs = new TreeSet<>(entityComparator);
-            if ( event.getOutputs() != null) {
-                sortedHiveOutputs.addAll(event.getOutputs());
-            }
+                SortedSet<WriteEntity> sortedHiveOutputs = new TreeSet<>(entityComparator);
+                if (event.getOutputs() != null) {
+                    sortedHiveOutputs.addAll(event.getOutputs());
+                }
 
-            for (ReadEntity readEntity : sortedHiveInputs) {
-                processHiveEntity(dgiBridge, event, readEntity, dataSets, source, entities);
-            }
+                for (ReadEntity readEntity : sortedHiveInputs) {
+                    processHiveEntity(dgiBridge, event, readEntity, dataSets, source, entities);
+                }
 
-            for (WriteEntity writeEntity : sortedHiveOutputs) {
-                processHiveEntity(dgiBridge, event, writeEntity, dataSets, target, entities);
-            }
+                for (WriteEntity writeEntity : sortedHiveOutputs) {
+                    processHiveEntity(dgiBridge, event, writeEntity, dataSets, target, entities);
+                }
 
-            if (source.size() > 0 || target.size() > 0) {
-                Referenceable processReferenceable = getProcessReferenceable(dgiBridge, event, sortedHiveInputs, sortedHiveOutputs, source, target);
-                // setup Column Lineage
-                List<Referenceable> sourceList = new ArrayList<>(source.values());
-                List<Referenceable> targetList = new ArrayList<>(target.values());
-                List<Referenceable> colLineageProcessInstances = new ArrayList<>();
-                try {
-                    Map<String, Referenceable> columnQNameToRef =
-                            ColumnLineageUtils.buildColumnReferenceableMap(sourceList, targetList);
-                    colLineageProcessInstances = createColumnLineageProcessInstances(processReferenceable,
-                            event.lineageInfo,
-                            columnQNameToRef);
-                }catch (Exception e){
-                    LOG.warn("Column lineage process setup failed with exception {}", e);
+                if (source.size() > 0 || target.size() > 0) {
+                    Referenceable processReferenceable = getProcessReferenceable(dgiBridge, event, sortedHiveInputs, sortedHiveOutputs, source, target);
+                    // setup Column Lineage
+                    List<Referenceable> sourceList = new ArrayList<>(source.values());
+                    List<Referenceable> targetList = new ArrayList<>(target.values());
+                    List<Referenceable> colLineageProcessInstances = new ArrayList<>();
+                    try {
+                        Map<String, Referenceable> columnQNameToRef =
+                                ColumnLineageUtils.buildColumnReferenceableMap(sourceList, targetList);
+                        colLineageProcessInstances = createColumnLineageProcessInstances(processReferenceable,
+                                event.lineageInfo,
+                                columnQNameToRef);
+                    } catch (Exception e) {
+                        LOG.warn("Column lineage process setup failed with exception {}", e);
+                    }
+                    colLineageProcessInstances.add(0, processReferenceable);
+                    entities.addAll(colLineageProcessInstances);
+                    event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), new ArrayList<>(entities)));
+                } else {
+                    LOG.info("Skipped query {} since it has no getInputs() or resulting getOutputs()", event.getQueryStr());
                 }
-                colLineageProcessInstances.add(0, processReferenceable);
-                entities.addAll(colLineageProcessInstances);
-                event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), new ArrayList<>(entities)));
             } else {
-                LOG.info("Skipped query {} since it has no getInputs() or resulting getOutputs()", event.getQueryStr());
+                LOG.info("Skipped query {} for processing since it is a select query ", event.getQueryStr());
             }
-        } else {
-            LOG.info("Skipped query {} for processing since it is a select query ", event.getQueryStr());
+        }
+        catch(Exception e) {
+            throw new AtlasHookException("HiveHook.registerProcess() failed.", e);
         }
     }
 
     private  <T extends Entity> void processHiveEntity(HiveMetaStoreBridge dgiBridge, HiveEventContext event, T entity, Set<String> dataSetsProcessed,
-        SortedMap<T, Referenceable> dataSets, Set<Referenceable> entities) throws Exception {
-        if (entity.getType() == Type.TABLE || entity.getType() == Type.PARTITION) {
-            final String tblQFName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(), entity.getTable());
-            if (!dataSetsProcessed.contains(tblQFName)) {
-                LinkedHashMap<Type, Referenceable> result = createOrUpdateEntities(dgiBridge, event, entity, false);
-                dataSets.put(entity, result.get(Type.TABLE));
-                dataSetsProcessed.add(tblQFName);
-                entities.addAll(result.values());
-            }
-        } else if (entity.getType() == Type.DFS_DIR) {
-            URI location = entity.getLocation();
-            if(location != null) {
-                final String pathUri = lower(new Path(location).toString());
-                LOG.debug("Registering DFS Path {} ", pathUri);
-                if (!dataSetsProcessed.contains(pathUri)) {
-                    Referenceable hdfsPath = dgiBridge.fillHDFSDataSet(pathUri);
-                    dataSets.put(entity, hdfsPath);
-                    dataSetsProcessed.add(pathUri);
-                    entities.add(hdfsPath);
+        SortedMap<T, Referenceable> dataSets, Set<Referenceable> entities) throws AtlasHookException {
+        try {
+            if (entity.getType() == Type.TABLE || entity.getType() == Type.PARTITION) {
+                final String tblQFName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(), entity.getTable());
+                if (!dataSetsProcessed.contains(tblQFName)) {
+                    LinkedHashMap<Type, Referenceable> result = createOrUpdateEntities(dgiBridge, event, entity, false);
+                    dataSets.put(entity, result.get(Type.TABLE));
+                    dataSetsProcessed.add(tblQFName);
+                    entities.addAll(result.values());
+                }
+            } else if (entity.getType() == Type.DFS_DIR) {
+                URI location = entity.getLocation();
+                if (location != null) {
+                    final String pathUri = lower(new Path(location).toString());
+                    LOG.debug("Registering DFS Path {} ", pathUri);
+                    if (!dataSetsProcessed.contains(pathUri)) {
+                        Referenceable hdfsPath = dgiBridge.fillHDFSDataSet(pathUri);
+                        dataSets.put(entity, hdfsPath);
+                        dataSetsProcessed.add(pathUri);
+                        entities.add(hdfsPath);
+                    }
                 }
             }
         }
+        catch(Exception e) {
+            throw new AtlasHookException("HiveHook.processHiveEntity() failed.", e);
+        }
     }
 
     private boolean isSelectQuery(HiveEventContext event) {

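One subtlety in the HiveHook changes above: the four-argument createOrUpdateEntities() overload catches Exception and re-wraps it, even though the five-argument overload it delegates to already throws AtlasHookException, so a failure there surfaces double-wrapped. A small hypothetical helper that finds the original failure regardless of wrapping depth:

    // Walks the cause chain to its end; safe for single or nested wrapping.
    static Throwable rootCause(Throwable t) {
        while (t.getCause() != null) {
            t = t.getCause();
        }
        return t;
    }
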
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java
----------------------------------------------------------------------
diff --git a/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java b/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java
index 6fb27e5..50e20fa 100644
--- a/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java
+++ b/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java
@@ -25,6 +25,7 @@ import org.apache.atlas.AtlasConstants;
 import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
 import org.apache.atlas.hive.model.HiveDataTypes;
 import org.apache.atlas.hook.AtlasHook;
+import org.apache.atlas.hook.AtlasHookException;
 import org.apache.atlas.notification.hook.HookNotification;
 import org.apache.atlas.sqoop.model.SqoopDataTypes;
 import org.apache.atlas.typesystem.Referenceable;
@@ -71,8 +72,7 @@ public class SqoopHook extends SqoopJobDataPublisher {
         org.apache.hadoop.conf.Configuration.addDefaultResource("sqoop-site.xml");
     }
 
-    public Referenceable createHiveDatabaseInstance(String clusterName, String dbName)
-            throws Exception {
+    public Referenceable createHiveDatabaseInstance(String clusterName, String dbName) {
         Referenceable dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
         dbRef.set(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName);
         dbRef.set(AtlasClient.NAME, dbName);
@@ -82,14 +82,14 @@ public class SqoopHook extends SqoopJobDataPublisher {
     }
 
     public Referenceable createHiveTableInstance(String clusterName, Referenceable dbRef,
-                                             String tableName, String dbName) throws Exception {
-        Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
-        tableRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
-                HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
-        tableRef.set(AtlasClient.NAME, tableName.toLowerCase());
-        tableRef.set(HiveMetaStoreBridge.DB, dbRef);
-        return tableRef;
-    }
+                                                 String tableName, String dbName) {
+        Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
+        tableRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
+        tableRef.set(AtlasClient.NAME, tableName.toLowerCase());
+        tableRef.set(HiveMetaStoreBridge.DB, dbRef);
+        return tableRef;
+    }
 
     private Referenceable createDBStoreInstance(SqoopJobDataPublisher.Data data)
             throws ImportException {
@@ -173,19 +173,24 @@ public class SqoopHook extends SqoopJobDataPublisher {
     }
 
     @Override
-    public void publish(SqoopJobDataPublisher.Data data) throws Exception {
-        Configuration atlasProperties = ApplicationProperties.get();
-        String clusterName = atlasProperties.getString(ATLAS_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
-
-        Referenceable dbStoreRef = createDBStoreInstance(data);
-        Referenceable dbRef = createHiveDatabaseInstance(clusterName, data.getHiveDB());
-        Referenceable hiveTableRef = createHiveTableInstance(clusterName, dbRef,
-                data.getHiveTable(), data.getHiveDB());
-        Referenceable procRef = createSqoopProcessInstance(dbStoreRef, hiveTableRef, data, clusterName);
-
-        int maxRetries = atlasProperties.getInt(HOOK_NUM_RETRIES, 3);
-        HookNotification.HookNotificationMessage message =
-                new HookNotification.EntityCreateRequest(AtlasHook.getUser(), dbStoreRef, dbRef, hiveTableRef, procRef);
-        AtlasHook.notifyEntities(Arrays.asList(message), maxRetries);
+    public void publish(SqoopJobDataPublisher.Data data) throws AtlasHookException {
+        try {
+            Configuration atlasProperties = ApplicationProperties.get();
+            String clusterName = atlasProperties.getString(ATLAS_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);
+
+            Referenceable dbStoreRef = createDBStoreInstance(data);
+            Referenceable dbRef = createHiveDatabaseInstance(clusterName, data.getHiveDB());
+            Referenceable hiveTableRef = createHiveTableInstance(clusterName, dbRef,
+                    data.getHiveTable(), data.getHiveDB());
+            Referenceable procRef = createSqoopProcessInstance(dbStoreRef, hiveTableRef, data, clusterName);
+
+            int maxRetries = atlasProperties.getInt(HOOK_NUM_RETRIES, 3);
+            HookNotification.HookNotificationMessage message =
+                    new HookNotification.EntityCreateRequest(AtlasHook.getUser(), dbStoreRef, dbRef, hiveTableRef, procRef);
+            AtlasHook.notifyEntities(Arrays.asList(message), maxRetries);
+        }
+        catch(Exception e) {
+            throw new AtlasHookException("SqoopHook.publish() failed.", e);
+        }
     }
 }
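
Because publish() now wraps the original failure instead of letting it escape raw, callers can still reach the root cause through the chain. A hypothetical sketch (hook, data and LOG are assumed to be in scope):

    try {
        hook.publish(data); // declared: throws AtlasHookException
    } catch (AtlasHookException e) {
        Throwable rootCause = e.getCause(); // the original exception, preserved by the wrap
        LOG.error("Sqoop hook failed: {}", e.getMessage(), rootCause);
    }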

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/addons/storm-bridge-shim/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java
----------------------------------------------------------------------
diff --git a/addons/storm-bridge-shim/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java b/addons/storm-bridge-shim/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java
index 45eac72..0ce7633 100644
--- a/addons/storm-bridge-shim/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java
+++ b/addons/storm-bridge-shim/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java
@@ -34,6 +34,7 @@ import java.util.Map;
 public class StormAtlasHook implements ISubmitterHook {
     private static final Logger LOG = LoggerFactory.getLogger(StormAtlasHook.class);
 
+
     private static final String ATLAS_PLUGIN_TYPE = "storm";
     private static final String ATLAS_STORM_HOOK_IMPL_CLASSNAME = "org.apache.atlas.storm.hook.StormAtlasHook";
 

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormTopologyUtil.java
----------------------------------------------------------------------
diff --git a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormTopologyUtil.java b/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormTopologyUtil.java
index d646fba..d999a6a 100644
--- a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormTopologyUtil.java
+++ b/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormTopologyUtil.java
@@ -42,7 +42,7 @@ public final class StormTopologyUtil {
     private StormTopologyUtil() {
     }
 
-    public static Set<String> getTerminalUserBoltNames(StormTopology topology) throws Exception {
+    public static Set<String> getTerminalUserBoltNames(StormTopology topology) {
         Set<String> terminalBolts = new HashSet<>();
         Set<String> inputs = new HashSet<>();
         for (Map.Entry<String, Bolt> entry : topology.get_bolts().entrySet()) {

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java b/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java
index 5054cf0..ada2e2b 100644
--- a/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java
+++ b/intg/src/main/java/org/apache/atlas/AtlasErrorCode.java
@@ -67,9 +67,10 @@ public enum AtlasErrorCode {
     INSTANCE_LINEAGE_INVALID_PARAMS(400, "ATLAS-400-00-026", "Invalid lineage query parameters passed {0}: {1}"),
     ATTRIBUTE_UPDATE_NOT_SUPPORTED(400, "ATLAS-400-00-027", "{0}.{1} : attribute update not supported"),
 	INVALID_VALUE(400, "ATLAS-400-00-028", "invalid value: {0}"),
-    BAD_REQUEST(400, "ATLAS-400-00-020", "{0}"),
+    BAD_REQUEST(400, "ATLAS-400-00-029", "{0}"),
+    PARAMETER_PARSING_FAILED(400, "ATLAS-400-00-02A", "Parameter parsing failed at: {0}"),
 
-     // All Not found enums go here
+    // All Not found enums go here
     TYPE_NAME_NOT_FOUND(404, "ATLAS-404-00-001", "Given typename {0} was invalid"),
     TYPE_GUID_NOT_FOUND(404, "ATLAS-404-00-002", "Given type guid {0} was invalid"),
     EMPTY_RESULTS(404, "ATLAS-404-00-004", "No result found for {0}"),
@@ -96,7 +97,15 @@ public enum AtlasErrorCode {
     NOTIFICATION_FAILED(500, "ATLAS-500-00-007", "Failed to notify for change {0}"),
     GREMLIN_GROOVY_SCRIPT_ENGINE_FAILED(500, "ATLAS-500-00-008", "scriptEngine cannot be initialized for: {0}"),
     JSON_ERROR_OBJECT_MAPPER_NULL_RETURNED(500, "ATLAS-500-00-009", "ObjectMapper.readValue returned NULL for class: {0}"),
-    GREMLIN_SCRIPT_EXECUTION_FAILED(500, "ATLAS-500-00-00A", "Script execution failed for: {0}");
+    GREMLIN_SCRIPT_EXECUTION_FAILED(500, "ATLAS-500-00-00A", "Script execution failed for: {0}"),
+
+    CURATOR_FRAMEWORK_UPDATE(500, "ATLAS-500-00-00B", "ActiveInstanceState.update resulted in exception."),
+    QUICK_START(500, "ATLAS-500-00-00C", "Failed to run QuickStart: {0}"),
+    EMBEDDED_SERVER_START(500, "ATLAS-500-00-00D", "EmbeddedServer.Start: failed!"),
+    STORM_TOPOLOGY_UTIL(500, "ATLAS-500-00-00E", "StormTopologyUtil: {0}"),
+    SQOOP_HOOK(500, "ATLAS-500-00-00F", "SqoopHook: {0}"),
+    HIVE_HOOK(500, "ATLAS-500-00-010", "HiveHook: {0}"),
+    HIVE_HOOK_METASTORE_BRIDGE(500, "ATLAS-500-00-011", "HiveHookMetaStoreBridge: {0}");
 
     private String errorCode;
     private String errorMessage;
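
The new 500-series entries pair with AtlasBaseException: the extra string argument fills the {0} placeholder in the messages above. The QuickStart hunk below uses the new QUICK_START code exactly this way (name and the caught exception e are in scope there):

    throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e,
            String.format("%s database entity creation failed", name));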

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/notification/src/main/java/org/apache/atlas/hook/AtlasHookException.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/hook/AtlasHookException.java b/notification/src/main/java/org/apache/atlas/hook/AtlasHookException.java
new file mode 100644
index 0000000..01014ec
--- /dev/null
+++ b/notification/src/main/java/org/apache/atlas/hook/AtlasHookException.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.hook;
+
+/**
+ * Exception class for Atlas Hooks.
+ */
+public class AtlasHookException extends Exception {
+
+    public AtlasHookException() {
+    }
+
+    public AtlasHookException(String message) {
+        super(message);
+    }
+
+    public AtlasHookException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+    public AtlasHookException(Throwable cause) {
+        super(cause);
+    }
+
+    public AtlasHookException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+        super(message, cause, enableSuppression, writableStackTrace);
+    }
+}
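
The constructors simply mirror those of java.lang.Exception, so stack traces keep the full chain. An illustrative sketch (java.io.IOException stands in for any underlying failure):

    // Prints both layers:
    //   org.apache.atlas.hook.AtlasHookException: SqoopHook.publish() failed.
    //   Caused by: java.io.IOException: connection refused
    new AtlasHookException("SqoopHook.publish() failed.",
            new java.io.IOException("connection refused")).printStackTrace();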

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java
index 21ecca9..91ba111 100755
--- a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java
+++ b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java
@@ -24,7 +24,9 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasClient;
+import org.apache.atlas.AtlasErrorCode;
 import org.apache.atlas.AtlasException;
+import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.typesystem.Referenceable;
 import org.apache.atlas.typesystem.TypesDef;
 import org.apache.atlas.typesystem.json.InstanceSerialization;
@@ -323,39 +325,48 @@ public class QuickStart {
     }
 
     Id database(String name, String description, String owner, String locationUri, String... traitNames)
-    throws Exception {
-        Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("description", description);
-        referenceable.set("owner", owner);
-        referenceable.set("locationUri", locationUri);
-        referenceable.set("createTime", System.currentTimeMillis());
-
-        return createInstance(referenceable);
+            throws AtlasBaseException {
+        try {
+            Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
+            referenceable.set("name", name);
+            referenceable.set("description", description);
+            referenceable.set("owner", owner);
+            referenceable.set("locationUri", locationUri);
+            referenceable.set("createTime", System.currentTimeMillis());
+
+            return createInstance(referenceable);
+        } catch (Exception e) {
+            throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s database entity creation failed", name));
+        }
     }
 
-    Referenceable rawStorageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed)
-    throws Exception {
-        Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
-        referenceable.set("location", location);
-        referenceable.set("inputFormat", inputFormat);
-        referenceable.set("outputFormat", outputFormat);
-        referenceable.set("compressed", compressed);
+    Referenceable rawStorageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed) {
+        Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
+        referenceable.set("location", location);
+        referenceable.set("inputFormat", inputFormat);
+        referenceable.set("outputFormat", outputFormat);
+        referenceable.set("compressed", compressed);
 
-        return referenceable;
+        return referenceable;
     }
 
-    Referenceable rawColumn(String name, String dataType, String comment, String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("dataType", dataType);
-        referenceable.set("comment", comment);
+    Referenceable rawColumn(String name, String dataType, String comment, String... traitNames) throws AtlasBaseException {
+        try {
+            Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
+            referenceable.set("name", name);
+            referenceable.set("dataType", dataType);
+            referenceable.set("comment", comment);
 
-        return referenceable;
+            return referenceable;
+        }
+        catch (Exception e) {
+            throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s column entity creation failed", name));
+        }
     }
 
     Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType,
-            List<Referenceable> columns, String... traitNames) throws Exception {
+            List<Referenceable> columns, String... traitNames) throws AtlasBaseException {
+        try {
         Referenceable referenceable = new Referenceable(TABLE_TYPE, traitNames);
         referenceable.set("name", name);
         referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
@@ -370,46 +381,61 @@ public class QuickStart {
         referenceable.set("columns", columns);
 
         return createInstance(referenceable);
+        } catch (Exception e) {
+            throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s table entity creation failed", name));
+        }
     }
 
     Id loadProcess(String name, String description, String user, List<Id> inputTables, List<Id> outputTables,
-            String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
-    throws Exception {
-        Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames);
-        // super type attributes
-        referenceable.set(AtlasClient.NAME, name);
-        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
-        referenceable.set("description", description);
-        referenceable.set(INPUTS_ATTRIBUTE, inputTables);
-        referenceable.set(OUTPUTS_ATTRIBUTE, outputTables);
-
-        referenceable.set("user", user);
-        referenceable.set("startTime", System.currentTimeMillis());
-        referenceable.set("endTime", System.currentTimeMillis() + 10000);
-
-        referenceable.set("queryText", queryText);
-        referenceable.set("queryPlan", queryPlan);
-        referenceable.set("queryId", queryId);
-        referenceable.set("queryGraph", queryGraph);
-
-        return createInstance(referenceable);
+                   String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
+            throws AtlasBaseException {
+        try {
+            Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames);
+            // super type attributes
+            referenceable.set(AtlasClient.NAME, name);
+            referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
+            referenceable.set("description", description);
+            referenceable.set(INPUTS_ATTRIBUTE, inputTables);
+            referenceable.set(OUTPUTS_ATTRIBUTE, outputTables);
+
+            referenceable.set("user", user);
+            referenceable.set("startTime", System.currentTimeMillis());
+            referenceable.set("endTime", System.currentTimeMillis() + 10000);
+
+            referenceable.set("queryText", queryText);
+            referenceable.set("queryPlan", queryPlan);
+            referenceable.set("queryId", queryId);
+            referenceable.set("queryGraph", queryGraph);
+
+            return createInstance(referenceable);
+        } catch (Exception e) {
+            throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s process entity creation failed", name));
+        }
     }
 
-    Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
-        referenceable.set("db", dbId);
+    Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws AtlasBaseException {
+        try {
+            Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
+            referenceable.set("name", name);
+            referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
+            referenceable.set("db", dbId);
 
-        referenceable.set(INPUT_TABLES_ATTRIBUTE, inputTables);
+            referenceable.set(INPUT_TABLES_ATTRIBUTE, inputTables);
 
-        return createInstance(referenceable);
+            return createInstance(referenceable);
+        } catch (Exception e) {
+            throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, String.format("%s Id creation", name));
+        }
     }
 
-    private void verifyTypesCreated() throws Exception {
-        List<String> types = metadataServiceClient.listTypes();
-        for (String type : TYPES) {
-            assert types.contains(type);
+    private void verifyTypesCreated() throws AtlasBaseException {
+        try {
+            List<String> types = metadataServiceClient.listTypes();
+            for (String type : TYPES) {
+                assert types.contains(type);
+            }
+        } catch (Exception e) {
+            throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, "view creation failed.");
         }
     }
 
@@ -461,14 +487,18 @@ public class QuickStart {
                 "from DataSet", "from Process",};
     }
 
-    private void search() throws Exception {
-        for (String dslQuery : getDSLQueries()) {
-            JSONArray results = metadataServiceClient.search(dslQuery, 10, 0);
-            if (results != null) {
-                System.out.println("query [" + dslQuery + "] returned [" + results.length() + "] rows");
-            } else {
-                System.out.println("query [" + dslQuery + "] failed, results:" + results);
+    private void search() throws AtlasBaseException {
+        try {
+            for (String dslQuery : getDSLQueries()) {
+                JSONArray results = metadataServiceClient.search(dslQuery, 10, 0);
+                if (results != null) {
+                    System.out.println("query [" + dslQuery + "] returned [" + results.length() + "] rows");
+                } else {
+                    System.out.println("query [" + dslQuery + "] failed, results:" + results);
+                }
             }
+        } catch (Exception e) {
+            throw new AtlasBaseException(AtlasErrorCode.QUICK_START, e, "one or more dsl queries failed");
         }
     }
 }
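
Every QuickStart change above follows one shape: leave the entity-building body intact, wrap it in try/catch, and rethrow as AtlasBaseException carrying AtlasErrorCode.QUICK_START plus a context string. On the calling side that makes failures both typed and machine-readable. A hedged sketch of a caller (the QUICK_START declaration in the comment is an assumption modeled on the HIVE_HOOK_METASTORE_BRIDGE entry above, quickStart is an illustrative instance, and the getAtlasErrorCode()/getErrorCode() accessors are assumed from the errorCode field shown earlier):

    // assumed declaration style, mirroring HIVE_HOOK_METASTORE_BRIDGE above:
    //     QUICK_START(500, "ATLAS-500-00-0xx", "Quick start failed: {0}"),
    try {
        quickStart.database("Sales", "sales database", "admin", "hdfs://host:8000/apps/warehouse/sales");
    } catch (AtlasBaseException e) {
        // the structured code travels alongside the formatted message
        System.err.println(e.getAtlasErrorCode().getErrorCode() + ": " + e.getMessage());
    }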

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java b/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java
index 8087782..91e56b0 100755
--- a/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java
+++ b/webapp/src/main/java/org/apache/atlas/web/params/AbstractParam.java
@@ -18,6 +18,8 @@
 
 package org.apache.atlas.web.params;
 
+import org.apache.atlas.exception.AtlasBaseException;
+
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
@@ -100,7 +102,7 @@ public abstract class AbstractParam<T> {
      * @return {@code input}, parsed as an instance of {@code T}
      * @throws Exception if there is an error parsing the input
      */
-    protected abstract T parse(String input) throws Exception;
+    protected abstract T parse(String input) throws AtlasBaseException;
 
     /**
      * Returns the underlying value.

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/webapp/src/main/java/org/apache/atlas/web/params/BooleanParam.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/params/BooleanParam.java b/webapp/src/main/java/org/apache/atlas/web/params/BooleanParam.java
index d81fb6f..a5c71d5 100755
--- a/webapp/src/main/java/org/apache/atlas/web/params/BooleanParam.java
+++ b/webapp/src/main/java/org/apache/atlas/web/params/BooleanParam.java
@@ -18,6 +18,9 @@
 
 package org.apache.atlas.web.params;
 
+import org.apache.atlas.AtlasErrorCode;
+import org.apache.atlas.exception.AtlasBaseException;
+
 /**
  * A parameter encapsulating boolean values. If the query parameter value is {@code "true"},
  * regardless of case, the returned value is {@link Boolean#TRUE}. If the query parameter value is
@@ -36,13 +39,13 @@ public class BooleanParam extends AbstractParam<Boolean> {
     }
 
     @Override
-    protected Boolean parse(String input) throws Exception {
+    protected Boolean parse(String input) throws AtlasBaseException {
         if ("true".equalsIgnoreCase(input)) {
             return Boolean.TRUE;
         }
         if ("false".equalsIgnoreCase(input)) {
             return Boolean.FALSE;
         }
-        throw new Exception();
+        throw new AtlasBaseException(AtlasErrorCode.PARAMETER_PARSING_FAILED, "Boolean.parse: input=" + input);
     }
 }
\ No newline at end of file
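
Note the contract this preserves: unlike Boolean.parseBoolean(), which silently maps anything other than "true" to false, this parse() accepts exactly two literals and rejects everything else, and the rejection is now a typed AtlasBaseException rather than a bare Exception. Judging by the javax.ws.rs imports in AbstractParam above, the base class is assumed to parse eagerly in its constructor and convert a failure into a WebApplicationException for the REST layer; an illustrative use:

    BooleanParam yes = new BooleanParam("TRUE");   // Boolean.TRUE (case-insensitive)
    BooleanParam no  = new BooleanParam("false");  // Boolean.FALSE
    try {
        new BooleanParam("maybe");                 // neither literal: parse() throws
    } catch (javax.ws.rs.WebApplicationException e) {
        // AbstractParam is assumed to translate the AtlasBaseException into an
        // error Response here, as its WebApplicationException import suggests
    }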

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/webapp/src/main/java/org/apache/atlas/web/params/DateTimeParam.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/params/DateTimeParam.java b/webapp/src/main/java/org/apache/atlas/web/params/DateTimeParam.java
index 4b417e6..3ccf97e 100755
--- a/webapp/src/main/java/org/apache/atlas/web/params/DateTimeParam.java
+++ b/webapp/src/main/java/org/apache/atlas/web/params/DateTimeParam.java
@@ -18,6 +18,7 @@
 
 package org.apache.atlas.web.params;
 
+import org.apache.atlas.exception.AtlasBaseException;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 
@@ -32,7 +33,7 @@ public class DateTimeParam extends AbstractParam<DateTime> {
     }
 
     @Override
-    protected DateTime parse(String input) throws Exception {
-        return new DateTime(input, DateTimeZone.UTC);
+    protected DateTime parse(String input) throws AtlasBaseException {
+        return new DateTime(input, DateTimeZone.UTC);
     }
 }
\ No newline at end of file
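
One caveat: org.joda.time.DateTime's String constructor signals malformed input with an unchecked IllegalArgumentException, so this parse() declares AtlasBaseException but can never actually throw it. If the intent is to keep bad input on the typed path, a variant like the following would do it (a suggestion reusing PARAMETER_PARSING_FAILED from BooleanParam above, not part of this commit):

    @Override
    protected DateTime parse(String input) throws AtlasBaseException {
        try {
            return new DateTime(input, DateTimeZone.UTC);
        } catch (IllegalArgumentException e) {
            // malformed ISO-8601 input: reroute onto the typed exception path
            throw new AtlasBaseException(AtlasErrorCode.PARAMETER_PARSING_FAILED, e, "DateTime.parse: input=" + input);
        }
    }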

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/webapp/src/main/java/org/apache/atlas/web/security/AtlasLdapAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/security/AtlasLdapAuthenticationProvider.java b/webapp/src/main/java/org/apache/atlas/web/security/AtlasLdapAuthenticationProvider.java
index 6b5ae90..003d06c 100644
--- a/webapp/src/main/java/org/apache/atlas/web/security/AtlasLdapAuthenticationProvider.java
+++ b/webapp/src/main/java/org/apache/atlas/web/security/AtlasLdapAuthenticationProvider.java
@@ -18,15 +18,11 @@
 
 package org.apache.atlas.web.security;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Properties;
-import javax.annotation.PostConstruct;
-
 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.web.model.User;
 import org.apache.commons.configuration.Configuration;
 import org.apache.commons.configuration.ConfigurationConverter;
+import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.ldap.core.support.LdapContextSource;
@@ -41,7 +37,10 @@ import org.springframework.security.ldap.authentication.LdapAuthenticationProvid
 import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
 import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
 import org.springframework.stereotype.Component;
-import org.apache.commons.lang.StringUtils;
+
+import javax.annotation.PostConstruct;
+import java.util.List;
+import java.util.Properties;
 
 @Component
 public class AtlasLdapAuthenticationProvider extends
@@ -87,7 +86,7 @@ public class AtlasLdapAuthenticationProvider extends
     }
 
     private Authentication getLdapBindAuthentication(
-            Authentication authentication) throws Exception {
+            Authentication authentication) {
         try {
             if (isDebugEnabled) {
                 LOG.debug("==> AtlasLdapAuthenticationProvider getLdapBindAuthentication");

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java b/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java
index a489bcf..ee3b829 100644
--- a/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java
+++ b/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java
@@ -20,7 +20,9 @@ package org.apache.atlas.web.service;
 
 import com.google.inject.Inject;
 import org.apache.atlas.ApplicationProperties;
+import org.apache.atlas.AtlasErrorCode;
 import org.apache.atlas.AtlasException;
+import org.apache.atlas.exception.AtlasBaseException;
 import org.apache.atlas.ha.HAConfiguration;
 import org.apache.commons.configuration.Configuration;
 import org.apache.curator.framework.CuratorFramework;
@@ -81,23 +83,27 @@ public class ActiveInstanceState {
      * @throws Exception
      * @param serverId ID of this server instance
      */
-    public void update(String serverId) throws Exception {
-        CuratorFramework client = curatorFactory.clientInstance();
-        String atlasServerAddress = HAConfiguration.getBoundAddressForId(configuration, serverId);
-        HAConfiguration.ZookeeperProperties zookeeperProperties =
-                HAConfiguration.getZookeeperProperties(configuration);
-        List<ACL> acls = Arrays.asList(
-                new ACL[]{AtlasZookeeperSecurityProperties.parseAcl(zookeeperProperties.getAcl(),
-                    ZooDefs.Ids.OPEN_ACL_UNSAFE.get(0))});
-        Stat serverInfo = client.checkExists().forPath(getZnodePath(zookeeperProperties));
-        if (serverInfo == null) {
-            client.create().
-                    withMode(CreateMode.EPHEMERAL).
-                    withACL(acls).
-                    forPath(getZnodePath(zookeeperProperties));
+    public void update(String serverId) throws AtlasBaseException {
+        try {
+            CuratorFramework client = curatorFactory.clientInstance();
+            HAConfiguration.ZookeeperProperties zookeeperProperties =
+                    HAConfiguration.getZookeeperProperties(configuration);
+            String atlasServerAddress = HAConfiguration.getBoundAddressForId(configuration, serverId);
+            List<ACL> acls = Arrays.asList(
+                    new ACL[]{AtlasZookeeperSecurityProperties.parseAcl(zookeeperProperties.getAcl(),
+                            ZooDefs.Ids.OPEN_ACL_UNSAFE.get(0))});
+            Stat serverInfo = client.checkExists().forPath(getZnodePath(zookeeperProperties));
+            if (serverInfo == null) {
+                client.create().
+                        withMode(CreateMode.EPHEMERAL).
+                        withACL(acls).
+                        forPath(getZnodePath(zookeeperProperties));
+            }
+            client.setData().forPath(getZnodePath(zookeeperProperties),
+                    atlasServerAddress.getBytes(Charset.forName("UTF-8")));
+        } catch (Exception e) {
+            throw new AtlasBaseException(AtlasErrorCode.CURATOR_FRAMEWORK_UPDATE, e, "ActiveInstanceState.update: failed to update znode for serverId " + serverId);
         }
-        client.setData().forPath(getZnodePath(zookeeperProperties),
-                atlasServerAddress.getBytes(Charset.forName("UTF-8")));
     }
 
     private String getZnodePath(HAConfiguration.ZookeeperProperties zookeeperProperties) {
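
The CreateMode.EPHEMERAL here is the load-bearing choice: if the active server dies, ZooKeeper removes the znode automatically, so a stale address cannot linger for the passive instances. The read side is the mirror image; a sketch reusing the names from update() for illustration:

    // illustrative counterpart to update(): a passive instance reads the
    // active server's address back from the same znode
    byte[] data = client.getData().forPath(getZnodePath(zookeeperProperties));
    String activeServerAddress = new String(data, Charset.forName("UTF-8"));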

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java b/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
index e615a5b..467571e 100755
--- a/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
+++ b/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
@@ -19,6 +19,8 @@
 package org.apache.atlas.web.service;
 
 import org.apache.atlas.AtlasConfiguration;
+import org.apache.atlas.AtlasErrorCode;
+import org.apache.atlas.exception.AtlasBaseException;
 import org.eclipse.jetty.server.Connector;
 import org.eclipse.jetty.server.HttpConfiguration;
 import org.eclipse.jetty.server.HttpConnectionFactory;
@@ -88,9 +90,13 @@ public class EmbeddedServer {
         return connector;
     }
 
-    public void start() throws Exception {
-        server.start();
-        server.join();
+    public void start() throws AtlasBaseException {
+        try {
+            server.start();
+            server.join();
+        } catch(Exception e) {
+            throw new AtlasBaseException(AtlasErrorCode.EMBEDDED_SERVER_START, e);
+        }
     }
 
     public void stop() {
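
Because start() blocks in server.join() and now funnels every failure into AtlasBaseException, a launcher needs exactly one typed catch around it. A minimal sketch (the newServer(...) factory and its arguments stand in for however the server is actually constructed; they are illustrative, not from this commit):

    public static void main(String[] args) {
        try {
            EmbeddedServer server = EmbeddedServer.newServer(21000, "atlas.war", false);  // illustrative factory
            server.start();   // blocks until the Jetty server shuts down
        } catch (AtlasBaseException e) {
            System.err.println("Atlas web server failed: " + e.getMessage());
            System.exit(1);
        }
    }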

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/f379c9ff/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java b/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java
index c97cc95..eadd5ce 100644
--- a/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java
+++ b/webapp/src/main/java/org/apache/atlas/web/setup/SetupSteps.java
@@ -119,11 +119,15 @@ public class SetupSteps {
     }
 
     private void clearSetupInProgress(HAConfiguration.ZookeeperProperties zookeeperProperties)
-            throws Exception {
+            throws SetupException {
         CuratorFramework client = curatorFactory.clientInstance();
         String path = lockPath(zookeeperProperties);
-        client.delete().forPath(path);
-        LOG.info("Deleted lock path after completing setup {}", path);
+        try {
+            client.delete().forPath(path);
+            LOG.info("Deleted lock path after completing setup {}", path);
+        } catch (Exception e) {
+            throw new SetupException(String.format("SetupSteps.clearSetupInProgress: Failed to delete Zookeeper lock path: %s", path), e);
+        }
     }
 
     private String lockPath(HAConfiguration.ZookeeperProperties zookeeperProperties) {
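
clearSetupInProgress() is the release half of a coarse ZooKeeper lock around the setup steps; the acquire half would create the same lockPath() node before any step runs. A sketch of that counterpart under the same SetupException convention (illustrative, not part of this diff):

    private void createSetupInProgressNode(HAConfiguration.ZookeeperProperties zookeeperProperties)
            throws SetupException {
        String path = lockPath(zookeeperProperties);
        try {
            curatorFactory.clientInstance().create().forPath(path);
            LOG.info("Created lock path {} before running setup steps", path);
        } catch (Exception e) {
            throw new SetupException(String.format("Could not create lock path %s; is another setup in progress?", path), e);
        }
    }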

