atlas-commits mailing list archives

From shweth...@apache.org
Subject [1/2] incubator-atlas git commit: ATLAS-835 Falcon Integration with Atlas (sowmyaramesh via shwethags)
Date Mon, 20 Jun 2016 06:54:44 GMT
Repository: incubator-atlas
Updated Branches:
  refs/heads/master 436a52451 -> e30ab3d8d


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
index 7adccef..8a5736a 100644
--- a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
+++ b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
@@ -22,24 +22,31 @@ import com.sun.jersey.api.client.ClientResponse;
 import org.apache.atlas.ApplicationProperties;
 import org.apache.atlas.AtlasClient;
 import org.apache.atlas.AtlasServiceException;
+import org.apache.atlas.falcon.bridge.FalconBridge;
 import org.apache.atlas.falcon.model.FalconDataModelGenerator;
 import org.apache.atlas.falcon.model.FalconDataTypes;
+import org.apache.atlas.fs.model.FSDataTypes;
 import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
+import org.apache.atlas.hive.model.HiveDataTypes;
 import org.apache.atlas.typesystem.Referenceable;
 import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.typesystem.types.TypeUtils;
+import org.apache.atlas.utils.ParamChecker;
 import org.apache.commons.configuration.Configuration;
 import org.apache.commons.lang.RandomStringUtils;
-import org.apache.falcon.atlas.service.AtlasService;
+import org.apache.atlas.falcon.service.AtlasService;
+import org.apache.falcon.entity.FeedHelper;
+import org.apache.falcon.entity.FileSystemStorage;
 import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.feed.Feed;
+import org.apache.falcon.entity.v0.feed.Location;
+import org.apache.falcon.entity.v0.feed.LocationType;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.security.CurrentUser;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
@@ -49,7 +56,7 @@ import java.util.List;
 
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertNull;
+import static org.testng.Assert.fail;
 
 public class FalconHookIT {
     public static final Logger LOG = org.slf4j.LoggerFactory.getLogger(FalconHookIT.class);
@@ -57,6 +64,7 @@ public class FalconHookIT {
     public static final String CLUSTER_RESOURCE = "/cluster.xml";
     public static final String FEED_RESOURCE = "/feed.xml";
     public static final String FEED_HDFS_RESOURCE = "/feed-hdfs.xml";
+    public static final String FEED_REPLICATION_RESOURCE = "/feed-replication.xml";
     public static final String PROCESS_RESOURCE = "/process.xml";
 
     private AtlasClient atlasClient;
@@ -91,7 +99,7 @@ public class FalconHookIT {
 
     private boolean isDataModelAlreadyRegistered() throws Exception {
         try {
-            atlasClient.getType(FalconDataTypes.FALCON_PROCESS_ENTITY.getName());
+            atlasClient.getType(FalconDataTypes.FALCON_PROCESS.getName());
             LOG.info("Hive data model is already registered!");
             return true;
         } catch(AtlasServiceException ase) {
@@ -128,18 +136,19 @@ public class FalconHookIT {
         return String.format("catalog:%s:%s#ds=${YEAR}-${MONTH}-${DAY}-${HOUR}", dbName, tableName);
     }
 
-    @Test (enabled = true)
+    @Test
     public void testCreateProcess() throws Exception {
         Cluster cluster = loadEntity(EntityType.CLUSTER, CLUSTER_RESOURCE, "cluster" + random());
         STORE.publish(EntityType.CLUSTER, cluster);
+        assertClusterIsRegistered(cluster);
 
-        Feed infeed = getTableFeed(FEED_RESOURCE, cluster.getName());
-        String inTableName = getTableName(infeed);
-        String inDbName = getDBName(infeed);
+        Feed infeed = getTableFeed(FEED_RESOURCE, cluster.getName(), null);
+        String infeedId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                FalconBridge.getFeedQualifiedName(infeed.getName(), cluster.getName())).getId()._getId();
 
         Feed outfeed = getTableFeed(FEED_RESOURCE, cluster.getName());
-        String outTableName = getTableName(outfeed);
-        String outDbName = getDBName(outfeed);
+        String outFeedId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                FalconBridge.getFeedQualifiedName(outfeed.getName(), cluster.getName())).getId()._getId();
 
         Process process = loadEntity(EntityType.PROCESS, PROCESS_RESOURCE, "process" + random());
         process.getClusters().getClusters().get(0).setName(cluster.getName());
@@ -147,57 +156,140 @@ public class FalconHookIT {
         process.getOutputs().getOutputs().get(0).setFeed(outfeed.getName());
         STORE.publish(EntityType.PROCESS, process);
 
-        String pid = assertProcessIsRegistered(cluster.getName(), process.getName());
+        String pid = assertProcessIsRegistered(process, cluster.getName());
         Referenceable processEntity = atlasClient.getEntity(pid);
         assertNotNull(processEntity);
         assertEquals(processEntity.get(AtlasClient.NAME), process.getName());
+        assertEquals(((List<Id>)processEntity.get("inputs")).get(0)._getId(), infeedId);
+        assertEquals(((List<Id>)processEntity.get("outputs")).get(0)._getId(), outFeedId);
+    }
 
-        Id inId = (Id) ((List)processEntity.get("inputs")).get(0);
-        Referenceable inEntity = atlasClient.getEntity(inId._getId());
-        assertEquals(inEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
-                HiveMetaStoreBridge.getTableQualifiedName(cluster.getName(), inDbName, inTableName));
+    private String assertProcessIsRegistered(Process process, String clusterName) throws Exception {
+        return assertEntityIsRegistered(FalconDataTypes.FALCON_PROCESS.getName(),
+                AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                FalconBridge.getProcessQualifiedName(process.getName(), clusterName));
+    }
 
-        Id outId = (Id) ((List)processEntity.get("outputs")).get(0);
-        Referenceable outEntity = atlasClient.getEntity(outId._getId());
-        assertEquals(outEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
-                HiveMetaStoreBridge.getTableQualifiedName(cluster.getName(), outDbName, outTableName));
+    private String assertClusterIsRegistered(Cluster cluster) throws Exception {
+        return assertEntityIsRegistered(FalconDataTypes.FALCON_CLUSTER.getName(),
+                AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, cluster.getName());
+    }
+
+    private TypeUtils.Pair<String, Feed> getHDFSFeed(String feedResource, String clusterName) throws Exception {
+        Feed feed = loadEntity(EntityType.FEED, feedResource, "feed" + random());
+        org.apache.falcon.entity.v0.feed.Cluster feedCluster = feed.getClusters().getClusters().get(0);
+        feedCluster.setName(clusterName);
+        STORE.publish(EntityType.FEED, feed);
+        String feedId = assertFeedIsRegistered(feed, clusterName);
+
+        String processId = assertEntityIsRegistered(FalconDataTypes.FALCON_FEED_CREATION.getName(),
+                AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                FalconBridge.getFeedQualifiedName(feed.getName(), clusterName));
+        Referenceable processEntity = atlasClient.getEntity(processId);
+        assertEquals(((List<Id>)processEntity.get("outputs")).get(0).getId()._getId(), feedId);
+
+        String inputId = ((List<Id>) processEntity.get("inputs")).get(0).getId()._getId();
+        Referenceable pathEntity = atlasClient.getEntity(inputId);
+        assertEquals(pathEntity.getTypeName(), FSDataTypes.HDFS_PATH().toString());
+
+        List<Location> locations = FeedHelper.getLocations(feedCluster, feed);
+        Location dataLocation = FileSystemStorage.getLocation(locations, LocationType.DATA);
+        assertEquals(pathEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
+                FalconBridge.normalize(dataLocation.getPath()));
+
+        return TypeUtils.Pair.of(feedId, feed);
     }
 
     private Feed getTableFeed(String feedResource, String clusterName) throws Exception {
+        return getTableFeed(feedResource, clusterName, null);
+    }
+
+    private Feed getTableFeed(String feedResource, String clusterName, String secondClusterName) throws Exception {
         Feed feed = loadEntity(EntityType.FEED, feedResource, "feed" + random());
         org.apache.falcon.entity.v0.feed.Cluster feedCluster = feed.getClusters().getClusters().get(0);
         feedCluster.setName(clusterName);
-        feedCluster.getTable().setUri(getTableUri("db" + random(), "table" + random()));
+        String dbName = "db" + random();
+        String tableName = "table" + random();
+        feedCluster.getTable().setUri(getTableUri(dbName, tableName));
+
+        String dbName2 = "db" + random();
+        String tableName2 = "table" + random();
+
+        if (secondClusterName != null) {
+            org.apache.falcon.entity.v0.feed.Cluster feedCluster2 = feed.getClusters().getClusters().get(1);
+            feedCluster2.setName(secondClusterName);
+            feedCluster2.getTable().setUri(getTableUri(dbName2, tableName2));
+        }
+
         STORE.publish(EntityType.FEED, feed);
+        String feedId = assertFeedIsRegistered(feed, clusterName);
+        verifyFeedLineage(feed.getName(), clusterName, feedId, dbName, tableName);
+
+        if (secondClusterName != null) {
+            String feedId2 = assertFeedIsRegistered(feed, secondClusterName);
+            verifyFeedLineage(feed.getName(), secondClusterName, feedId2, dbName2, tableName2);
+        }
         return feed;
     }
 
-    private String getDBName(Feed feed) {
-        String uri = feed.getClusters().getClusters().get(0).getTable().getUri();
-        String[] parts = uri.split(":");
-        return parts[1];
+    private void verifyFeedLineage(String feedName, String clusterName, String feedId, String dbName, String tableName)
+            throws Exception{
+        //verify that lineage from hive table to falcon feed is created
+        String processId = assertEntityIsRegistered(FalconDataTypes.FALCON_FEED_CREATION.getName(),
+                AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                FalconBridge.getFeedQualifiedName(feedName, clusterName));
+        Referenceable processEntity = atlasClient.getEntity(processId);
+        assertEquals(((List<Id>)processEntity.get("outputs")).get(0).getId()._getId(), feedId);
+
+        String inputId = ((List<Id>) processEntity.get("inputs")).get(0).getId()._getId();
+        Referenceable tableEntity = atlasClient.getEntity(inputId);
+        assertEquals(tableEntity.getTypeName(), HiveDataTypes.HIVE_TABLE.getName());
+        assertEquals(tableEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
+                HiveMetaStoreBridge.getTableQualifiedName(clusterName, dbName, tableName));
+
     }
 
-    private String getTableName(Feed feed) {
-        String uri = feed.getClusters().getClusters().get(0).getTable().getUri();
-        String[] parts = uri.split(":");
-        parts = parts[2].split("#");
-        return parts[0];
+    private String assertFeedIsRegistered(Feed feed, String clusterName) throws Exception {
+        return assertEntityIsRegistered(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                FalconBridge.getFeedQualifiedName(feed.getName(), clusterName));
     }
 
-    @Test (enabled = true)
+    @Test
+    public void testReplicationFeed() throws Exception {
+        Cluster srcCluster = loadEntity(EntityType.CLUSTER, CLUSTER_RESOURCE, "cluster" + random());
+        STORE.publish(EntityType.CLUSTER, srcCluster);
+        assertClusterIsRegistered(srcCluster);
+
+        Cluster targetCluster = loadEntity(EntityType.CLUSTER, CLUSTER_RESOURCE, "cluster" + random());
+        STORE.publish(EntityType.CLUSTER, targetCluster);
+        assertClusterIsRegistered(targetCluster);
+
+        Feed feed = getTableFeed(FEED_REPLICATION_RESOURCE, srcCluster.getName(), targetCluster.getName());
+        String inId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                FalconBridge.getFeedQualifiedName(feed.getName(), srcCluster.getName())).getId()._getId();
+        String outId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                FalconBridge.getFeedQualifiedName(feed.getName(), targetCluster.getName())).getId()._getId();
+
+
+        String processId = assertEntityIsRegistered(FalconDataTypes.FALCON_FEED_REPLICATION.getName(),
+                AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, feed.getName());
+        Referenceable process = atlasClient.getEntity(processId);
+        assertEquals(((List<Id>)process.get("inputs")).get(0)._getId(), inId);
+        assertEquals(((List<Id>)process.get("outputs")).get(0)._getId(), outId);
+    }
+
+    @Test
     public void testCreateProcessWithHDFSFeed() throws Exception {
         Cluster cluster = loadEntity(EntityType.CLUSTER, CLUSTER_RESOURCE, "cluster" + random());
         STORE.publish(EntityType.CLUSTER, cluster);
 
-        Feed infeed = loadEntity(EntityType.FEED, FEED_HDFS_RESOURCE, "feed" + random());
-        org.apache.falcon.entity.v0.feed.Cluster feedCluster = infeed.getClusters().getClusters().get(0);
-        feedCluster.setName(cluster.getName());
-        STORE.publish(EntityType.FEED, infeed);
+        TypeUtils.Pair<String, Feed> result = getHDFSFeed(FEED_HDFS_RESOURCE, cluster.getName());
+        Feed infeed = result.right;
+        String infeedId = result.left;
 
         Feed outfeed = getTableFeed(FEED_RESOURCE, cluster.getName());
-        String outTableName = getTableName(outfeed);
-        String outDbName = getDBName(outfeed);
+        String outfeedId = atlasClient.getEntity(FalconDataTypes.FALCON_FEED.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
+                FalconBridge.getFeedQualifiedName(outfeed.getName(), cluster.getName())).getId()._getId();
 
         Process process = loadEntity(EntityType.PROCESS, PROCESS_RESOURCE, "process" + random());
         process.getClusters().getClusters().get(0).setName(cluster.getName());
@@ -205,65 +297,35 @@ public class FalconHookIT {
         process.getOutputs().getOutputs().get(0).setFeed(outfeed.getName());
         STORE.publish(EntityType.PROCESS, process);
 
-        String pid = assertProcessIsRegistered(cluster.getName(), process.getName());
+        String pid = assertProcessIsRegistered(process, cluster.getName());
         Referenceable processEntity = atlasClient.getEntity(pid);
         assertEquals(processEntity.get(AtlasClient.NAME), process.getName());
-        assertNull(processEntity.get("inputs"));
-
-        Id outId = (Id) ((List)processEntity.get("outputs")).get(0);
-        Referenceable outEntity = atlasClient.getEntity(outId._getId());
-        assertEquals(outEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
-                HiveMetaStoreBridge.getTableQualifiedName(cluster.getName(), outDbName, outTableName));
+        assertEquals(processEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
+                FalconBridge.getProcessQualifiedName(process.getName(), cluster.getName()));
+        assertEquals(((List<Id>)processEntity.get("inputs")).get(0)._getId(), infeedId);
+        assertEquals(((List<Id>)processEntity.get("outputs")).get(0)._getId(), outfeedId);
     }
 
-    //    @Test (enabled = true, dependsOnMethods = "testCreateProcess")
-//    public void testUpdateProcess() throws Exception {
-//        FalconEvent event = createProcessEntity(PROCESS_NAME_2, INPUT, OUTPUT);
-//        FalconEventPublisher.Data data = new FalconEventPublisher.Data(event);
-//        hook.publish(data);
-//        String id = assertProcessIsRegistered(CLUSTER_NAME, PROCESS_NAME_2);
-//        event = createProcessEntity(PROCESS_NAME_2, INPUT_2, OUTPUT_2);
-//        hook.publish(data);
-//        String id2 = assertProcessIsRegistered(CLUSTER_NAME, PROCESS_NAME_2);
-//        if (!id.equals(id2)) {
-//            throw new Exception("Id mismatch");
-//        }
-//    }
-
-    private String assertProcessIsRegistered(String clusterName, String processName) throws Exception {
-        String name = processName + "@" + clusterName;
-        LOG.debug("Searching for process {}", name);
-        String query = String.format("%s as t where %s = '%s' select t",
-                FalconDataTypes.FALCON_PROCESS_ENTITY.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
-        return assertEntityIsRegistered(query);
-    }
-
-    private String assertEntityIsRegistered(final String query) throws Exception {
-        waitFor(2000000, new Predicate() {
+    private String assertEntityIsRegistered(final String typeName, final String property, final String value) throws Exception {
+        waitFor(80000, new Predicate() {
             @Override
-            public boolean evaluate() throws Exception {
-                JSONArray results = atlasClient.search(query);
-                System.out.println(results);
-                return results.length() == 1;
+            public void evaluate() throws Exception {
+                Referenceable entity = atlasClient.getEntity(typeName, property, value);
+                assertNotNull(entity);
             }
         });
-
-        JSONArray results = atlasClient.search(query);
-        JSONObject row = results.getJSONObject(0).getJSONObject("t");
-
-        return row.getString("id");
+        Referenceable entity = atlasClient.getEntity(typeName, property, value);
+        return entity.getId()._getId();
     }
 
-
     public interface Predicate {
-
         /**
          * Perform a predicate evaluation.
          *
          * @return the boolean result of the evaluation.
          * @throws Exception thrown if the predicate evaluation could not evaluate.
          */
-        boolean evaluate() throws Exception;
+        void evaluate() throws Exception;
     }
 
     /**
@@ -273,16 +335,20 @@ public class FalconHookIT {
      * @param predicate predicate waiting on.
      */
     protected void waitFor(int timeout, Predicate predicate) throws Exception {
+        ParamChecker.notNull(predicate, "predicate");
         long mustEnd = System.currentTimeMillis() + timeout;
 
-        boolean eval;
-        while (!(eval = predicate.evaluate()) && System.currentTimeMillis() < mustEnd) {
-            LOG.info("Waiting up to {} msec", mustEnd - System.currentTimeMillis());
-            Thread.sleep(1000);
-        }
-        if (!eval) {
-            throw new Exception("Waiting timed out after " + timeout + " msec");
+        while (true) {
+            try {
+                predicate.evaluate();
+                return;
+            } catch(Error | Exception e) {
+                if (System.currentTimeMillis() >= mustEnd) {
+                    fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e);
+                }
+                LOG.debug("Waiting up to " + (mustEnd - System.currentTimeMillis()) + " msec as assertion failed", e);
+                Thread.sleep(400);
+            }
         }
     }
-
 }
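
The rewritten waitFor above retries a throwing Predicate instead of polling a boolean, which preserves the underlying assertion failure as the cause when the timeout expires. A minimal standalone sketch of the same pattern, with illustrative names (RetryAssert, Check and await are not part of this commit):

    // Minimal sketch of the assertion-polling pattern used by waitFor above:
    // retry until the check stops throwing, or fail with the last cause.
    public final class RetryAssert {
        public interface Check {
            void evaluate() throws Exception; // throws until the condition holds
        }

        public static void await(long timeoutMs, long intervalMs, Check check) throws Exception {
            long mustEnd = System.currentTimeMillis() + timeoutMs;
            while (true) {
                try {
                    check.evaluate();
                    return; // condition satisfied
                } catch (Error | Exception e) {
                    if (System.currentTimeMillis() >= mustEnd) {
                        throw new AssertionError("Timed out after " + timeoutMs + " ms", e);
                    }
                    Thread.sleep(intervalMs); // back off briefly, then retry
                }
            }
        }
    }

Because Check is a functional interface, a call site can pass the assertion as a lambda, e.g. RetryAssert.await(80000, 400, () -> assertNotNull(atlasClient.getEntity(typeName, property, value))).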

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/addons/falcon-bridge/src/test/resources/feed-replication.xml
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/test/resources/feed-replication.xml b/addons/falcon-bridge/src/test/resources/feed-replication.xml
new file mode 100644
index 0000000..dcd427b
--- /dev/null
+++ b/addons/falcon-bridge/src/test/resources/feed-replication.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+<feed description="test input" name="testinput" xmlns="uri:falcon:feed:0.1">
+    <groups>online,bi</groups>
+
+    <frequency>hours(1)</frequency>
+    <timezone>UTC</timezone>
+    <late-arrival cut-off="hours(3)"/>
+
+    <clusters>
+        <cluster name="testcluster" type="source">
+            <validity start="2010-01-01T00:00Z" end="2012-04-21T00:00Z"/>
+            <retention limit="hours(24)" action="delete"/>
+            <table uri="catalog:indb:intable#ds=${YEAR}-${MONTH}-${DAY}-${HOUR}" />
+        </cluster>
+        <cluster name="testcluster" type="target">
+            <validity start="2010-01-01T00:00Z" end="2012-04-21T00:00Z"/>
+            <retention limit="hours(24)" action="delete"/>
+            <table uri="catalog:outdb:outtable#ds=${YEAR}-${MONTH}-${DAY}-${HOUR}" />
+        </cluster>
+    </clusters>
+
+    <table uri="catalog:indb:unused#ds=${YEAR}-${MONTH}-${DAY}-${HOUR}" />
+
+    <ACL owner="testuser" group="group" permission="0x755"/>
+    <schema location="hcat" provider="hcat"/>
+</feed>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
index 23c82df..5d9950f 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
@@ -51,11 +51,9 @@ import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.LogManager;
 import org.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.tools.cmd.gen.AnyVals;
 
 import java.net.MalformedURLException;
 import java.util.ArrayList;
@@ -99,8 +97,6 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     private static final long keepAliveTimeDefault = 10;
     private static final int queueSizeDefault = 10000;
 
-    private List<HookNotification.HookNotificationMessage> messages = new ArrayList<>();
-
     private static final HiveConf hiveConf;
 
     static {
@@ -266,7 +262,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         default:
         }
 
-        notifyEntities(messages);
+        notifyEntities(event.getMessages());
     }
 
     private void deleteTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event) {
@@ -280,7 +276,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
     private void deleteTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event, WriteEntity output) {
         final String tblQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(), output.getTable());
         LOG.info("Deleting table {} ", tblQualifiedName);
-        messages.add(
+        event.addMessage(
             new HookNotification.EntityDeleteRequest(event.getUser(),
                 HiveDataTypes.HIVE_TABLE.getName(),
                 AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
@@ -297,7 +293,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
                 deleteTable(dgiBridge, event, output);
             } else if (Type.DATABASE.equals(output.getType())) {
                 final String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(dgiBridge.getClusterName(), output.getDatabase().getName());
-                messages.add(
+                event.addMessage(
                     new HookNotification.EntityDeleteRequest(event.getUser(),
                         HiveDataTypes.HIVE_DB.getName(),
                         AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
@@ -348,7 +344,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         for(WriteEntity writeEntity : event.getOutputs()){
             if (writeEntity.getType() == Type.TABLE){
                 Table newTable = writeEntity.getTable();
-                createOrUpdateEntities(dgiBridge, event.getUser(), writeEntity, true, oldTable);
+                createOrUpdateEntities(dgiBridge, event, writeEntity, true, oldTable);
                 final String newQualifiedTableName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(),
                         newTable);
                 String oldColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newQualifiedTableName, oldColName);
@@ -356,7 +352,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
                 Referenceable newColEntity = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
                 newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName);
 
-                messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+                event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
                         HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
                         oldColumnQFName, newColEntity));
             }
@@ -385,7 +381,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 
                     //Create/update old table entity - create entity with oldQFName and old tableName if it doesn't exist. If it exists, it will update
                     //We always use the new entity while creating the table since some flags and attributes of the table are not set in inputEntity, and Hive.getTable(oldTableName) also fails since the table doesn't exist in Hive anymore
-                    final LinkedHashMap<Type, Referenceable> tables = createOrUpdateEntities(dgiBridge, event.getUser(), writeEntity, true);
+                    final LinkedHashMap<Type, Referenceable> tables = createOrUpdateEntities(dgiBridge, event, writeEntity, true);
                     Referenceable tableEntity = tables.get(Type.TABLE);
 
                     //Reset regular column QF Name to old Name and create a new partial notification request to replace old column QFName to newName to retain any existing traits
@@ -398,13 +394,13 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
                     replaceSDQFName(event, tableEntity, oldQualifiedName, newQualifiedName);
 
                     //Reset Table QF Name to old Name and create a new partial notification request to replace old Table QFName to newName
-                    replaceTableQFName(dgiBridge, event, oldTable, newTable, tableEntity, oldQualifiedName, newQualifiedName);
+                    replaceTableQFName(event, oldTable, newTable, tableEntity, oldQualifiedName, newQualifiedName);
                 }
             }
         }
     }
 
-    private Referenceable replaceTableQFName(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Table oldTable, Table newTable, final Referenceable tableEntity, final String oldTableQFName, final String newTableQFName) throws HiveException {
+    private Referenceable replaceTableQFName(HiveEventContext event, Table oldTable, Table newTable, final Referenceable tableEntity, final String oldTableQFName, final String newTableQFName) throws HiveException {
         tableEntity.set(HiveDataModelGenerator.NAME,  oldTable.getTableName().toLowerCase());
         tableEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldTableQFName);
 
@@ -416,7 +412,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         ArrayList<String> alias_list = new ArrayList<>();
         alias_list.add(oldTable.getTableName().toLowerCase());
         newEntity.set(HiveDataModelGenerator.TABLE_ALIAS_LIST, alias_list);
-        messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+        event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
             HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
             oldTableQFName, newEntity));
 
@@ -434,7 +430,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             Referenceable newColEntity = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
             ///Only QF Name changes
             newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName);
-            messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+            event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
                 HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
                 oldColumnQFName, newColEntity));
             newColEntities.add(newColEntity);
@@ -453,14 +449,14 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 
         final Referenceable newSDEntity = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
         newSDEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newSDQFName);
-        messages.add(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+        event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
             HiveDataTypes.HIVE_STORAGEDESC.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
             oldSDQFName, newSDEntity));
 
         return newSDEntity;
     }
 
-    private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, String user, Entity entity, boolean skipTempTables, Table existTable) throws Exception {
+    private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Entity entity, boolean skipTempTables, Table existTable) throws Exception {
         Database db = null;
         Table table = null;
         Partition partition = null;
@@ -513,18 +509,18 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         }
 
 
-        messages.add(new HookNotification.EntityUpdateRequest(user, entities));
+        event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
         return result;
     }
 
-    private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, String user, Entity entity, boolean skipTempTables) throws Exception{
-        return createOrUpdateEntities(dgiBridge, user, entity, skipTempTables, null);
+    private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Entity entity, boolean skipTempTables) throws Exception{
+        return createOrUpdateEntities(dgiBridge, event, entity, skipTempTables, null);
     }
 
     private LinkedHashMap<Type, Referenceable> handleEventOutputs(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Type entityType) throws Exception {
         for (Entity entity : event.getOutputs()) {
             if (entity.getType() == entityType) {
-                return createOrUpdateEntities(dgiBridge, event.getUser(), entity, true);
+                return createOrUpdateEntities(dgiBridge, event, entity, true);
             }
         }
         return null;
@@ -602,7 +598,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
                     }});
 
                 entities.add(processReferenceable);
-                messages.add(new HookNotification.EntityUpdateRequest(event.getUser(), new ArrayList<Referenceable>(entities)));
+                event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), new ArrayList<>(entities)));
             } else {
                 LOG.info("Skipped query {} since it has no getInputs() or resulting getOutputs()", event.getQueryStr());
             }
@@ -615,7 +611,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
         if (entity.getType() == Type.TABLE || entity.getType() == Type.PARTITION) {
             final String tblQFName = dgiBridge.getTableQualifiedName(dgiBridge.getClusterName(), entity.getTable());
             if (!dataSets.containsKey(tblQFName)) {
-                LinkedHashMap<Type, Referenceable> result = createOrUpdateEntities(dgiBridge, event.getUser(), entity, false);
+                LinkedHashMap<Type, Referenceable> result = createOrUpdateEntities(dgiBridge, event, entity, false);
                 dataSets.put(tblQFName, result.get(Type.TABLE));
                 entities.addAll(result.values());
             }
@@ -684,7 +680,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             }
             entities.addAll(tables.values());
             entities.add(processReferenceable);
-            messages.add(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
+            event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
         }
     }
 
@@ -771,6 +767,8 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
 
         private String queryType;
 
+        List<HookNotification.HookNotificationMessage> messages = new ArrayList<>();
+
         public void setInputs(Set<ReadEntity> inputs) {
             this.inputs = inputs;
         }
@@ -859,5 +857,12 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
             return queryType;
         }
 
+        public void addMessage(HookNotification.HookNotificationMessage message) {
+            messages.add(message);
+        }
+
+        public List<HookNotification.HookNotificationMessage> getMessages() {
+            return messages;
+        }
     }
 }
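
The substantive fix in HiveHook above is moving the notification buffer off the shared hook instance and into the per-query HiveEventContext, so concurrent hook invocations no longer interleave each other's messages. A hedged, generic sketch of the event-scoped accumulator shape (these names are illustrative, not the actual Atlas classes):

    import java.util.ArrayList;
    import java.util.List;

    // Generic sketch: each event owns its message list, so two queries
    // running through the hook at once cannot see each other's messages.
    class EventContext<M> {
        private final List<M> messages = new ArrayList<>();

        void addMessage(M message) {
            messages.add(message);
        }

        List<M> getMessages() {
            return messages;
        }
    }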

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index e7fbf71..5a175e7 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -1538,9 +1538,6 @@ public class HiveHookIT {
         return assertTableIsRegistered(dbName, tableName, null, false);
     }
 
-    private String assertTableIsRegistered(String dbName, String tableName, boolean isTemporary) throws Exception {
-        return assertTableIsRegistered(dbName, tableName, null, isTemporary);
-    }
 
     private String assertTableIsRegistered(String dbName, String tableName, AssertPredicate assertPredicate, boolean isTemporary) throws Exception {
         LOG.debug("Searching for table {}.{}", dbName, tableName);

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/distro/src/conf/atlas-log4j.xml
----------------------------------------------------------------------
diff --git a/distro/src/conf/atlas-log4j.xml b/distro/src/conf/atlas-log4j.xml
index 600b4f1..e14afa3 100755
--- a/distro/src/conf/atlas-log4j.xml
+++ b/distro/src/conf/atlas-log4j.xml
@@ -23,23 +23,21 @@
     <appender name="console" class="org.apache.log4j.ConsoleAppender">
         <param name="Target" value="System.out"/>
         <layout class="org.apache.log4j.PatternLayout">
-            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/>
+            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%C{1}:%L)%n"/>
         </layout>
     </appender>
 
     <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
         <param name="File" value="${atlas.log.dir}/${atlas.log.file}"/>
         <param name="Append" value="true"/>
-        <param name="Threshold" value="info"/>
         <layout class="org.apache.log4j.PatternLayout">
-            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/>
+            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%C{1}:%L)%n"/>
         </layout>
     </appender>
 
     <appender name="AUDIT" class="org.apache.log4j.DailyRollingFileAppender">
         <param name="File" value="${atlas.log.dir}/audit.log"/>
         <param name="Append" value="true"/>
-        <param name="Threshold" value="info"/>
         <layout class="org.apache.log4j.PatternLayout">
             <param name="ConversionPattern" value="%d %x %m%n"/>
         </layout>
@@ -55,6 +53,12 @@
         <appender-ref ref="FILE"/>
     </logger>
 
+    <!-- to avoid logs - The configuration log.flush.interval.messages = 1 was supplied but isn't a known config -->
+    <logger name="org.apache.kafka.common.config.AbstractConfig" additivity="false">
+        <level value="error"/>
+        <appender-ref ref="FILE"/>
+    </logger>
+
     <logger name="AUDIT" additivity="false">
         <level value="info"/>
         <appender-ref ref="AUDIT"/>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/docs/src/site/twiki/Bridge-Falcon.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/Bridge-Falcon.twiki b/docs/src/site/twiki/Bridge-Falcon.twiki
index 4f5e676..76df60f 100644
--- a/docs/src/site/twiki/Bridge-Falcon.twiki
+++ b/docs/src/site/twiki/Bridge-Falcon.twiki
@@ -3,18 +3,26 @@
 ---++ Falcon Model
 The default falcon modelling is available in org.apache.atlas.falcon.model.FalconDataModelGenerator. It defines the following types:
 <verbatim>
-falcon_process(ClassType) - super types [Process] - attributes [timestamp, owned-by, tags]
+falcon_cluster(ClassType) - super types [Infrastructure] - attributes [timestamp, colo, owner, tags]
+falcon_feed(ClassType) - super types [DataSet] - attributes [timestamp, stored-in, owner, groups, tags]
+falcon_feed_creation(ClassType) - super types [Process] - attributes [timestamp, stored-in, owner]
+falcon_feed_replication(ClassType) - super types [Process] - attributes [timestamp, owner]
+falcon_process(ClassType) - super types [Process] - attributes [timestamp, runs-on, owner, tags, pipelines, workflow-properties]
 </verbatim>
 
 One falcon_process entity is created for every cluster that the falcon process is defined for.
 
-The entities are created and de-duped using unique qualified name. They provide namespace and can be used for querying/lineage as well. The unique attributes are:
-   * falcon_process - attribute name - <process name>@<cluster name>
+The entities are created and de-duped using the unique qualifiedName attribute. They provide a namespace and can be used for querying/lineage as well. The unique attributes are:
+   * falcon_process - <process name>@<cluster name>
+   * falcon_cluster - <cluster name>
+   * falcon_feed - <feed name>@<cluster name>
+   * falcon_feed_creation - <feed name>
+   * falcon_feed_replication - <feed name>
 
 ---++ Falcon Hook
 Falcon supports listeners on falcon entity submission. This is used to add entities in Atlas using the model defined in org.apache.atlas.falcon.model.FalconDataModelGenerator.
 The hook submits the request to a thread pool executor to avoid blocking the command execution. The thread submits the entities as messages to the notification server, and the Atlas server reads these messages and registers the entities.
-   * Add 'org.apache.falcon.atlas.service.AtlasService' to application.services in <falcon-conf>/startup.properties
+   * Add 'org.apache.atlas.falcon.service.AtlasService' to application.services in <falcon-conf>/startup.properties
    * Link falcon hook jars in falcon classpath - 'ln -s <atlas-home>/hook/falcon/* <falcon-home>/server/webapp/falcon/WEB-INF/lib/'
    * In <falcon_conf>/falcon-env.sh, set an environment variable as follows:
      <verbatim>
@@ -33,5 +41,4 @@ Refer [[Configuration][Configuration]] for notification related configurations
 
 
 ---++ Limitations
-   * Only the process entity creation is currently handled. This model will be expanded to include all Falcon metadata
    * In the falcon cluster entity, the cluster name used should be uniform across components like Hive, Falcon, Sqoop etc. If used with Ambari, the Ambari cluster name should be used for the cluster entity
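
In the model section above, falcon_process and falcon_feed are de-duped on a <name>@<cluster name> qualified name. Purely as an illustration of that convention (a hypothetical helper, not the FalconBridge methods invoked in the test earlier):

    // Illustrative only: the <name>@<cluster name> convention described above.
    final class QualifiedNames {
        private QualifiedNames() {}

        static String feed(String feedName, String clusterName) {
            return feedName + "@" + clusterName; // e.g. testinput@testcluster
        }

        static String process(String processName, String clusterName) {
            return processName + "@" + clusterName;
        }
    }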

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java b/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java
index 2ca8d85..8bbe2d7 100644
--- a/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java
+++ b/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java
@@ -115,8 +115,11 @@ public abstract class AtlasHook {
     static void notifyEntitiesInternal(List<HookNotification.HookNotificationMessage> messages, int maxRetries,
                                        NotificationInterface notificationInterface,
                                        boolean shouldLogFailedMessages, FailedMessagesLogger logger) {
-        final String message = messages.toString();
+        if (messages == null || messages.isEmpty()) {
+            return;
+        }
 
+        final String message = messages.toString();
         int numRetries = 0;
         while (true) {
             try {
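
With the guard added above, notifyEntitiesInternal returns immediately for a null or empty batch instead of entering the retry loop. A hedged sketch of the resulting control flow (Sender, notifyEntities and the retry policy here are illustrative, not the actual AtlasHook API):

    import java.util.List;

    interface Sender<M> {
        void send(List<M> messages) throws Exception;
    }

    final class GuardedNotifier {
        // Bail out early on an empty batch; otherwise retry the send.
        static <M> void notifyEntities(List<M> messages, int maxRetries, Sender<M> sender) throws Exception {
            if (messages == null || messages.isEmpty()) {
                return; // nothing to send, skip the retry loop entirely
            }
            int numRetries = 0;
            while (true) {
                try {
                    sender.send(messages);
                    return;
                } catch (Exception e) {
                    if (++numRetries >= maxRetries) {
                        throw e; // retries exhausted, surface the failure
                    }
                }
            }
        }
    }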

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java b/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java
index 9854bcc..d59cb1c 100644
--- a/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java
+++ b/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java
@@ -61,7 +61,11 @@ public class AtlasHookTest {
 
     @Test
     public void testNotifyEntitiesRetriesOnException() throws NotificationException {
-        List<HookNotification.HookNotificationMessage> hookNotificationMessages = new ArrayList<>();
+        List<HookNotification.HookNotificationMessage> hookNotificationMessages =
+                new ArrayList<HookNotification.HookNotificationMessage>() {{
+                    add(new HookNotification.EntityCreateRequest("user"));
+                }
+            };
         doThrow(new NotificationException(new Exception())).when(notificationInterface)
                 .send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
         AtlasHook.notifyEntitiesInternal(hookNotificationMessages, 2, notificationInterface, false,
@@ -73,7 +77,11 @@ public class AtlasHookTest {
 
     @Test
     public void testFailedMessageIsLoggedIfRequired() throws NotificationException {
-        List<HookNotification.HookNotificationMessage> hookNotificationMessages = new ArrayList<>();
+        List<HookNotification.HookNotificationMessage> hookNotificationMessages =
+                new ArrayList<HookNotification.HookNotificationMessage>() {{
+                    add(new HookNotification.EntityCreateRequest("user"));
+                }
+            };
         doThrow(new NotificationException(new Exception(), Arrays.asList("test message")))
                 .when(notificationInterface)
                 .send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
@@ -97,7 +105,11 @@ public class AtlasHookTest {
 
     @Test
     public void testAllFailedMessagesAreLogged() throws NotificationException {
-        List<HookNotification.HookNotificationMessage> hookNotificationMessages = new ArrayList<>();
+        List<HookNotification.HookNotificationMessage> hookNotificationMessages =
+                new ArrayList<HookNotification.HookNotificationMessage>() {{
+                    add(new HookNotification.EntityCreateRequest("user"));
+                }
+            };
         doThrow(new NotificationException(new Exception(), Arrays.asList("test message1", "test message2")))
                 .when(notificationInterface)
                 .send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
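
These tests now seed the list with one message because an empty batch would short-circuit in notifyEntitiesInternal before ever reaching the mocked send. The double-brace initialization could equally be written as a singleton list:

    // Equivalent seeding with java.util.Collections, avoiding the
    // anonymous-subclass (double-brace) idiom:
    List<HookNotification.HookNotificationMessage> hookNotificationMessages =
            Collections.singletonList(new HookNotification.EntityCreateRequest("user"));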

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 7c2a583..ab40d1d 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -3,6 +3,7 @@ Apache Atlas Release Notes
 
 --trunk - unreleased
 INCOMPATIBLE CHANGES:
+ATLAS-835 Falcon Integration with Atlas (sowmyaramesh via shwethags)
 ATLAS-912 Update to use Kafka 0.10.0.0 (from 0.9.0.0) (madhan.neethiraj via yhemanth)
 ATLAS-542 Make qualifiedName and name consistent across all Datasets and Process (sumasai via yhemanth)
 ATLAS-716 Entity update/delete notifications (shwethags)

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/typesystem/src/main/resources/atlas-application.properties
----------------------------------------------------------------------
diff --git a/typesystem/src/main/resources/atlas-application.properties b/typesystem/src/main/resources/atlas-application.properties
index 11253e6..2bbcdcd 100644
--- a/typesystem/src/main/resources/atlas-application.properties
+++ b/typesystem/src/main/resources/atlas-application.properties
@@ -101,3 +101,7 @@ atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM
 atlas.server.ha.enabled=false
 #atlas.server.ids=id1
 #atlas.server.address.id1=localhost:21000
+
+#########POLICY FILE PATH #########
+atlas.auth.policy.file=${sys:user.dir}/distro/src/conf/policy-store.txt
+
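
The new property points at the file-based authorization policy store. A hedged sketch of how a client might read it, assuming the ApplicationProperties accessor whose import appears in FalconHookIT above:

    import org.apache.atlas.ApplicationProperties;
    import org.apache.commons.configuration.Configuration;

    public class PolicyPathReader {
        public static void main(String[] args) throws Exception {
            // ApplicationProperties.get() loads atlas-application.properties
            // from the classpath; getString returns null if the key is absent.
            Configuration conf = ApplicationProperties.get();
            System.out.println("Policy store: " + conf.getString("atlas.auth.policy.file"));
        }
    }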

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/typesystem/src/main/resources/atlas-log4j.xml
----------------------------------------------------------------------
diff --git a/typesystem/src/main/resources/atlas-log4j.xml b/typesystem/src/main/resources/atlas-log4j.xml
index 2bb49d3..1225db2 100755
--- a/typesystem/src/main/resources/atlas-log4j.xml
+++ b/typesystem/src/main/resources/atlas-log4j.xml
@@ -23,7 +23,7 @@
     <appender name="console" class="org.apache.log4j.ConsoleAppender">
         <param name="Target" value="System.out"/>
         <layout class="org.apache.log4j.PatternLayout">
-            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/>
+            <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%C{1}:%L)%n"/>
         </layout>
     </appender>
 
@@ -43,7 +43,7 @@
 
     <logger name="com.thinkaurelius.titan" additivity="false">
         <level value="info"/>
-        <appender-ref ref="FILE"/>
+        <appender-ref ref="console"/>
     </logger>
 
     <logger name="AUDIT">

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/e30ab3d8/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java b/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java
index eeaddd6..866277f 100755
--- a/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java
+++ b/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java
@@ -73,7 +73,7 @@ public class AuditFilter implements Filter {
             // put the request id into the response so users can trace logs for this request
             ((HttpServletResponse) response).setHeader(AtlasClient.REQUEST_ID, requestId);
             currentThread.setName(oldName);
-            RequestContext.clear();;
+            RequestContext.clear();
         }
     }
 
@@ -88,7 +88,7 @@ public class AuditFilter implements Filter {
         final String whatURL = Servlets.getRequestURL(httpRequest);
         final String whatAddrs = httpRequest.getLocalAddr();
 
-        LOG.debug("Audit: {}/{} performed request {} {} ({}) at time {}", who, fromAddress, whatRequest, whatURL,
+        LOG.info("Audit: {}/{} performed request {} {} ({}) at time {}", who, fromAddress, whatRequest, whatURL,
                 whatAddrs, whenISO9601);
         audit(who, fromAddress, whatRequest, fromHost, whatURL, whatAddrs, whenISO9601);
     }

