eagle-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From yonzhang2...@apache.org
Subject [2/2] incubator-eagle git commit: EAGLE-469 make dataSource configuration extremely simple
Date Tue, 16 Aug 2016 19:48:43 GMT
EAGLE-469 make dataSource configuration extremely simple
make dataSource configuration extremely simple

https://issues.apache.org/jira/browse/EAGLE-469

Author: @yonzhang <yonzhang2012@gmail.com>

Closes: #469


Project: http://git-wip-us.apache.org/repos/asf/incubator-eagle/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-eagle/commit/e383e879
Tree: http://git-wip-us.apache.org/repos/asf/incubator-eagle/tree/e383e879
Diff: http://git-wip-us.apache.org/repos/asf/incubator-eagle/diff/e383e879

Branch: refs/heads/develop
Commit: e383e879622b27bf43ff1be4fd062acda1cfbbd5
Parents: 58f66a8
Author: yonzhang <yonzhang2012@gmail.com>
Authored: Tue Aug 16 12:52:38 2016 -0700
Committer: yonzhang <yonzhang2012@gmail.com>
Committed: Tue Aug 16 12:52:38 2016 -0700

----------------------------------------------------------------------
 .../eagle-alert-service/pom.xml                 |   6 +
 .../eagle/common/config/EagleConfigHelper.java  |  20 +-
 .../impl/storm/kafka/KafkaSpoutProvider.java    |  44 ++--
 eagle-gc/pom.xml                                |   6 +
 .../org/apache/eagle/gc/GCLogApplication.java   |   1 +
 .../gc/executor/GCMetricGeneratorBolt.java      |   4 +-
 ...apache.eagle.gc.GCLogApplicationProvider.xml | 182 +++++++++++++++
 ....security.hbase.GCLogApplicationProvider.xml | 221 -------------------
 .../src/main/resources/application-gclog.conf   |  44 ++++
 eagle-gc/src/main/resources/application.conf    |  52 -----
 .../src/main/resources/gclog-init-sandbox.sh    |  71 ------
 eagle-gc/src/main/resources/log4j.properties    |  17 +-
 .../SecurityExternalMetadataResource.java       |  15 ++
 .../eagle/security/util/ExternalDataJoiner.java |   4 +-
 .../hbase/HBaseAuditLogApplication.java         |   3 +-
 .../HbaseResourceSensitivityPollingJob.java     |   9 +-
 ....security.hbase.HBaseAuditLogAppProvider.xml |   2 +-
 .../src/main/resources/application.conf         |  21 +-
 .../timer/FileSensitivityPollingJob.java        |   9 +-
 .../auditlog/timer/IPZonePollingJob.java        |   9 +-
 ...ecurity.auditlog.HdfsAuditLogAppProvider.xml | 129 ++++-------
 .../src/main/resources/application.conf         |  21 +-
 .../src/main/resources/application.conf         |  19 +-
 .../hive/HiveQueryMonitoringAppProvider.java    |  31 +++
 .../hive/HiveQueryMonitoringApplication.java    |  82 +++++++
 .../hive/jobrunning/HiveJobFetchSpout.java      |   7 +-
 .../HiveQueryMonitoringAppProvider.java         |  33 ---
 .../HiveQueryMonitoringApplication.java         |  80 -------
 .../HiveResourceSensitivityPollingJob.java      |   9 +-
 ...org.apache.eagle.app.spi.ApplicationProvider |   2 +-
 .../src/main/resources/application.conf         |  13 +-
 eagle-server/pom.xml                            |   7 +
 32 files changed, 507 insertions(+), 666 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml b/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml
index 31d6d2b..81eddb1 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml
+++ b/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml
@@ -35,6 +35,12 @@
   			<groupId>org.apache.eagle</groupId>
   			<artifactId>eagle-service-base</artifactId>
 			<version>${project.version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>log4j-over-slf4j</artifactId>
+				</exclusion>
+			</exclusions>
   		</dependency>
 	  	<dependency>
   			<groupId>org.wso2.siddhi</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-core/eagle-common/src/main/java/org/apache/eagle/common/config/EagleConfigHelper.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-common/src/main/java/org/apache/eagle/common/config/EagleConfigHelper.java b/eagle-core/eagle-common/src/main/java/org/apache/eagle/common/config/EagleConfigHelper.java
index fe3e190..2f15cd6 100644
--- a/eagle-core/eagle-common/src/main/java/org/apache/eagle/common/config/EagleConfigHelper.java
+++ b/eagle-core/eagle-common/src/main/java/org/apache/eagle/common/config/EagleConfigHelper.java
@@ -24,28 +24,20 @@ import com.typesafe.config.Config;
 public class EagleConfigHelper {
 
     public static String getServiceHost(Config config) {
-        return config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.HOST);
+        return config.getString(EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.HOST);
     }
 
     public static int getServicePort(Config config) {
-        return config.getInt(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PORT);
+        return config.getInt(EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PORT);
     }
 
     public static String getServiceUser(Config config) {
-        return config.hasPath(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME) ?
-               config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME) : null;
+        return config.hasPath(EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME) ?
+               config.getString(EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME) : null;
     }
 
     public static String getServicePassword(Config config) {
-        return config.hasPath(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PASSWORD) ?
-                config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PASSWORD) : null;
-    }
-
-    public static String getSite(Config config) {
-        return config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.SITE);
-    }
-
-    public static String getApplication(Config config) {
-        return config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.APPLICATION);
+        return config.hasPath(EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PASSWORD) ?
+                config.getString(EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PASSWORD) : null;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java b/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java
index 2d2936c..b9d998d 100644
--- a/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java
+++ b/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java
@@ -33,15 +33,21 @@ import storm.kafka.KafkaSpout;
 import storm.kafka.SpoutConfig;
 import storm.kafka.ZkHosts;
 
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
 
 /**
  * Since 6/8/16.
  */
 public class KafkaSpoutProvider implements StormSpoutProvider {
     private final static Logger LOG = LoggerFactory.getLogger(KafkaSpoutProvider.class);
+    private final static String DEFAULT_CONFIG_PREFIX = "dataSourceConfig";
+    private final static String DEFAULT_CONSUMER_GROUP_ID = "eagleConsumer";
+    private final static String DEFAULT_TRANSACTION_ZK_ROOT = "/consumers";
 
-    private String configPrefix = "dataSourceConfig";
+    private String configPrefix = DEFAULT_CONFIG_PREFIX;
 
     public KafkaSpoutProvider(){}
 
@@ -53,24 +59,28 @@ public class KafkaSpoutProvider implements StormSpoutProvider {
     public BaseRichSpout getSpout(Config config){
         Config context = config;
         if(this.configPrefix!=null) context = config.getConfig(configPrefix);
+
+        // the following is for fetching data from one topic
         // Kafka topic
         String topic = context.getString("topic");
-        // Kafka consumer group id
-        String groupId = context.getString("consumerGroupId");
-        // Kafka fetch size
-        int fetchSize = context.getInt("fetchSize");
         // Kafka broker zk connection
         String zkConnString = context.getString("zkConnection");
-        // transaction zkRoot
-        String zkRoot = context.getString("transactionZKRoot");
-
-        LOG.info(String.format("Use topic id: %s",topic));
+        // Kafka fetch size
+        int fetchSize = context.hasPath("fetchSize") ? context.getInt("fetchSize") : 1048586;
+        LOG.info(String.format("Use topic : %s, zkConnection : %s , fetchSize : %d", topic, zkConnString, fetchSize));
 
-        String brokerZkPath = null;
-        if(context.hasPath("brokerZkPath")) {
-            brokerZkPath = context.getString("brokerZkPath");
-        }
+        /*
+         the following is for recording offset for processing the data
+         the zk path to store current offset is comprised of the following
+         offset zkPath = zkRoot + "/" + topic + "/" + consumerGroupId + "/" + partition_Id
 
+         consumerGroupId is for differentiating different consumers which consume the same topic
+        */
+        // transaction zkRoot
+        String zkRoot = context.hasPath("transactionZKRoot") ? context.getString("transactionZKRoot") : DEFAULT_TRANSACTION_ZK_ROOT;
+        // Kafka consumer group id
+        String groupId = context.hasPath("consumerGroupId") ? context.getString("consumerGroupId") : DEFAULT_CONSUMER_GROUP_ID;
+        String brokerZkPath = context.hasPath("brokerZkPath") ? context.getString("brokerZkPath") : null;
         BrokerHosts hosts;
         if(brokerZkPath == null) {
             hosts = new ZkHosts(zkConnString);
@@ -84,11 +94,13 @@ public class KafkaSpoutProvider implements StormSpoutProvider {
                 groupId);
 
         // transaction zkServers
-        spoutConfig.zkServers = Arrays.asList(context.getString("transactionZKServers").split(","));
+        String[] txZkServers = context.hasPath("txZkServers") ? context.getString("txZkServers").split(",") : new String[]{"localhost:2181"};
+        spoutConfig.zkServers = Arrays.asList(txZkServers).stream().map(server -> server.split(":")[0]).collect(Collectors.toList());
         // transaction zkPort
-        spoutConfig.zkPort = context.getInt("transactionZKPort");
+        spoutConfig.zkPort = Integer.parseInt(txZkServers[0].split(":")[1]);
+        LOG.info("txZkServers:" + spoutConfig.zkServers + ", zkPort:" + spoutConfig.zkPort);
         // transaction update interval
-        spoutConfig.stateUpdateIntervalMs = context.getLong("transactionStateUpdateMS");
+        spoutConfig.stateUpdateIntervalMs = context.hasPath("transactionStateUpdateMS") ? context.getLong("transactionStateUpdateMS") : 2000;
         // Kafka fetch size
         spoutConfig.fetchSizeBytes = fetchSize;
         // "startOffsetTime" is for test usage, prod should not use this

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-gc/pom.xml
----------------------------------------------------------------------
diff --git a/eagle-gc/pom.xml b/eagle-gc/pom.xml
index be38567..653cdff 100644
--- a/eagle-gc/pom.xml
+++ b/eagle-gc/pom.xml
@@ -35,6 +35,12 @@
       <groupId>org.apache.eagle</groupId>
       <artifactId>eagle-metric</artifactId>
       <version>${project.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>log4j-over-slf4j</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.eagle</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-gc/src/main/java/org/apache/eagle/gc/GCLogApplication.java
----------------------------------------------------------------------
diff --git a/eagle-gc/src/main/java/org/apache/eagle/gc/GCLogApplication.java b/eagle-gc/src/main/java/org/apache/eagle/gc/GCLogApplication.java
index e2ac91a..a3052d8 100644
--- a/eagle-gc/src/main/java/org/apache/eagle/gc/GCLogApplication.java
+++ b/eagle-gc/src/main/java/org/apache/eagle/gc/GCLogApplication.java
@@ -72,6 +72,7 @@ public class GCLogApplication extends StormApplication{
     }
 
     public static void main(String[] args){
+        System.setProperty("config.resource", "/application-gclog.conf");
         Config config = ConfigFactory.load();
         GCLogApplication app = new GCLogApplication();
         app.run(config);

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-gc/src/main/java/org/apache/eagle/gc/executor/GCMetricGeneratorBolt.java
----------------------------------------------------------------------
diff --git a/eagle-gc/src/main/java/org/apache/eagle/gc/executor/GCMetricGeneratorBolt.java b/eagle-gc/src/main/java/org/apache/eagle/gc/executor/GCMetricGeneratorBolt.java
index 2d1023b..acb442e 100644
--- a/eagle-gc/src/main/java/org/apache/eagle/gc/executor/GCMetricGeneratorBolt.java
+++ b/eagle-gc/src/main/java/org/apache/eagle/gc/executor/GCMetricGeneratorBolt.java
@@ -67,8 +67,8 @@ public class GCMetricGeneratorBolt extends BaseRichBolt {
         String password = EagleConfigHelper.getServicePassword(config);
         listener = new EagleServiceReporterMetricListener(host, port, username, password);
         dimensions = new HashMap<>();
-        dimensions.put(EagleConfigConstants.SITE, EagleConfigHelper.getSite(config));
-        dimensions.put(EagleConfigConstants.APPLICATION, EagleConfigHelper.getApplication(config));
+        dimensions.put(EagleConfigConstants.SITE, config.getString("siteId"));
+        dimensions.put(EagleConfigConstants.APPLICATION, config.getString("appId"));
         gcPausedTimeMetricName = MetricKeyCodeDecoder.codeMetricKey(GCConstants.GC_PAUSE_TIME_METRIC_NAME, dimensions);
 
         this.collector = collector;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-gc/src/main/resources/META-INF/providers/org.apache.eagle.gc.GCLogApplicationProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-gc/src/main/resources/META-INF/providers/org.apache.eagle.gc.GCLogApplicationProvider.xml b/eagle-gc/src/main/resources/META-INF/providers/org.apache.eagle.gc.GCLogApplicationProvider.xml
new file mode 100644
index 0000000..453c489
--- /dev/null
+++ b/eagle-gc/src/main/resources/META-INF/providers/org.apache.eagle.gc.GCLogApplicationProvider.xml
@@ -0,0 +1,182 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+  ~ /*
+  ~  *
+  ~  *    Licensed to the Apache Software Foundation (ASF) under one or more
+  ~  *    contributor license agreements.  See the NOTICE file distributed with
+  ~  *    this work for additional information regarding copyright ownership.
+  ~  *    The ASF licenses this file to You under the Apache License, Version 2.0
+  ~  *    (the "License"); you may not use this file except in compliance with
+  ~  *    the License.  You may obtain a copy of the License at
+  ~  *
+  ~  *       http://www.apache.org/licenses/LICENSE-2.0
+  ~  *
+  ~  *    Unless required by applicable law or agreed to in writing, software
+  ~  *    distributed under the License is distributed on an "AS IS" BASIS,
+  ~  *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~  *    See the License for the specific language governing permissions and
+  ~  *    limitations under the License.
+  ~  *
+  ~  */
+  -->
+
+<application>
+    <type>GCLogApplication</type>
+    <name>GC Log Monitoring Application</name>
+    <version>0.5.0-incubating</version>
+    <appClass>org.apache.eagle.gc.GCLogApplication</appClass>
+    <viewPath>/apps/example</viewPath>
+    <configuration>
+        <!-- topology level configurations -->
+        <property>
+            <name>topology.numOfSpoutTasks</name>
+            <displayName>topology.numOfSpoutTasks</displayName>
+            <value>2</value>
+            <description>number of spout tasks</description>
+        </property>
+        <property>
+            <name>topology.numOfAnalyzerTasks</name>
+            <displayName>topology.numOfAnalyzerTasks</displayName>
+            <value>2</value>
+            <description>number of analyzer tasks</description>
+        </property>
+        <property>
+            <name>topology.numOfGeneratorTasks</name>
+            <displayName>topology.numOfGeneratorTasks</displayName>
+            <value>2</value>
+            <description>number of generator tasks</description>
+        </property>
+        <property>
+            <name>topology.numOfSinkTasks</name>
+            <displayName>topology.numOfSinkTasks</displayName>
+            <value>2</value>
+            <description>number of sink tasks</description>
+        </property>
+
+        <!-- data source configuration -->
+        <property>
+            <name>dataSourceConfig.topic</name>
+            <displayName>dataSourceConfig.topic</displayName>
+            <value>gc_log</value>
+            <description>data source topic</description>
+        </property>
+        <property>
+            <name>dataSourceConfig.zkConnection</name>
+            <displayName>dataSourceConfig.zkConnection</displayName>
+            <value>server.eagle.apache.org:2181</value>
+            <description>zk connection</description>
+        </property>
+        <property>
+            <name>dataSourceConfig.txZkServers</name>
+            <displayName>dataSourceConfig.txZkServers</displayName>
+            <value>server.eagle.apache.org:2181</value>
+            <description>zookeeper server for offset transaction</description>
+        </property>
+        <property>
+            <name>dataSourceConfig.transactionZKPort</name>
+            <displayName>dataSourceConfig.transactionZKPort</displayName>
+            <value>2181</value>
+            <description>zookeeper server port for offset transaction</description>
+        </property>
+        <property>
+            <name>dataSourceConfig.schemeCls</name>
+            <displayName>dataSourceConfig.schemeCls</displayName>
+            <value>storm.kafka.StringScheme</value>
+            <description>scheme class</description>
+        </property>
+
+        <!-- eagle server configurations -->
+        <property>
+            <name>eagleService.host</name>
+            <displayName>eagleService.host</displayName>
+            <value>localhost</value>
+            <description>eagle service host</description>
+        </property>
+        <property>
+            <name>eagleService.port</name>
+            <displayName>eagleService.port</displayName>
+            <value>8080</value>
+            <description>eagle service port</description>
+        </property>
+        <property>
+            <name>eagleService.username</name>
+            <displayName>eagleService.username</displayName>
+            <value>admin</value>
+            <description>eagle service username</description>
+        </property>
+        <property>
+            <name>eagleService.password</name>
+            <displayName>eagleService.password</displayName>
+            <value>secret</value>
+            <description>eagle service password</description>
+        </property>
+
+        <!-- data sink configurations -->
+        <property>
+            <name>dataSinkConfig.topic</name>
+            <displayName>dataSinkConfig.topic</displayName>
+            <value>hbase_audit_log_parsed</value>
+            <description>topic for kafka data sink</description>
+        </property>
+        <property>
+            <name>dataSinkConfig.brokerList</name>
+            <displayName>dataSinkConfig.brokerList</displayName>
+            <value>sandbox.hortonworks.com:6667</value>
+            <description>kafka broker list</description>
+        </property>
+        <property>
+            <name>dataSinkConfig.serializerClass</name>
+            <displayName>dataSinkConfig.serializerClass</displayName>
+            <value>kafka.serializer.StringEncoder</value>
+            <description>serializer class Kafka message value</description>
+        </property>
+        <property>
+            <name>dataSinkConfig.keySerializerClass</name>
+            <displayName>dataSinkConfig.keySerializerClass</displayName>
+            <value>kafka.serializer.StringEncoder</value>
+            <description>serializer class Kafka message key</description>
+        </property>
+    </configuration>
+    <streams>
+        <stream>
+            <streamId>gc_log_stream</streamId>
+            <description>GC Log Stream</description>
+            <validate>true</validate>
+            <timeseries>true</timeseries>
+            <columns>
+            </columns>
+        </stream>
+    </streams>
+    <docs>
+        <install>
+            # Step 1: Create source kafka topic named "gc_log_${site}"
+
+            ./bin/kafka-topics.sh --create --topic gc_log_${site} --replication-factor 1 --replication 1
+
+            # Step 2: Set up data collector to flow data into kafka topic in
+
+            ./bin/logstash -f log_collector.conf
+
+            ## `log_collector.conf` sample as following:
+
+            input {
+
+            }
+            filter {
+
+            }
+            output{
+
+            }
+
+            # Step 3: start application
+
+            # Step 4: monitor with featured portal or alert with policies
+        </install>
+        <uninstall>
+            # Step 1: stop and uninstall application
+            # Step 2: delete kafka topic named "${site}_example_source_topic"
+            # Step 3: stop logstash
+        </uninstall>
+    </docs>
+</application>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-gc/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.GCLogApplicationProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-gc/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.GCLogApplicationProvider.xml b/eagle-gc/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.GCLogApplicationProvider.xml
deleted file mode 100644
index 213132d..0000000
--- a/eagle-gc/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.GCLogApplicationProvider.xml
+++ /dev/null
@@ -1,221 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  ~ /*
-  ~  *
-  ~  *    Licensed to the Apache Software Foundation (ASF) under one or more
-  ~  *    contributor license agreements.  See the NOTICE file distributed with
-  ~  *    this work for additional information regarding copyright ownership.
-  ~  *    The ASF licenses this file to You under the Apache License, Version 2.0
-  ~  *    (the "License"); you may not use this file except in compliance with
-  ~  *    the License.  You may obtain a copy of the License at
-  ~  *
-  ~  *       http://www.apache.org/licenses/LICENSE-2.0
-  ~  *
-  ~  *    Unless required by applicable law or agreed to in writing, software
-  ~  *    distributed under the License is distributed on an "AS IS" BASIS,
-  ~  *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~  *    See the License for the specific language governing permissions and
-  ~  *    limitations under the License.
-  ~  *
-  ~  */
-  -->
-
-<application>
-    <type>GCLogApplication</type>
-    <name>GC Log Monitoring Application</name>
-    <version>0.5.0-incubating</version>
-    <appClass>org.apache.eagle.gc.GCLogApplication</appClass>
-    <viewPath>/apps/example</viewPath>
-    <configuration>
-        <property>
-            <name>dataSourceConfig.topic</name>
-            <displayName>dataSourceConfig.topic</displayName>
-            <value>gc_log</value>
-            <description>data source topic</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.zkConnection</name>
-            <displayName>dataSourceConfig.zkConnection</displayName>
-            <value>server.eagle.apache.org:2181</value>
-            <description>zk connection</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.zkConnectionTimeoutMS</name>
-            <displayName>dataSourceConfig.zkConnectionTimeoutMS</displayName>
-            <value>15000</value>
-            <description>zk connection timeout in milliseconds</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.fetchSize</name>
-            <displayName>dataSourceConfig.fetchSize</displayName>
-            <value>1048586</value>
-            <description>kafka fetch size</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKServers</name>
-            <displayName>dataSourceConfig.transactionZKServers</displayName>
-            <value>server.eagle.apache.org</value>
-            <description>zookeeper server for offset transaction</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKPort</name>
-            <displayName>dataSourceConfig.transactionZKPort</displayName>
-            <value>2181</value>
-            <description>zookeeper server port for offset transaction</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKRoot</name>
-            <displayName>dataSourceConfig.transactionZKRoot</displayName>
-            <value>/consumers</value>
-            <description>offset transaction root</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.consumerGroupId</name>
-            <displayName>dataSourceConfig.consumerGroupId</displayName>
-            <value>eagle.hbaseaudit.consumer</value>
-            <description>kafka consumer group Id</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionStateUpdateMS</name>
-            <displayName>dataSourceConfig.transactionStateUpdateMS</displayName>
-            <value>2000</value>
-            <description>zk upate</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.schemeCls</name>
-            <displayName>dataSourceConfig.schemeCls</displayName>
-            <value>storm.kafka.StringScheme</value>
-            <description>scheme class</description>
-        </property>
-        <property>
-            <name>topology.numOfSpoutTasks</name>
-            <displayName>topology.numOfSpoutTasks</displayName>
-            <value>2</value>
-            <description>number of spout tasks</description>
-        </property>
-        <property>
-            <name>topology.numOfAnalyzerTasks</name>
-            <displayName>topology.numOfAnalyzerTasks</displayName>
-            <value>2</value>
-            <description>number of analyzer tasks</description>
-        </property>
-        <property>
-            <name>topology.numOfGeneratorTasks</name>
-            <displayName>topology.numOfGeneratorTasks</displayName>
-            <value>2</value>
-            <description>number of generator tasks</description>
-        </property>
-        <property>
-            <name>topology.numOfSinkTasks</name>
-            <displayName>topology.numOfSinkTasks</displayName>
-            <value>2</value>
-            <description>number of sink tasks</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.host</name>
-            <displayName>eagleProps.eagleService.host</displayName>
-            <value>localhost</value>
-            <description>eagle service host</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.port</name>
-            <displayName>eagleProps.eagleService.port</displayName>
-            <value>8080</value>
-            <description>eagle service port</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.username</name>
-            <displayName>eagleProps.eagleService.username</displayName>
-            <value>admin</value>
-            <description>eagle service username</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.password</name>
-            <displayName>eagleProps.eagleService.password</displayName>
-            <value>secret</value>
-            <description>eagle service password</description>
-        </property>
-        <property>
-            <name>dataSinkConfig.topic</name>
-            <displayName>dataSinkConfig.topic</displayName>
-            <value>hbase_audit_log_parsed</value>
-            <description>topic for kafka data sink</description>
-        </property>
-        <property>
-            <name>dataSinkConfig.brokerList</name>
-            <displayName>dataSinkConfig.brokerList</displayName>
-            <value>sandbox.hortonworks.com:6667</value>
-            <description>kafka broker list</description>
-        </property>
-        <property>
-            <name>dataSinkConfig.serializerClass</name>
-            <displayName>dataSinkConfig.serializerClass</displayName>
-            <value>kafka.serializer.StringEncoder</value>
-            <description>serializer class Kafka message value</description>
-        </property>
-        <property>
-            <name>dataSinkConfig.keySerializerClass</name>
-            <displayName>dataSinkConfig.keySerializerClass</displayName>
-            <value>kafka.serializer.StringEncoder</value>
-            <description>serializer class Kafka message key</description>
-        </property>
-    </configuration>
-    <streams>
-        <stream>
-            <streamId>gc_log_stream</streamId>
-            <description>GC Log Stream</description>
-            <validate>true</validate>
-            <timeseries>true</timeseries>
-            <columns>
-                <column>
-                    <name>action</name>
-                    <type>string</type>
-                </column>
-                <column>
-                    <name>host</name>
-                    <type>string</type>
-                </column>
-                <column>
-                    <name>status</name>
-                    <type>string</type>
-                </column>
-                <column>
-                    <name>timestamp</name>
-                    <type>long</type>
-                </column>
-            </columns>
-        </stream>
-    </streams>
-    <docs>
-        <install>
-            # Step 1: Create source kafka topic named "${site}_example_source_topic"
-
-            ./bin/kafka-topics.sh --create --topic example_source_topic --replication-factor 1 --replication 1
-
-            # Step 2: Set up data collector to flow data into kafka topic in
-
-            ./bin/logstash -f log_collector.conf
-
-            ## `log_collector.conf` sample as following:
-
-            input {
-
-            }
-            filter {
-
-            }
-            output{
-
-            }
-
-            # Step 3: start application
-
-            # Step 4: monitor with featured portal or alert with policies
-        </install>
-        <uninstall>
-            # Step 1: stop and uninstall application
-            # Step 2: delete kafka topic named "${site}_example_source_topic"
-            # Step 3: stop logstash
-        </uninstall>
-    </docs>
-</application>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-gc/src/main/resources/application-gclog.conf
----------------------------------------------------------------------
diff --git a/eagle-gc/src/main/resources/application-gclog.conf b/eagle-gc/src/main/resources/application-gclog.conf
new file mode 100644
index 0000000..07a760a
--- /dev/null
+++ b/eagle-gc/src/main/resources/application-gclog.conf
@@ -0,0 +1,44 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+{
+  "appId" : "GCLogApp",
+  "mode" : "LOCAL",
+  "siteId" : "testsite",
+  "topology" : {
+    "numOfTotalWorkers" : 2,
+    "numOfSpoutTasks" : 2,
+    "numOfAnalyzerTasks" : 2,
+    "numOfGeneratorTasks" : 2,
+    "numOfSinkTasks" : 2
+  },
+  "dataSourceConfig": {
+    "topic" : "gc_log",
+    "zkConnection" : "server.eagle.apache.org:2181",
+    "txZkServers" : "server.eagle.apache.org:2181",
+    "schemeCls" : "storm.kafka.StringScheme"
+  },
+  "eagleService": {
+    "host": "localhost",
+    "port": 9090,
+    "username": "admin",
+    "password": "secret"
+  },
+  "dataSinkConfig": {
+    "topic" : "gc_log_parsed",
+    "brokerList" : "server.eagle.apache.org:6667",
+    "serializerClass" : "kafka.serializer.StringEncoder",
+    "keySerializerClass" : "kafka.serializer.StringEncoder"
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-gc/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/eagle-gc/src/main/resources/application.conf b/eagle-gc/src/main/resources/application.conf
deleted file mode 100644
index 33fa733..0000000
--- a/eagle-gc/src/main/resources/application.conf
+++ /dev/null
@@ -1,52 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-{
-  "appId" : "GCLogApp",
-  "mode" : "LOCAL",
-  "siteId" : "testsite",
-  "topology" : {
-    "numOfTotalWorkers" : 2,
-    "numOfSpoutTasks" : 2,
-    "numOfAnalyzerTasks" : 2,
-    "numOfGeneratorTasks" : 2,
-    "numOfSinkTasks" : 2
-  },
-  "dataSourceConfig": {
-    "topic" : "gc_log",
-    "zkConnection" : "server.eagle.apache.org:2181",
-    "zkConnectionTimeoutMS" : 15000,
-    "fetchSize" : 1048586,
-    "transactionZKServers" : "server.eagle.apache.org",
-    "transactionZKPort" : "2181",
-    "transactionZKRoot" : "/consumers",
-    "consumerGroupId" : "gc.log.eagle.consumer",
-    "transactionStateUpdateMS" : 2000,
-    "schemeCls" : "storm.kafka.StringScheme"
-  },
-  "eagleProps" : {
-    "eagleService": {
-      "host": "localhost",
-      "port": 9090,
-      "username": "admin",
-      "password": "secret"
-    }
-  },
-  "dataSinkConfig": {
-    "topic" : "gc_log_parsed",
-    "brokerList" : "server.eagle.apache.org:6667",
-    "serializerClass" : "kafka.serializer.StringEncoder",
-    "keySerializerClass" : "kafka.serializer.StringEncoder"
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-gc/src/main/resources/gclog-init-sandbox.sh
----------------------------------------------------------------------
diff --git a/eagle-gc/src/main/resources/gclog-init-sandbox.sh b/eagle-gc/src/main/resources/gclog-init-sandbox.sh
deleted file mode 100755
index ded245f..0000000
--- a/eagle-gc/src/main/resources/gclog-init-sandbox.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with`
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-
-##### add policies ##########
-echo ""
-echo "Creating Meta Data for GC Monitoring  ..."
-
-
-#### AlertDataSourceService: alert streams generated from data source
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=SiteApplicationService" \
-  -d '
-  [
-     {
-        "tags":{
-           "site":"sandbox",
-           "application":"NNGCLog"
-        },
-        "enabled": true        
-     }
-  ]
-  '
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=ApplicationDescService" \
-  -d '
-  [
-     {
-        "tags":{
-           "application":"NNGCLog"
-        },
-        "desc":"hadoop gc log monitoring",
-        "alias":"NNGCLogMonitor",
-        "group":"DAM",
-        "config":"{}",
-        "features":["common","metadata"]
-     }
-  ]
-  '
-  
-#### AlertStreamService: alert streams generated from data source
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertStreamService" -d '[{"prefix":"alertStream","tags":{"application":"NNGCLog","streamName":"NNGCLogStream"},"desc":"alert event stream from namenode gc log"}]'
-
-#### AlertExecutorService: what alert streams are consumed by alert executor
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertExecutorService" -d '[{"prefix":"alertExecutor","tags":{"application":"NNGCLog","alertExecutorId":"NNGCAlert","streamName":"NNGCLogStream"},"desc":"alert executor for namenode gc log"}]'
-
-#### AlertStreamSchemaService: schema for event from alert stream
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertStreamSchemaService" -d '[{"prefix":"alertStreamSchema","category":"","attrType":"long","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"timestamp"}},{"prefix":"alertStreamSchema","category":"","attrType":"string","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"eventType"}},{"prefix":"alertStreamSchema","category":"","attrType":"double","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"pausedGCTimeSec"}},{"prefix":"alertStreamSchema","category":"","attrType":"bool","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"youngAreaGCed"}},{"prefix":"alertStreamSchema","category":"","attrType":"long","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"youngUsedHeapK"}}
 ,{"prefix":"alertStreamSchema","category":"","attrType":"long","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"youngTotalHeapK"}},{"prefix":"alertStreamSchema","category":"","attrType":"bool","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"tenuredAreaGCed"}},{"prefix":"alertStreamSchema","category":"","attrType":"long","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"tenuredAreaGCed"}},{"prefix":"alertStreamSchema","category":"","attrType":"long","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"tenuredTotalHeapK"}},{"prefix":"alertStreamSchema","category":"","attrType":"bool","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"permAreaGCed"}},{"prefix":"alertStreamSchema","category":"","attrType":"long","attrValueResolver":"","tags":{"application":"NNGC
 Log","streamName":"NNGCLogStream","attrName":"permUsedHeapK"}},{"prefix":"alertStreamSchema","category":"","attrType":"long","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"permTotalHeapK"}},{"prefix":"alertStreamSchema","category":"","attrType":"bool","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"totalHeapUsageAvailable"}},{"prefix":"alertStreamSchema","category":"","attrType":"long","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"usedTotalHeapK"}},{"prefix":"alertStreamSchema","category":"","attrType":"bool","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"tenuredUsedHeapK"}},{"prefix":"alertStreamSchema","category":"","attrType":"long","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"totalHeapK"}},{"prefix":"alertStreamSchema","category":"","at
 trType":"string","attrValueResolver":"","tags":{"application":"NNGCLog","streamName":"NNGCLogStream","attrName":"logLine"}}]'
-
-
-echo " Initialized GC Log Monitoring ...."
-
-

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-gc/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/eagle-gc/src/main/resources/log4j.properties b/eagle-gc/src/main/resources/log4j.properties
index f5fb8a8..2ed4dde 100644
--- a/eagle-gc/src/main/resources/log4j.properties
+++ b/eagle-gc/src/main/resources/log4j.properties
@@ -15,22 +15,7 @@
 
 log4j.rootLogger=INFO, stdout, DRFA
 
-eagle.log.dir=./logs
-eagle.log.file=eagle.log
-
-
 # standard output
 log4j.appender.stdout=org.apache.log4j.ConsoleAppender
 log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %p [%t] %c{2}[%L]: %m%n
-
-# Daily Rolling File Appender
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${eagle.log.dir}/${eagle.log.file}
-log4j.appender.DRFA.DatePattern=yyyy-MM-dd
-## 30-day backup
-# log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p [%t] %c{2}[%L]: %m%n
\ No newline at end of file
+log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %p [%t] %c{2}[%L]: %m%n
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/SecurityExternalMetadataResource.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/SecurityExternalMetadataResource.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/SecurityExternalMetadataResource.java
index f0ec69e..ccf8933 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/SecurityExternalMetadataResource.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/SecurityExternalMetadataResource.java
@@ -79,4 +79,19 @@ public class SecurityExternalMetadataResource {
     public void addIPZones(Collection<IPZoneEntity> list){
         dao.addIPZone(list);
     }
+
+
+    @Path("/hiveSensitivity")
+    @GET
+    @Produces("application/json")
+    public Collection<HiveSensitivityEntity> getHiveSensitivities(@QueryParam("site") String site){
+        return dao.listHiveSensitivities();
+    }
+
+    @Path("/hiveSensitivity")
+    @POST
+    @Consumes("application/json")
+    public void addHiveSensitivities(Collection<HiveSensitivityEntity> list){
+        dao.addHiveSensitivity(list);
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java
index fe0c8e7..e6af377 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java
@@ -64,8 +64,8 @@ public class ExternalDataJoiner {
 	
 	public ExternalDataJoiner(Class<? extends Job> jobCls, Config config, String id) throws Exception{
 		this.id = id;
-		Map<String, Object> map = new HashMap<String, Object>();
-        for(Map.Entry<String, ConfigValue> entry : config.getObject("eagleProps").entrySet()){
+		Map<String, Object> map = new HashMap<>();
+        for(Map.Entry<String, ConfigValue> entry : config.getObject("eagleService").entrySet()){
             map.put(entry.getKey(), entry.getValue().unwrapped());
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogApplication.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogApplication.java b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogApplication.java
index f5753cf..c5c0388 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogApplication.java
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogApplication.java
@@ -27,6 +27,7 @@ import org.apache.eagle.app.StormApplication;
 import org.apache.eagle.app.environment.impl.StormEnvironment;
 import org.apache.eagle.app.sink.StormStreamSink;
 import org.apache.eagle.dataproc.impl.storm.kafka.KafkaSpoutProvider;
+import storm.kafka.StringScheme;
 
 /**
  * Since 7/27/16.
@@ -52,7 +53,7 @@ public class HBaseAuditLogApplication extends StormApplication {
 
         builder.setSpout("ingest", spout, numOfSpoutTasks);
         BoltDeclarer boltDeclarer = builder.setBolt("parserBolt", bolt, numOfParserTasks);
-        boltDeclarer.fieldsGrouping("ingest", new Fields("f1"));
+        boltDeclarer.fieldsGrouping("ingest", new Fields(StringScheme.STRING_SCHEME_KEY));
 
         HbaseResourceSensitivityDataJoinBolt joinBolt = new HbaseResourceSensitivityDataJoinBolt(config);
         BoltDeclarer joinBoltDeclarer = builder.setBolt("joinBolt", joinBolt, numOfJoinTasks);

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java
index cd479f3..a04f1b2 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java
@@ -57,11 +57,10 @@ public class HbaseResourceSensitivityPollingJob extends AbstractResourceSensitiv
     }
 
     private Collection<HBaseSensitivityEntity> load(JobDataMap jobDataMap) throws Exception {
-        Map<String, Object> map = (Map<String,Object>)jobDataMap.get(EagleConfigConstants.EAGLE_SERVICE);
-        String eagleServiceHost = (String)map.get(EagleConfigConstants.HOST);
-        Integer eagleServicePort = Integer.parseInt(map.get(EagleConfigConstants.PORT).toString());
-        String username = map.containsKey(EagleConfigConstants.USERNAME) ? (String)map.get(EagleConfigConstants.USERNAME) : null;
-        String password = map.containsKey(EagleConfigConstants.PASSWORD) ? (String)map.get(EagleConfigConstants.PASSWORD) : null;
+        String eagleServiceHost = (String)jobDataMap.get(EagleConfigConstants.HOST);
+        Integer eagleServicePort = Integer.parseInt(jobDataMap.get(EagleConfigConstants.PORT).toString());
+        String username = jobDataMap.containsKey(EagleConfigConstants.USERNAME) ? (String)jobDataMap.get(EagleConfigConstants.USERNAME) : null;
+        String password = jobDataMap.containsKey(EagleConfigConstants.PASSWORD) ? (String)jobDataMap.get(EagleConfigConstants.PASSWORD) : null;
 
         // load from eagle database
         LOG.info("Load hbase resource sensitivity information from eagle service "

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
index f8d26e5..a085952 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
@@ -17,7 +17,7 @@
   -->
 
 <application>
-    <type>HbaseAuditLogApplication</type>
+    <type>HBaseAuditLogApplication</type>
     <name>Hbase Audit Log Monitoring Application</name>
     <version>0.5.0-incubating</version>
     <appClass>org.apache.eagle.security.hbase.HBaseAuditLogApplication</appClass>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf
index 3947cba..784d75d 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf
@@ -27,23 +27,14 @@
   "dataSourceConfig": {
     "topic" : "hbase_audit_log",
     "zkConnection" : "server.eagle.apache.org:2181",
-    "zkConnectionTimeoutMS" : 15000,
-    "fetchSize" : 1048586,
-    "transactionZKServers" : "server.eagle.apache.org",
-    "transactionZKPort" : 2181,
-    "transactionZKRoot" : "/consumers",
-    "consumerGroupId" : "eagle.hbaseaudit.consumer",
-    "transactionStateUpdateMS" : 2000,
+    "txZkServers" : "server.eagle.apache.org:2181",
     "schemeCls" : "storm.kafka.StringScheme"
   },
-  "eagleProps" : {
-    "dataJoinPollIntervalSec" : 30,
-    "eagleService": {
-      "host": "localhost",
-      "port": 9090
-      "username": "admin",
-      "password": "secret"
-    }
+  "eagleService": {
+    "host": "localhost",
+    "port": 9090,
+    "username": "admin",
+    "password": "secret"
   },
   "dataSinkConfig": {
     "topic" : "hbase_audit_log_parsed",

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java
index 375edc7..c0f90ee 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java
@@ -62,11 +62,10 @@ public class FileSensitivityPollingJob implements Job{
 	}
 
 	private Collection<HdfsSensitivityEntity> load(JobDataMap jobDataMap) throws Exception{
-		Map<String, Object> map = (Map<String,Object>)jobDataMap.get(EagleConfigConstants.EAGLE_SERVICE);
-		String eagleServiceHost = (String)map.get(EagleConfigConstants.HOST);
-		Integer eagleServicePort = Integer.parseInt(map.get(EagleConfigConstants.PORT).toString());
-		String username = map.containsKey(EagleConfigConstants.USERNAME) ? (String)map.get(EagleConfigConstants.USERNAME) : null;
-		String password = map.containsKey(EagleConfigConstants.PASSWORD) ? (String)map.get(EagleConfigConstants.PASSWORD) : null;
+		String eagleServiceHost = (String)jobDataMap.get(EagleConfigConstants.HOST);
+		Integer eagleServicePort = Integer.parseInt(jobDataMap.get(EagleConfigConstants.PORT).toString());
+		String username = jobDataMap.containsKey(EagleConfigConstants.USERNAME) ? (String)jobDataMap.get(EagleConfigConstants.USERNAME) : null;
+		String password = jobDataMap.containsKey(EagleConfigConstants.PASSWORD) ? (String)jobDataMap.get(EagleConfigConstants.PASSWORD) : null;
 		// load from eagle database
 		LOG.info("Load file sensitivity information from eagle service " + eagleServiceHost + ":" + eagleServicePort);
 		IMetadataServiceClient client = new MetadataServiceClientImpl(eagleServiceHost, eagleServicePort, "/rest");

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java
index dc80eb9..ea49ba4 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java
@@ -65,11 +65,10 @@ public class IPZonePollingJob implements Job{
 	}
 
 	private Collection<IPZoneEntity> load(JobDataMap jobDataMap) throws Exception{
-		Map<String, Object> map = (Map<String,Object>)jobDataMap.get(EagleConfigConstants.EAGLE_SERVICE);
-		String eagleServiceHost = (String)map.get(EagleConfigConstants.HOST);
-		Integer eagleServicePort = Integer.parseInt(map.get(EagleConfigConstants.PORT).toString());
-		String username = map.containsKey(EagleConfigConstants.USERNAME) ? (String)map.get(EagleConfigConstants.USERNAME) : null;
-		String password = map.containsKey(EagleConfigConstants.PASSWORD) ? (String)map.get(EagleConfigConstants.PASSWORD) : null;
+		String eagleServiceHost = (String)jobDataMap.get(EagleConfigConstants.HOST);
+		Integer eagleServicePort = Integer.parseInt(jobDataMap.get(EagleConfigConstants.PORT).toString());
+		String username = jobDataMap.containsKey(EagleConfigConstants.USERNAME) ? (String)jobDataMap.get(EagleConfigConstants.USERNAME) : null;
+		String password = jobDataMap.containsKey(EagleConfigConstants.PASSWORD) ? (String)jobDataMap.get(EagleConfigConstants.PASSWORD) : null;
 		// load from eagle database
 		LOG.info("Load ip zone information from eagle service " + eagleServiceHost + ":" + eagleServicePort);
 		IMetadataServiceClient client = new MetadataServiceClientImpl(eagleServiceHost, eagleServicePort, "/rest");

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
index dadab98..2419747 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
@@ -27,78 +27,7 @@
     <appClass>org.apache.eagle.security.auditlog.HdfsAuditLogApplication</appClass>
     <viewPath>/apps/example</viewPath>
     <configuration>
-        <property>
-            <name>dataSourceConfig.topic</name>
-            <displayName>dataSourceConfig.topic</displayName>
-            <value>hdfs_audit_log</value>
-            <description>data source topic</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.zkConnection</name>
-            <displayName>dataSourceConfig.zkConnection</displayName>
-            <value>server.eagle.apache.org</value>
-            <description>zk connection</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.zkConnectionTimeoutMS</name>
-            <displayName>dataSourceConfig.zkConnectionTimeoutMS</displayName>
-            <value>15000</value>
-            <description>zk connection timeout in milliseconds</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.fetchSize</name>
-            <displayName>dataSourceConfig.fetchSize</displayName>
-            <value>1048586</value>
-            <description>kafka fetch size</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKServers</name>
-            <displayName>dataSourceConfig.transactionZKServers</displayName>
-            <value>server.eagle.apache.org</value>
-            <description>zookeeper server for offset transaction</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKPort</name>
-            <displayName>dataSourceConfig.transactionZKPort</displayName>
-            <value>2181</value>
-            <description>zookeeper server port for offset transaction</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKRoot</name>
-            <displayName>dataSourceConfig.transactionZKRoot</displayName>
-            <value>/consumers</value>
-            <description>offset transaction root</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.consumerGroupId</name>
-            <displayName>dataSourceConfig.consumerGroupId</displayName>
-            <value>eagle.hdfsaudit.consumer</value>
-            <description>kafka consumer group Id</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionStateUpdateMS</name>
-            <displayName>dataSourceConfig.transactionStateUpdateMS</displayName>
-            <value>2000</value>
-            <description>zk upate</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.schemeCls</name>
-            <displayName>dataSourceConfig.schemeCls</displayName>
-            <value>storm.kafka.StringScheme</value>
-            <description>scheme class</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKPort</name>
-            <displayName>dataSourceConfig.transactionZKPort</displayName>
-            <value>2181</value>
-            <description>zookeeper server port for offset transaction</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKPort</name>
-            <displayName>dataSourceConfig.transactionZKPort</displayName>
-            <value>2181</value>
-            <description>zookeeper server port for offset transaction</description>
-        </property>
+        <!-- topology related configurations -->
         <property>
             <name>topology.numOfSpoutTasks</name>
             <displayName>topology.numOfSpoutTasks</displayName>
@@ -123,36 +52,68 @@
             <value>2</value>
             <description>number of sink tasks</description>
         </property>
+
+        <!-- data source configurations -->
+        <property>
+            <name>dataSourceConfig.topic</name>
+            <displayName>dataSourceConfig.topic</displayName>
+            <value>hdfs_audit_log</value>
+            <description>data source topic</description>
+        </property>
         <property>
-            <name>eagleProps.dataJoinPollIntervalSec</name>
+            <name>dataSourceConfig.zkConnection</name>
+            <displayName>dataSourceConfig.zkConnection</displayName>
+            <value>server.eagle.apache.org</value>
+            <description>zk connection</description>
+        </property>
+        <property>
+            <name>dataSourceConfig.txZkServers</name>
+            <displayName>dataSourceConfig.txZkServers</displayName>
+            <value>server.eagle.apache.org:2181</value>
+            <description>zookeeper server for offset transaction</description>
+        </property>
+        <property>
+            <name>dataSourceConfig.schemeCls</name>
+            <displayName>dataSourceConfig.schemeCls</displayName>
+            <value>storm.kafka.StringScheme</value>
+            <description>scheme class</description>
+        </property>
+
+        <!-- data enrich configurations -->
+        <property>
+            <name>dataEnrich.dataJoinPollIntervalSec</name>
             <displayName>dataEnrich.dataJoinPollIntervalSec</displayName>
             <value>30</value>
             <description>interval in seconds for polling</description>
         </property>
+
+        <!-- eagle service configurations -->
         <property>
-            <name>eagleProps.eagleService.host</name>
-            <displayName>eagleProps.eagleService.host</displayName>
+            <name>eagleService.host</name>
+            <displayName>eagleService.host</displayName>
             <value>localhost</value>
             <description>eagle service host</description>
         </property>
         <property>
-            <name>eagleProps.eagleService.port</name>
-            <displayName>eagleProps.eagleService.port</displayName>
-            <value>8080</value>
+            <name>eagleService.port</name>
+            <displayName>eagleService.port</displayName>
+            <value>9090</value>
             <description>eagle service port</description>
         </property>
         <property>
-            <name>eagleProps.eagleService.username</name>
-            <displayName>eagleProps.eagleService.username</displayName>
+            <name>eagleService.username</name>
+            <displayName>eagleService.username</displayName>
             <value>admin</value>
             <description>eagle service username</description>
         </property>
         <property>
-            <name>eagleProps.eagleService.password</name>
-            <displayName>eagleProps.eagleService.password</displayName>
+            <name>eagleService.password</name>
+            <displayName>eagleService.password</displayName>
             <value>secret</value>
             <description>eagle service password</description>
         </property>
+
+        <!-- data sink configurations -->
         <property>
             <name>dataSinkConfig.topic</name>
             <displayName>dataSinkConfig.topic</displayName>
@@ -162,7 +123,7 @@
         <property>
             <name>dataSinkConfig.brokerList</name>
             <displayName>dataSinkConfig.brokerList</displayName>
-            <value>sandbox.hortonworks.com:6667</value>
+            <value>server.eagle.apache.org:6667</value>
             <description>kafka broker list</description>
         </property>
         <property>
@@ -178,7 +139,7 @@
             <description>serializer class Kafka message key</description>
         </property>
 
-        <!-- properties for hdfs file system access and attribute resolver-->
+        <!-- web app related configurations -->
         <property>
             <name>fs.defaultFS</name>
             <displayName>fs.defaultFS</displayName>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf
index efa6467..ed4609d 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf
@@ -28,23 +28,14 @@
   "dataSourceConfig": {
     "topic" : "hdfs_audit_log",
     "zkConnection" : "server.eagle.apache.org:2181",
-    "zkConnectionTimeoutMS" : 15000,
-    "consumerGroupId" : "EagleConsumer",
-    "fetchSize" : 1048586,
-    "transactionZKServers" : "server.eagle.apache.org",
-    "transactionZKPort" : 2181,
-    "transactionZKRoot" : "/consumers",
-    "transactionStateUpdateMS" : 2000,
+    "txZkServers" : "server.eagle.apache.org:2181",
     "schemeCls" : "storm.kafka.StringScheme"
   },
-  "eagleProps" : {
-  	"dataJoinPollIntervalSec" : 30,
-    "eagleService": {
-      "host": "localhost",
-      "port": 9090,
-      "username": "admin",
-      "password": "secret"
-    }
+  "eagleService": {
+    "host": "localhost",
+    "port": 9090,
+    "username": "admin",
+    "password": "secret"
   },
   "dataSinkConfig": {
     "topic" : "hdfs_audit_log_parsed",

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hdfs-authlog/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-authlog/src/main/resources/application.conf b/eagle-security/eagle-security-hdfs-authlog/src/main/resources/application.conf
index 45fd424..f38de11 100644
--- a/eagle-security/eagle-security-hdfs-authlog/src/main/resources/application.conf
+++ b/eagle-security/eagle-security-hdfs-authlog/src/main/resources/application.conf
@@ -19,24 +19,17 @@
     "numOfTotalWorkers" : 2,
     "numOfSpoutTasks" : 2,
     "numOfParserTasks" : 2,
-    "numOfSinkTasks" : 2,
-    "name" : "sandbox-hdfsAuthLog-topology",
+    "numOfSinkTasks" : 2
   },
   "dataSourceConfig": {
-    "topic" : "sandbox_hdfs_auth_log",
-    "zkConnection" : "sandbox.hortonworks.com:2181",
-    "zkConnectionTimeoutMS" : 15000,
-    "fetchSize" : 1048586,
-    "transactionZKServers" : "sandbox.hortonworks.com",
-    "transactionZKPort" : 2181,
-    "transactionZKRoot" : "/consumers",
-    "consumerGroupId" : "eagle.hdfssecurity.consumer",
-    "transactionStateUpdateMS" : 2000,
+    "topic" : "hdfs_auth_log",
+    "zkConnection" : "server.eagle.apache.org:2181",
+    "txZkServers" : "server.eagle.apache.org:2181",
     "schemeCls" : "storm.kafka.StringScheme"
   },
   "dataSinkConfig": {
-    "topic" : "sandbox_hdfs_auth_log_parsed",
-    "brokerList" : "sandbox.hortonworks.com:6667",
+    "topic" : "hdfs_auth_log_parsed",
+    "brokerList" : "server.eagle.apache.org:6667",
     "serializerClass" : "kafka.serializer.StringEncoder",
     "keySerializerClass" : "kafka.serializer.StringEncoder"
   }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/HiveQueryMonitoringAppProvider.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/HiveQueryMonitoringAppProvider.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/HiveQueryMonitoringAppProvider.java
new file mode 100644
index 0000000..7646595
--- /dev/null
+++ b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/HiveQueryMonitoringAppProvider.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eagle.security.hive;
+
+import org.apache.eagle.app.spi.AbstractApplicationProvider;
+import org.apache.eagle.security.hive.HiveQueryMonitoringApplication;
+
+/**
+ * Since 8/12/16.
+ */
+public class HiveQueryMonitoringAppProvider extends AbstractApplicationProvider<HiveQueryMonitoringApplication> {
+    @Override
+    public HiveQueryMonitoringApplication getApplication() {
+        return new HiveQueryMonitoringApplication();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/HiveQueryMonitoringApplication.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/HiveQueryMonitoringApplication.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/HiveQueryMonitoringApplication.java
new file mode 100644
index 0000000..cc26ae7
--- /dev/null
+++ b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/HiveQueryMonitoringApplication.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eagle.security.hive;
+
+import backtype.storm.generated.StormTopology;
+import backtype.storm.topology.BoltDeclarer;
+import backtype.storm.topology.IRichSpout;
+import backtype.storm.topology.TopologyBuilder;
+import backtype.storm.tuple.Fields;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import org.apache.eagle.app.StormApplication;
+import org.apache.eagle.app.environment.impl.StormEnvironment;
+import org.apache.eagle.app.sink.StormStreamSink;
+import org.apache.eagle.security.hive.jobrunning.HiveJobRunningSourcedStormSpoutProvider;
+import org.apache.eagle.security.hive.jobrunning.HiveQueryParserBolt;
+import org.apache.eagle.security.hive.jobrunning.JobFilterBolt;
+import org.apache.eagle.security.hive.sensitivity.HiveResourceSensitivityDataJoinBolt;
+
+/**
+ * Since 8/11/16.
+ */
+public class HiveQueryMonitoringApplication extends StormApplication {
+    public final static String SPOUT_TASK_NUM = "topology.numOfSpoutTasks";
+    public final static String FILTER_TASK_NUM = "topology.numOfFilterTasks";
+    public final static String PARSER_TASK_NUM = "topology.numOfParserTasks";
+    public final static String JOIN_TASK_NUM = "topology.numOfJoinTasks";
+    public final static String SINK_TASK_NUM = "topology.numOfSinkTasks";
+
+    @Override
+    public StormTopology execute(Config config, StormEnvironment environment) {
+        TopologyBuilder builder = new TopologyBuilder();
+        HiveJobRunningSourcedStormSpoutProvider provider = new HiveJobRunningSourcedStormSpoutProvider();
+        IRichSpout spout = provider.getSpout(config, config.getInt(SPOUT_TASK_NUM));
+
+
+        int numOfSpoutTasks = config.getInt(SPOUT_TASK_NUM);
+        int numOfFilterTasks = config.getInt(FILTER_TASK_NUM);
+        int numOfParserTasks = config.getInt(PARSER_TASK_NUM);
+        int numOfJoinTasks = config.getInt(JOIN_TASK_NUM);
+        int numOfSinkTasks = config.getInt(SINK_TASK_NUM);
+
+        builder.setSpout("ingest", spout, numOfSpoutTasks);
+        JobFilterBolt bolt = new JobFilterBolt();
+        BoltDeclarer boltDeclarer = builder.setBolt("filterBolt", bolt, numOfFilterTasks);
+        boltDeclarer.fieldsGrouping("ingest", new Fields("jobId"));
+
+        HiveQueryParserBolt parserBolt = new HiveQueryParserBolt();
+        BoltDeclarer parserBoltDeclarer = builder.setBolt("parserBolt", parserBolt, numOfParserTasks);
+        parserBoltDeclarer.fieldsGrouping("filterBolt", new Fields("user"));
+
+        HiveResourceSensitivityDataJoinBolt joinBolt = new HiveResourceSensitivityDataJoinBolt(config);
+        BoltDeclarer joinBoltDeclarer = builder.setBolt("joinBolt", joinBolt, numOfJoinTasks);
+        joinBoltDeclarer.fieldsGrouping("parserBolt", new Fields("user"));
+
+        StormStreamSink sinkBolt = environment.getStreamSink("hive_query_stream",config);
+        BoltDeclarer kafkaBoltDeclarer = builder.setBolt("kafkaSink", sinkBolt, numOfSinkTasks);
+        kafkaBoltDeclarer.fieldsGrouping("joinBolt", new Fields("user"));
+        return builder.createTopology();
+    }
+
+    public static void main(String[] args){
+        Config config = ConfigFactory.load();
+        HiveQueryMonitoringApplication app = new HiveQueryMonitoringApplication();
+        app.run(config);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobFetchSpout.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobFetchSpout.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobFetchSpout.java
index 0db3d47..12d0732 100644
--- a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobFetchSpout.java
+++ b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveJobFetchSpout.java
@@ -22,7 +22,6 @@ import backtype.storm.task.TopologyContext;
 import backtype.storm.topology.OutputFieldsDeclarer;
 import backtype.storm.topology.base.BaseRichSpout;
 import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
 import org.apache.eagle.dataproc.core.ValuesArray;
 import org.apache.eagle.jpm.util.*;
 import org.apache.eagle.jpm.util.jobrecover.RunningJobManager;
@@ -120,11 +119,17 @@ public class HiveJobFetchSpout extends BaseRichSpout {
         LOG.info("start to fetch job list");
         try {
             List<AppInfo> apps = rmResourceFetcher.getResource(Constants.ResourceType.RUNNING_MR_JOB);
+            if(apps == null){
+                apps = new ArrayList<>();
+            }
             handleApps(apps, true);
 
             long fetchTime = Calendar.getInstance().getTimeInMillis();
             if (fetchTime - this.lastFinishAppTime > 60000l) {
                 apps = rmResourceFetcher.getResource(Constants.ResourceType.COMPLETE_MR_JOB, Long.toString(this.lastFinishAppTime));
+                if(apps == null){
+                    apps = new ArrayList<>();
+                }
                 handleApps(apps, false);
                 this.lastFinishAppTime = fetchTime;
                 this.runningJobManager.updateLastFinishTime(partitionId, fetchTime);

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryMonitoringAppProvider.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryMonitoringAppProvider.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryMonitoringAppProvider.java
deleted file mode 100644
index e79b0eb..0000000
--- a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryMonitoringAppProvider.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- *
- *  * Licensed to the Apache Software Foundation (ASF) under one or more
- *  * contributor license agreements.  See the NOTICE file distributed with
- *  * this work for additional information regarding copyright ownership.
- *  * The ASF licenses this file to You under the Apache License, Version 2.0
- *  * (the "License"); you may not use this file except in compliance with
- *  * the License.  You may obtain a copy of the License at
- *  *
- *  *     http://www.apache.org/licenses/LICENSE-2.0
- *  *
- *  *  Unless required by applicable law or agreed to in writing, software
- *  *  distributed under the License is distributed on an "AS IS" BASIS,
- *  *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  *  See the License for the specific language governing permissions and
- *  *  limitations under the License.
- *  *
- *
- */
-
-package org.apache.eagle.security.hive.jobrunning;
-
-import org.apache.eagle.app.spi.AbstractApplicationProvider;
-
-/**
- * Since 8/12/16.
- */
-public class HiveQueryMonitoringAppProvider extends AbstractApplicationProvider<HiveQueryMonitoringApplication> {
-    @Override
-    public HiveQueryMonitoringApplication getApplication() {
-        return new HiveQueryMonitoringApplication();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryMonitoringApplication.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryMonitoringApplication.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryMonitoringApplication.java
deleted file mode 100644
index 5938314..0000000
--- a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/jobrunning/HiveQueryMonitoringApplication.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *
- */
-
-package org.apache.eagle.security.hive.jobrunning;
-
-import backtype.storm.generated.StormTopology;
-import backtype.storm.topology.BoltDeclarer;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.TopologyBuilder;
-import backtype.storm.tuple.Fields;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import org.apache.eagle.app.StormApplication;
-import org.apache.eagle.app.environment.impl.StormEnvironment;
-import org.apache.eagle.app.sink.StormStreamSink;
-import org.apache.eagle.security.hive.sensitivity.HiveResourceSensitivityDataJoinBolt;
-
-/**
- * Since 8/11/16.
- */
-public class HiveQueryMonitoringApplication extends StormApplication {
-    public final static String SPOUT_TASK_NUM = "topology.numOfSpoutTasks";
-    public final static String FILTER_TASK_NUM = "topology.numOfFilterTasks";
-    public final static String PARSER_TASK_NUM = "topology.numOfParserTasks";
-    public final static String JOIN_TASK_NUM = "topology.numOfJoinTasks";
-    public final static String SINK_TASK_NUM = "topology.numOfSinkTasks";
-
-    @Override
-    public StormTopology execute(Config config, StormEnvironment environment) {
-        TopologyBuilder builder = new TopologyBuilder();
-        HiveJobRunningSourcedStormSpoutProvider provider = new HiveJobRunningSourcedStormSpoutProvider();
-        IRichSpout spout = provider.getSpout(config, config.getInt(SPOUT_TASK_NUM));
-
-
-        int numOfSpoutTasks = config.getInt(SPOUT_TASK_NUM);
-        int numOfFilterTasks = config.getInt(FILTER_TASK_NUM);
-        int numOfParserTasks = config.getInt(PARSER_TASK_NUM);
-        int numOfJoinTasks = config.getInt(JOIN_TASK_NUM);
-        int numOfSinkTasks = config.getInt(SINK_TASK_NUM);
-
-        builder.setSpout("ingest", spout, numOfSpoutTasks);
-        JobFilterBolt bolt = new JobFilterBolt();
-        BoltDeclarer boltDeclarer = builder.setBolt("filterBolt", bolt, numOfFilterTasks);
-        boltDeclarer.fieldsGrouping("ingest", new Fields("jobId"));
-
-        HiveQueryParserBolt parserBolt = new HiveQueryParserBolt();
-        BoltDeclarer parserBoltDeclarer = builder.setBolt("parserBolt", parserBolt, numOfParserTasks);
-        parserBoltDeclarer.fieldsGrouping("filterBolt", new Fields("user"));
-
-        HiveResourceSensitivityDataJoinBolt joinBolt = new HiveResourceSensitivityDataJoinBolt(config);
-        BoltDeclarer joinBoltDeclarer = builder.setBolt("joinBolt", joinBolt, numOfJoinTasks);
-        joinBoltDeclarer.fieldsGrouping("parserBolt", new Fields("user"));
-
-        StormStreamSink sinkBolt = environment.getStreamSink("hive_query_stream",config);
-        BoltDeclarer kafkaBoltDeclarer = builder.setBolt("kafkaSink", sinkBolt, numOfSinkTasks);
-        kafkaBoltDeclarer.fieldsGrouping("joinBolt", new Fields("user"));
-        return builder.createTopology();
-    }
-
-    public static void main(String[] args){
-        Config config = ConfigFactory.load();
-        HiveQueryMonitoringApplication app = new HiveQueryMonitoringApplication();
-        app.run(config);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/sensitivity/HiveResourceSensitivityPollingJob.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/sensitivity/HiveResourceSensitivityPollingJob.java b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/sensitivity/HiveResourceSensitivityPollingJob.java
index 061ef19..9fc7049 100644
--- a/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/sensitivity/HiveResourceSensitivityPollingJob.java
+++ b/eagle-security/eagle-security-hive/src/main/java/org/apache/eagle/security/hive/sensitivity/HiveResourceSensitivityPollingJob.java
@@ -68,11 +68,10 @@ public class HiveResourceSensitivityPollingJob implements Job {
     }
 
     private Collection<HiveSensitivityEntity> load(JobDataMap jobDataMap) throws Exception {
-        Map<String, Object> map = (Map<String,Object>)jobDataMap.get(EagleConfigConstants.EAGLE_SERVICE);
-        String eagleServiceHost = (String)map.get(EagleConfigConstants.HOST);
-        Integer eagleServicePort = Integer.parseInt(map.get(EagleConfigConstants.PORT).toString());
-        String username = map.containsKey(EagleConfigConstants.USERNAME) ? (String)map.get(EagleConfigConstants.USERNAME) : null;
-        String password = map.containsKey(EagleConfigConstants.PASSWORD) ? (String)map.get(EagleConfigConstants.PASSWORD) : null;
+        String eagleServiceHost = (String)jobDataMap.get(EagleConfigConstants.HOST);
+        Integer eagleServicePort = Integer.parseInt(jobDataMap.get(EagleConfigConstants.PORT).toString());
+        String username = jobDataMap.containsKey(EagleConfigConstants.USERNAME) ? (String)jobDataMap.get(EagleConfigConstants.USERNAME) : null;
+        String password = jobDataMap.containsKey(EagleConfigConstants.PASSWORD) ? (String)jobDataMap.get(EagleConfigConstants.PASSWORD) : null;
 
         // load from eagle database
         LOG.info("Load hive resource sensitivity information from eagle service "

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/e383e879/eagle-security/eagle-security-hive/src/main/resources/META-INF/services/org.apache.eagle.app.spi.ApplicationProvider
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/resources/META-INF/services/org.apache.eagle.app.spi.ApplicationProvider b/eagle-security/eagle-security-hive/src/main/resources/META-INF/services/org.apache.eagle.app.spi.ApplicationProvider
index fdd2754..4e3274f 100644
--- a/eagle-security/eagle-security-hive/src/main/resources/META-INF/services/org.apache.eagle.app.spi.ApplicationProvider
+++ b/eagle-security/eagle-security-hive/src/main/resources/META-INF/services/org.apache.eagle.app.spi.ApplicationProvider
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-org.apache.eagle.security.hive.jobrunning.HiveQueryMonitoringAppProvider
+org.apache.eagle.security.hive.HiveQueryMonitoringAppProvider



Mime
View raw message