eagle-commits mailing list archives

From: h..@apache.org
Subject: [2/2] incubator-eagle git commit: [EAGLE-532] Fix checkstyle on eagle-jpm and enable failOnViolation
Date: Thu, 08 Sep 2016 07:28:21 GMT
[EAGLE-532] Fix checkstyle on eagle-jpm and enable failOnViolation

https://issues.apache.org/jira/browse/EAGLE-532

Author: Hao Chen <hao@apache.org>

Closes #425 from haoch/EAGLE-532.


Project: http://git-wip-us.apache.org/repos/asf/incubator-eagle/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-eagle/commit/ecbdec8e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-eagle/tree/ecbdec8e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-eagle/diff/ecbdec8e

Branch: refs/heads/master
Commit: ecbdec8efae12a7b6044d0d998934a6653936d5a
Parents: a0fc870
Author: Hao Chen <hao@apache.org>
Authored: Thu Sep 8 15:28:02 2016 +0800
Committer: Hao Chen <hao@apache.org>
Committed: Thu Sep 8 15:28:02 2016 +0800

----------------------------------------------------------------------
 eagle-dev/checkstyle.xml                        |   2 +-
 .../hadoop/queue/HadoopQueueRunningMain.java    |  16 +-
 .../queue/common/HadoopClusterConstants.java    |  36 +-
 .../queue/common/HadoopYarnResourceUtils.java   |   2 +-
 .../common/YarnClusterResourceURLBuilder.java   |   8 +-
 .../queue/crawler/ClusterMetricsCrawler.java    |  66 ++--
 .../crawler/ClusterMetricsParseListener.java    | 240 ++++++------
 .../queue/crawler/RunningAppParseListener.java  | 145 +++----
 .../queue/crawler/RunningAppsCrawler.java       |  66 ++--
 .../queue/crawler/SchedulerInfoCrawler.java     |  67 ++--
 .../crawler/SchedulerInfoParseListener.java     |  40 +-
 .../exceptions/HadoopQueueFetcherException.java |  17 +-
 .../model/HadoopQueueEntityRepository.java      |   6 +-
 .../hadoop/queue/model/applications/App.java    | 386 +++++++++----------
 .../hadoop/queue/model/applications/Apps.java   |  18 +-
 .../queue/model/applications/AppsWrapper.java   |  16 +-
 .../queue/model/cluster/ClusterMetrics.java     | 198 ++++++++++
 .../model/cluster/ClusterMetricsWrapper.java    |  37 ++
 .../model/clusterMetrics/ClusterMetrics.java    | 198 ----------
 .../clusterMetrics/ClusterMetricsWrapper.java   |  37 --
 .../hadoop/queue/model/scheduler/Queue.java     | 357 ++++++++---------
 .../hadoop/queue/model/scheduler/Queues.java    |  16 +-
 .../queue/model/scheduler/ResourcesUsed.java    |  30 +-
 .../model/scheduler/RunningQueueAPIEntity.java  | 248 ++++++------
 .../hadoop/queue/model/scheduler/Scheduler.java |  16 +-
 .../queue/storm/HadoopQueueMessageId.java       |   8 +-
 .../storm/HadoopQueueMetricPersistBolt.java     |   1 -
 .../queue/storm/HadoopQueueRunningSpout.java    |  11 +-
 .../queue/TestHadoopYarnResourceUtils.java      |   2 +-
 .../apache/eagle/app/jpm/JPMApplication.java    |   2 +
 .../eagle/app/jpm/JPMApplicationProvider.java   |   4 +-
 .../history/crawler/JHFCrawlerDriverImpl.java   |   5 +-
 .../metrics/JobCountMetricsGenerator.java       |   8 +-
 .../JobExecutionMetricsCreationListener.java    |   2 +-
 .../jpm/mr/history/parser/JHFMRVer2Parser.java  |   2 +-
 .../jpm/mr/history/storm/JobHistorySpout.java   |   3 +-
 .../history/zkres/JobHistoryZKStateManager.java |  94 ++---
 .../jpm/mr/running/MRRunningJobApplication.java |   8 +-
 .../JobExecutionMetricsCreationListener.java    |   2 +-
 .../TaskExecutionMetricsCreationListener.java   |   2 +-
 .../mr/running/recover/MRRunningJobManager.java |   2 +-
 .../mr/running/storm/MRRunningJobParseBolt.java |   3 +-
 .../jpm/spark/history/SparkHistoryJobApp.java   |   8 +-
 .../spark/history/SparkHistoryJobAppConfig.java |   5 +-
 .../history/crawl/JHFSparkEventReader.java      |  20 +-
 .../status/JobHistoryZKStateManager.java        |   6 +-
 .../history/storm/SparkHistoryJobParseBolt.java |  13 +-
 .../history/storm/SparkHistoryJobSpout.java     |  15 +-
 .../eagle/jpm/util/JobNameNormalization.java    |   6 +-
 eagle-jpm/pom.xml                               |  13 +
 50 files changed, 1269 insertions(+), 1244 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-dev/checkstyle.xml
----------------------------------------------------------------------
diff --git a/eagle-dev/checkstyle.xml b/eagle-dev/checkstyle.xml
index 3c9c244..b292542 100644
--- a/eagle-dev/checkstyle.xml
+++ b/eagle-dev/checkstyle.xml
@@ -120,7 +120,7 @@
                      value="Type name ''{0}'' must match pattern ''{1}''."/>
         </module>
         <module name="MemberName">
-            <property name="format" value="^_*[a-z][a-z0-9][a-zA-Z0-9]*$"/>
+            <property name="format" value="^[a-z][a-z0-9]*[a-zA-Z0-9]*$"/>
             <message key="name.invalidPattern"
                      value="Member name ''{0}'' must match pattern ''{1}''."/>
         </module>
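
The MemberName pattern change above both relaxes and tightens the rule: leading underscores are no longer accepted, single-character names now pass, and an uppercase second character is now allowed. A minimal sketch of the difference, using hypothetical field names:

    // Old pattern: ^_*[a-z][a-z0-9][a-zA-Z0-9]*$
    // New pattern: ^[a-z][a-z0-9]*[a-zA-Z0-9]*$  (effectively ^[a-z][a-zA-Z0-9]*$)
    private String urlString;  // accepted by both patterns
    // private String _entity; // old: accepted (leading underscore); new: violation
    // private int x;          // old: violation (minimum two characters); new: accepted
    // private int mValue;     // old: violation (second char had to be [a-z0-9]); new: accepted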

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/HadoopQueueRunningMain.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/HadoopQueueRunningMain.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/HadoopQueueRunningMain.java
index caf99ad..37fd17b 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/HadoopQueueRunningMain.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/HadoopQueueRunningMain.java
@@ -18,6 +18,9 @@
 
 package org.apache.eagle.hadoop.queue;
 
+import org.apache.eagle.common.config.ConfigOptionParser;
+import org.apache.eagle.hadoop.queue.storm.HadoopQueueMetricPersistBolt;
+import org.apache.eagle.hadoop.queue.storm.HadoopQueueRunningSpout;
 import backtype.storm.LocalCluster;
 import backtype.storm.StormSubmitter;
 import backtype.storm.generated.AlreadyAliveException;
@@ -26,22 +29,19 @@ import backtype.storm.generated.StormTopology;
 import backtype.storm.topology.IRichSpout;
 import backtype.storm.topology.TopologyBuilder;
 import com.typesafe.config.Config;
-import org.apache.eagle.common.config.ConfigOptionParser;
-import org.apache.eagle.hadoop.queue.storm.HadoopQueueMetricPersistBolt;
-import org.apache.eagle.hadoop.queue.storm.HadoopQueueRunningSpout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class HadoopQueueRunningMain {
 
-    public final static String PARSER_TASK_NUM = "topology.numOfParserTasks";
-    public final static String TOTAL_WORKER_NUM = "topology.numOfTotalWorkers";
-    public final static String TOPOLOGY_NAME = "topology.name";
-    public final static String LOCAL_MODE = "topology.localMode";
+    public static final String PARSER_TASK_NUM = "topology.numOfParserTasks";
+    public static final String TOTAL_WORKER_NUM = "topology.numOfTotalWorkers";
+    public static final String TOPOLOGY_NAME = "topology.name";
+    public static final String LOCAL_MODE = "topology.localMode";
 
     private static final Logger LOG = LoggerFactory.getLogger(HadoopQueueRunningMain.class);
 
-    public static void main(String [] args) {
+    public static void main(String[] args) {
         //System.setProperty("config.resource", "/application.conf");
         //Config config = ConfigFactory.load();
         Config config = null;
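
The `final static` to `static final` reordering repeated throughout this commit follows the modifier order recommended by the Java Language Specification, presumably enforced here via checkstyle's ModifierOrder check. For illustration:

    public static final String TOPOLOGY_NAME = "topology.name"; // JLS order: visibility, static, final
    // public final static String TOPOLOGY_NAME = ...;          // same semantics, but flagged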

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopClusterConstants.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopClusterConstants.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopClusterConstants.java
index 324489d..9318a35 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopClusterConstants.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopClusterConstants.java
@@ -19,7 +19,7 @@ package org.apache.eagle.hadoop.queue.common;
 
 public class HadoopClusterConstants {
 
-    public enum AggregateFunc{
+    public enum AggregateFunc {
         MAX, AVG
     }
 
@@ -31,31 +31,31 @@ public class HadoopClusterConstants {
         CLUSTER_METRIC, RUNNING_APPS, SCHEDULER
     }
 
-    public static class MetricName{
+    public static class MetricName {
 
         // Metrics from running apps
-        public final static String HADOOP_APPS_ALLOCATED_MB = "hadoop.%s.allocatedmb";
-        public final static String HADOOP_APPS_ALLOCATED_VCORES = "hadoop.%s.allocatedvcores";
-        public final static String HADOOP_APPS_RUNNING_CONTAINERS = "hadoop.%s.runningcontainers";
+        public static final String HADOOP_APPS_ALLOCATED_MB = "hadoop.%s.allocatedmb";
+        public static final String HADOOP_APPS_ALLOCATED_VCORES = "hadoop.%s.allocatedvcores";
+        public static final String HADOOP_APPS_RUNNING_CONTAINERS = "hadoop.%s.runningcontainers";
 
         // metrics from cluster metrics
-        public final static String HADOOP_CLUSTER_NUMPENDING_JOBS = "hadoop.cluster.numpendingjobs";
-        public final static String HADOOP_CLUSTER_ALLOCATED_MEMORY = "hadoop.cluster.allocatedmemory";
-        public final static String HADOOP_CLUSTER_TOTAL_MEMORY = "hadoop.cluster.totalmemory";
-        public final static String HADOOP_CLUSTER_AVAILABLE_MEMORY = "hadoop.cluster.availablememory";
-        public final static String HADOOP_CLUSTER_RESERVED_MEMORY = "hadoop.cluster.reservedmemory";
+        public static final String HADOOP_CLUSTER_NUMPENDING_JOBS = "hadoop.cluster.numpendingjobs";
+        public static final String HADOOP_CLUSTER_ALLOCATED_MEMORY = "hadoop.cluster.allocatedmemory";
+        public static final String HADOOP_CLUSTER_TOTAL_MEMORY = "hadoop.cluster.totalmemory";
+        public static final String HADOOP_CLUSTER_AVAILABLE_MEMORY = "hadoop.cluster.availablememory";
+        public static final String HADOOP_CLUSTER_RESERVED_MEMORY = "hadoop.cluster.reservedmemory";
 
         // metrics from scheduler info
-        public final static String HADOOP_CLUSTER_CAPACITY = "hadoop.cluster.capacity";
-        public final static String HADOOP_CLUSTER_USED_CAPACITY = "hadoop.cluster.usedcapacity";
+        public static final String HADOOP_CLUSTER_CAPACITY = "hadoop.cluster.capacity";
+        public static final String HADOOP_CLUSTER_USED_CAPACITY = "hadoop.cluster.usedcapacity";
 
-        public final static String HADOOP_QUEUE_NUMPENDING_JOBS = "hadoop.queue.numpendingjobs";
-        public final static String HADOOP_QUEUE_USED_CAPACITY = "hadoop.queue.usedcapacity";
-        public final static String HADOOP_QUEUE_USED_CAPACITY_RATIO = "hadoop.queue.usedcapacityratio";
+        public static final String HADOOP_QUEUE_NUMPENDING_JOBS = "hadoop.queue.numpendingjobs";
+        public static final String HADOOP_QUEUE_USED_CAPACITY = "hadoop.queue.usedcapacity";
+        public static final String HADOOP_QUEUE_USED_CAPACITY_RATIO = "hadoop.queue.usedcapacityratio";
 
-        public final static String HADOOP_USER_NUMPENDING_JOBS = "hadoop.user.numpendingjobs";
-        public final static String HADOOP_USER_USED_MEMORY = "hadoop.user.usedmemory";
-        public final static String HADOOP_USER_USED_MEMORY_RATIO = "hadoop.user.usedmemoryratio";
+        public static final String HADOOP_USER_NUMPENDING_JOBS = "hadoop.user.numpendingjobs";
+        public static final String HADOOP_USER_USED_MEMORY = "hadoop.user.usedmemory";
+        public static final String HADOOP_USER_USED_MEMORY_RATIO = "hadoop.user.usedmemoryratio";
 
     }
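
The `%s` placeholders in the running-app metric names are resolved at emit time; RunningAppParseListener (further down) fills in the aggregation level via String.format. A short illustration, assuming the QUEUE level's tag constant resolves to "queue":

    // "hadoop.%s.allocatedmb" + aggregation level -> concrete metric name
    String metricName = String.format(
            HadoopClusterConstants.MetricName.HADOOP_APPS_ALLOCATED_MB, "queue");
    // metricName == "hadoop.queue.allocatedmb"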
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopYarnResourceUtils.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopYarnResourceUtils.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopYarnResourceUtils.java
index 2802449..3f6606a 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopYarnResourceUtils.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/HadoopYarnResourceUtils.java
@@ -18,9 +18,9 @@
 
 package org.apache.eagle.hadoop.queue.common;
 
-import com.typesafe.config.Config;
 import org.apache.eagle.jpm.util.Constants;
 import org.apache.eagle.jpm.util.resourcefetch.connection.InputStreamUtils;
+import com.typesafe.config.Config;
 import org.codehaus.jackson.JsonParser;
 import org.codehaus.jackson.map.ObjectMapper;
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/YarnClusterResourceURLBuilder.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/YarnClusterResourceURLBuilder.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/YarnClusterResourceURLBuilder.java
index 7fd275b..c71a2b1 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/YarnClusterResourceURLBuilder.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/common/YarnClusterResourceURLBuilder.java
@@ -20,10 +20,10 @@ package org.apache.eagle.hadoop.queue.common;
 
 public class YarnClusterResourceURLBuilder {
 
-    private final static String CLUSTER_SCHEDULER_API_URL = "ws/v1/cluster/scheduler";
-    private final static String CLUSTER_METRICS_API_URL = "ws/v1/cluster/metrics";
-    private final static String CLUSTER_APPS_API_URL = "ws/v1/cluster/apps";
-    private final static String ANONYMOUS_PARAMETER = "anonymous=true";
+    private static final String CLUSTER_SCHEDULER_API_URL = "ws/v1/cluster/scheduler";
+    private static final String CLUSTER_METRICS_API_URL = "ws/v1/cluster/metrics";
+    private static final String CLUSTER_APPS_API_URL = "ws/v1/cluster/apps";
+    private static final String ANONYMOUS_PARAMETER = "anonymous=true";
 
     public static String buildSchedulerInfoURL(String urlBase) {
         return urlBase + CLUSTER_SCHEDULER_API_URL + "?" + ANONYMOUS_PARAMETER;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsCrawler.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsCrawler.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsCrawler.java
index 5cf44dc..ac2f0f5 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsCrawler.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsCrawler.java
@@ -18,11 +18,11 @@
 
 package org.apache.eagle.hadoop.queue.crawler;
 
-import backtype.storm.spout.SpoutOutputCollector;
 import org.apache.eagle.hadoop.queue.common.HadoopYarnResourceUtils;
 import org.apache.eagle.hadoop.queue.common.YarnClusterResourceURLBuilder;
-import org.apache.eagle.hadoop.queue.model.clusterMetrics.ClusterMetrics;
-import org.apache.eagle.hadoop.queue.model.clusterMetrics.ClusterMetricsWrapper;
+import org.apache.eagle.hadoop.queue.model.cluster.ClusterMetrics;
+import org.apache.eagle.hadoop.queue.model.cluster.ClusterMetricsWrapper;
+import backtype.storm.spout.SpoutOutputCollector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,36 +30,36 @@ import java.io.IOException;
 
 public class ClusterMetricsCrawler implements Runnable {
 
-	private final static Logger logger = LoggerFactory.getLogger(ClusterMetricsCrawler.class);
-	private ClusterMetricsParseListener listener;
-	private String urlString;
+    private static final Logger logger = LoggerFactory.getLogger(ClusterMetricsCrawler.class);
+    private ClusterMetricsParseListener listener;
+    private String urlString;
 
-	public ClusterMetricsCrawler(String site, String urlBase, SpoutOutputCollector collector) {
-		listener = new ClusterMetricsParseListener(site, collector);
-		urlString = YarnClusterResourceURLBuilder.buildClusterMetricsURL(urlBase);
-	}
+    public ClusterMetricsCrawler(String site, String urlBase, SpoutOutputCollector collector) {
+        listener = new ClusterMetricsParseListener(site, collector);
+        urlString = YarnClusterResourceURLBuilder.buildClusterMetricsURL(urlBase);
+    }
 
-	@Override
-	public void run() {
-		try {
-			logger.info("Start to crawl cluster metrics from " + this.urlString);
-			ClusterMetricsWrapper metricsWrapper = (ClusterMetricsWrapper) HadoopYarnResourceUtils.getObjectFromStreamWithGzip(urlString, ClusterMetricsWrapper.class);
-			ClusterMetrics metrics = metricsWrapper.getClusterMetrics();
-			if(metrics == null) {
-				logger.error("Failed to crawl cluster metrics");
-			} else {
-				long currentTimestamp = System.currentTimeMillis();
-				listener.onMetric(metrics, currentTimestamp);
-			}
-		} catch (IOException e) {
-			logger.error(e.getMessage());
-			if(logger.isDebugEnabled()) {
-				logger.trace(e.getMessage(), e);
-			}
-		} catch (Exception e) {
-			logger.error(e.getMessage(), e);
-		} finally {
-			listener.flush();
-		}
-	}
+    @Override
+    public void run() {
+        try {
+            logger.info("Start to crawl cluster metrics from " + this.urlString);
+            ClusterMetricsWrapper metricsWrapper = (ClusterMetricsWrapper) HadoopYarnResourceUtils.getObjectFromStreamWithGzip(urlString, ClusterMetricsWrapper.class);
+            ClusterMetrics metrics = metricsWrapper.getClusterMetrics();
+            if (metrics == null) {
+                logger.error("Failed to crawl cluster metrics");
+            } else {
+                long currentTimestamp = System.currentTimeMillis();
+                listener.onMetric(metrics, currentTimestamp);
+            }
+        } catch (IOException e) {
+            logger.error(e.getMessage());
+            if (logger.isDebugEnabled()) {
+                logger.trace(e.getMessage(), e);
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        } finally {
+            listener.flush();
+        }
+    }
 }
\ No newline at end of file
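
ClusterMetricsCrawler is a plain Runnable, presumably scheduled by HadoopQueueRunningSpout (also touched in this commit). A hypothetical standalone wiring, assuming `collector` is the spout's SpoutOutputCollector and the ResourceManager URL is illustrative:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    // Hypothetical wiring; inside the topology the spout drives the crawl.
    ScheduledExecutorService pool = Executors.newSingleThreadScheduledExecutor();
    Runnable crawler = new ClusterMetricsCrawler("sandbox", "http://rm-host:8088/", collector);
    pool.scheduleAtFixedRate(crawler, 0, 60, TimeUnit.SECONDS);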

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsParseListener.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsParseListener.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsParseListener.java
index 693fdd1..de93d1e 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsParseListener.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/ClusterMetricsParseListener.java
@@ -21,135 +21,137 @@
  */
 package org.apache.eagle.hadoop.queue.crawler;
 
-import backtype.storm.spout.SpoutOutputCollector;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.eagle.common.DateTimeUtil;
 import org.apache.eagle.dataproc.impl.storm.ValuesArray;
-import org.apache.eagle.hadoop.queue.model.clusterMetrics.ClusterMetrics;
-import org.apache.eagle.hadoop.queue.storm.HadoopQueueMessageId;
 import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants;
-import org.apache.eagle.log.entity.GenericMetricEntity;
+import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.AggregateFunc;
 import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.DataSource;
 import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.DataType;
 import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.MetricName;
-import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.AggregateFunc;
+import org.apache.eagle.hadoop.queue.model.cluster.ClusterMetrics;
+import org.apache.eagle.hadoop.queue.storm.HadoopQueueMessageId;
+import org.apache.eagle.log.entity.GenericMetricEntity;
 
+import backtype.storm.spout.SpoutOutputCollector;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import java.util.*;
 import java.util.stream.Collectors;
 
 public class ClusterMetricsParseListener {
 
-	private String site;
-	private SpoutOutputCollector collector;
-
-	private long maxTimestamp;
-	private Map<MetricKey, GenericMetricEntity> clusterMetricEntities = new HashMap<>();
-	private Map<MetricKey, Integer> clusterMetricCounts = new HashMap<>();
-
-	private final static long AGGREGATE_INTERVAL = DateTimeUtil.ONEMINUTE;
-	private final static long HOLD_TIME_WINDOW = 2 * DateTimeUtil.ONEMINUTE;
-
-	public ClusterMetricsParseListener(String site, SpoutOutputCollector collector){
-		reset();
-		this.site = site;
-		this.collector = collector;
-	}
-
-	private void createMetric(String metricName, long timestamp, double value, HadoopClusterConstants.AggregateFunc aggFunc){
-		if (timestamp > maxTimestamp) {
-			maxTimestamp = timestamp;
-		}
-		timestamp = timestamp / AGGREGATE_INTERVAL * AGGREGATE_INTERVAL;
-		MetricKey key = new MetricKey(metricName, timestamp);
-		GenericMetricEntity entity = clusterMetricEntities.get(key);
-		if (entity == null) {
-			entity = new GenericMetricEntity();
-			entity.setTags(buildMetricTags());
-			entity.setTimestamp(timestamp);
-			entity.setPrefix(metricName);
-			entity.setValue(new double[]{0.0});
-			clusterMetricEntities.put(key, entity);
-		}
-		if (clusterMetricCounts.get(key) == null){
-			clusterMetricCounts.put(key, 0);
-		}
-		updateEntityAggValue(entity, aggFunc, value, clusterMetricCounts.get(key));
-		clusterMetricCounts.put(key, clusterMetricCounts.get(key) + 1);
-	}
-
-	public void onMetric(ClusterMetrics metrics, long currentTimestamp) {
-		createMetric(MetricName.HADOOP_CLUSTER_NUMPENDING_JOBS, currentTimestamp, metrics.getAppsPending(), AggregateFunc.MAX);
-		createMetric(MetricName.HADOOP_CLUSTER_ALLOCATED_MEMORY, currentTimestamp, metrics.getAllocatedMB(), AggregateFunc.AVG);
-		createMetric(MetricName.HADOOP_CLUSTER_TOTAL_MEMORY, currentTimestamp, metrics.getTotalMB(), AggregateFunc.MAX);
-		createMetric(MetricName.HADOOP_CLUSTER_AVAILABLE_MEMORY, currentTimestamp, metrics.getAvailableMB(), AggregateFunc.AVG);
-		createMetric(MetricName.HADOOP_CLUSTER_RESERVED_MEMORY, currentTimestamp, metrics.getReservedMB(), AggregateFunc.AVG);
-	}
-
-	public void flush() {
-		HadoopQueueMessageId messageId = new HadoopQueueMessageId(DataType.METRIC, DataSource.CLUSTER_METRIC, System.currentTimeMillis());
-		List<GenericMetricEntity> metrics = new ArrayList<>(clusterMetricEntities.values());
-		this.collector.emit(new ValuesArray(DataType.METRIC.name(), metrics), messageId);
-		reset();
-	}
-
-	private void reset() {
-		maxTimestamp = 0;
-		clearOldCache();
-	}
-
-	private void clearOldCache() {
-		List<MetricKey> removedkeys = clusterMetricEntities.keySet().stream().filter(key -> key.createTime < maxTimestamp - HOLD_TIME_WINDOW).collect(Collectors.toList());
-
-		for (MetricKey key : removedkeys) {
-			clusterMetricEntities.remove(key);
-		}
-	}
-
-	private Map<String, String> buildMetricTags(){
-		Map<String,String> tags = new HashMap<String, String>();
-		tags.put(HadoopClusterConstants.TAG_SITE, site);
-		return tags;
-	}
-
-	private void updateEntityAggValue(GenericMetricEntity entity,
-									  HadoopClusterConstants.AggregateFunc aggFunc,
-									  double value,
-									  double count) {
-		double lastValue = entity.getValue()[0];
-		switch (aggFunc){
-			case MAX:
-				entity.setValue(new double[]{Math.max(lastValue, value)});
-				return;
-			case AVG:
-				long avgValue = (long) ((lastValue * count + value) / (count +1));
-				entity.setValue(new double[]{avgValue});
-				return;
-		}
-	}
-
-	private class MetricKey {
-		String metricName;
-		Long createTime;
-
-		public MetricKey(String metricName, Long timestamp) {
-			this.metricName = metricName;
-			this.createTime = timestamp;
-		}
-
-		public boolean equals(Object obj) {
-			if (obj instanceof MetricKey) {
-				MetricKey key = (MetricKey) obj;
-				if (key == null) {
-					return false;
-				}
-				return Objects.equals(metricName, key.metricName) & Objects.equals(createTime, key.createTime);
-			}
-			return false;
-		}
-
-		public int hashCode() {
-			return new HashCodeBuilder().append(metricName).append(createTime).toHashCode();
-		}
-
-	}
+    private String site;
+    private SpoutOutputCollector collector;
+
+    private long maxTimestamp;
+    private Map<MetricKey, GenericMetricEntity> clusterMetricEntities = new HashMap<>();
+    private Map<MetricKey, Integer> clusterMetricCounts = new HashMap<>();
+
+    private static final long AGGREGATE_INTERVAL = DateTimeUtil.ONEMINUTE;
+    private static final long HOLD_TIME_WINDOW = 2 * DateTimeUtil.ONEMINUTE;
+
+    public ClusterMetricsParseListener(String site, SpoutOutputCollector collector) {
+        reset();
+        this.site = site;
+        this.collector = collector;
+    }
+
+    private void createMetric(String metricName, long timestamp, double value, HadoopClusterConstants.AggregateFunc aggFunc) {
+        if (timestamp > maxTimestamp) {
+            maxTimestamp = timestamp;
+        }
+        timestamp = timestamp / AGGREGATE_INTERVAL * AGGREGATE_INTERVAL;
+        MetricKey key = new MetricKey(metricName, timestamp);
+        GenericMetricEntity entity = clusterMetricEntities.get(key);
+        if (entity == null) {
+            entity = new GenericMetricEntity();
+            entity.setTags(buildMetricTags());
+            entity.setTimestamp(timestamp);
+            entity.setPrefix(metricName);
+            entity.setValue(new double[] {0.0});
+            clusterMetricEntities.put(key, entity);
+        }
+        if (clusterMetricCounts.get(key) == null) {
+            clusterMetricCounts.put(key, 0);
+        }
+        updateEntityAggValue(entity, aggFunc, value, clusterMetricCounts.get(key));
+        clusterMetricCounts.put(key, clusterMetricCounts.get(key) + 1);
+    }
+
+    public void onMetric(ClusterMetrics metrics, long currentTimestamp) {
+        createMetric(MetricName.HADOOP_CLUSTER_NUMPENDING_JOBS, currentTimestamp, metrics.getAppsPending(), AggregateFunc.MAX);
+        createMetric(MetricName.HADOOP_CLUSTER_ALLOCATED_MEMORY, currentTimestamp, metrics.getAllocatedMB(), AggregateFunc.AVG);
+        createMetric(MetricName.HADOOP_CLUSTER_TOTAL_MEMORY, currentTimestamp, metrics.getTotalMB(), AggregateFunc.MAX);
+        createMetric(MetricName.HADOOP_CLUSTER_AVAILABLE_MEMORY, currentTimestamp, metrics.getAvailableMB(), AggregateFunc.AVG);
+        createMetric(MetricName.HADOOP_CLUSTER_RESERVED_MEMORY, currentTimestamp, metrics.getReservedMB(), AggregateFunc.AVG);
+    }
+
+    public void flush() {
+        HadoopQueueMessageId messageId = new HadoopQueueMessageId(DataType.METRIC, DataSource.CLUSTER_METRIC, System.currentTimeMillis());
+        List<GenericMetricEntity> metrics = new ArrayList<>(clusterMetricEntities.values());
+        this.collector.emit(new ValuesArray(DataType.METRIC.name(), metrics), messageId);
+        reset();
+    }
+
+    private void reset() {
+        maxTimestamp = 0;
+        clearOldCache();
+    }
+
+    private void clearOldCache() {
+        List<MetricKey> removedkeys = clusterMetricEntities.keySet().stream().filter(key -> key.createTime < maxTimestamp - HOLD_TIME_WINDOW).collect(Collectors.toList());
+
+        for (MetricKey key : removedkeys) {
+            clusterMetricEntities.remove(key);
+        }
+    }
+
+    private Map<String, String> buildMetricTags() {
+        Map<String, String> tags = new HashMap<String, String>();
+        tags.put(HadoopClusterConstants.TAG_SITE, site);
+        return tags;
+    }
+
+    private void updateEntityAggValue(GenericMetricEntity entity,
+                                      HadoopClusterConstants.AggregateFunc aggFunc,
+                                      double value,
+                                      double count) {
+        double lastValue = entity.getValue()[0];
+        switch (aggFunc) {
+            case MAX:
+                entity.setValue(new double[] {Math.max(lastValue, value)});
+                return;
+            case AVG:
+                long avgValue = (long) ((lastValue * count + value) / (count + 1));
+                entity.setValue(new double[] {avgValue});
+                return;
+            default:
+                throw new IllegalArgumentException("Illegal aggregation function: " + aggFunc);
+        }
+    }
+
+    private class MetricKey {
+        String metricName;
+        Long createTime;
+
+        public MetricKey(String metricName, Long timestamp) {
+            this.metricName = metricName;
+            this.createTime = timestamp;
+        }
+
+        public boolean equals(Object obj) {
+            if (obj instanceof MetricKey) {
+                MetricKey key = (MetricKey) obj;
+                if (key == null) {
+                    return false;
+                }
+                return Objects.equals(metricName, key.metricName) & Objects.equals(createTime, key.createTime);
+            }
+            return false;
+        }
+
+        public int hashCode() {
+            return new HashCodeBuilder().append(metricName).append(createTime).toHashCode();
+        }
+
+    }
 }
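
Two small observations on the listener above. First, MetricKey.equals still uses the non-short-circuiting `&`; both operands are cheap, side-effect-free Objects.equals calls, so the result matches `&&`, though `&&` would be the idiomatic choice. Second, the AVG branch maintains a running mean incrementally, newAvg = (oldAvg * n + x) / (n + 1), truncated to a long before storage. A self-contained sketch of that recurrence with made-up samples:

    // Incremental mean as in updateEntityAggValue; the long cast truncates, as in the committed code.
    double avg = 0.0;
    long count = 0;
    for (double sample : new double[] {1024.0, 2048.0, 4096.0}) {
        avg = (long) ((avg * count + sample) / (count + 1));
        count++;
    }
    // avg == 2389.0 (the exact mean is 2389.33...; truncation drops the fraction each round)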

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppParseListener.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppParseListener.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppParseListener.java
index ab56508..364a1a7 100755
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppParseListener.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppParseListener.java
@@ -21,103 +21,106 @@
  */
 package org.apache.eagle.hadoop.queue.crawler;
 
-import backtype.storm.spout.SpoutOutputCollector;
 import org.apache.eagle.common.DateTimeUtil;
 import org.apache.eagle.dataproc.impl.storm.ValuesArray;
+import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants;
+import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.MetricName;
 import org.apache.eagle.hadoop.queue.model.applications.App;
 import org.apache.eagle.hadoop.queue.model.applications.Apps;
 import org.apache.eagle.hadoop.queue.storm.HadoopQueueMessageId;
 import org.apache.eagle.log.entity.GenericMetricEntity;
-import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.MetricName;
-import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants;
-
+import backtype.storm.spout.SpoutOutputCollector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.lang.reflect.Method;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 public class RunningAppParseListener {
 
-	private final static Logger logger = LoggerFactory.getLogger(RunningAppParseListener.class);
-	private final static long AGGREGATE_INTERVAL = DateTimeUtil.ONEMINUTE;
+    private static final Logger logger = LoggerFactory.getLogger(RunningAppParseListener.class);
+    private static final long AGGREGATE_INTERVAL = DateTimeUtil.ONEMINUTE;
 
-	@SuppressWarnings("serial")
-	public static HashMap<String, String> metrics = new HashMap<String, String>() {
-		{
-			put(MetricName.HADOOP_APPS_ALLOCATED_MB, "getAllocatedMB");
-		 	put(MetricName.HADOOP_APPS_ALLOCATED_VCORES, "getAllocatedVCores");
-		 	put(MetricName.HADOOP_APPS_RUNNING_CONTAINERS, "getRunningContainers");
-		}
-	};
+    @SuppressWarnings("serial")
+    public static HashMap<String, String> metrics = new HashMap<String, String>() {
+        {
+            put(MetricName.HADOOP_APPS_ALLOCATED_MB, "getAllocatedMB");
+            put(MetricName.HADOOP_APPS_ALLOCATED_VCORES, "getAllocatedVCores");
+            put(MetricName.HADOOP_APPS_RUNNING_CONTAINERS, "getRunningContainers");
+        }
+    };
 
-	private String site;
-	private SpoutOutputCollector collector;
-	private Map<String, GenericMetricEntity> appMetricEntities = new HashMap<>();
+    private String site;
+    private SpoutOutputCollector collector;
+    private Map<String, GenericMetricEntity> appMetricEntities = new HashMap<>();
 
-	public RunningAppParseListener(String site, SpoutOutputCollector collector){
-		this.site = site;
-		this.collector = collector;
-	}
+    public RunningAppParseListener(String site, SpoutOutputCollector collector) {
+        this.site = site;
+        this.collector = collector;
+    }
 
-	public void flush() {
-		logger.info("start sending app metrics, size: " + appMetricEntities.size());
-		HadoopQueueMessageId messageId = new HadoopQueueMessageId(HadoopClusterConstants.DataType.METRIC, HadoopClusterConstants.DataSource.RUNNING_APPS, System.currentTimeMillis());
-		List<GenericMetricEntity> metrics = new ArrayList<>(appMetricEntities.values());
-		collector.emit(new ValuesArray(HadoopClusterConstants.DataType.METRIC.name(), metrics), messageId);
-		appMetricEntities.clear();
-	}
+    public void flush() {
+        logger.info("start sending app metrics, size: " + appMetricEntities.size());
+        HadoopQueueMessageId messageId = new HadoopQueueMessageId(HadoopClusterConstants.DataType.METRIC, HadoopClusterConstants.DataSource.RUNNING_APPS, System.currentTimeMillis());
+        List<GenericMetricEntity> metrics = new ArrayList<>(appMetricEntities.values());
+        collector.emit(new ValuesArray(HadoopClusterConstants.DataType.METRIC.name(), metrics), messageId);
+        appMetricEntities.clear();
+    }
 
-	private Map<String, String> buildMetricTags(AggLevel level, Map<String, String> tags){
-		Map<String, String> newTags = new HashMap<String, String>();
-		newTags.put(HadoopClusterConstants.TAG_SITE, site);
-		tags.entrySet().stream().filter(entry -> level.level.equalsIgnoreCase(entry.getKey())).forEach(entry -> {
-			newTags.put(entry.getKey(), entry.getValue());
-		});
-		return newTags;
-	}
+    private Map<String, String> buildMetricTags(AggLevel level, Map<String, String> tags) {
+        Map<String, String> newTags = new HashMap<String, String>();
+        newTags.put(HadoopClusterConstants.TAG_SITE, site);
+        tags.entrySet().stream().filter(entry -> level.level.equalsIgnoreCase(entry.getKey())).forEach(entry -> {
+            newTags.put(entry.getKey(), entry.getValue());
+        });
+        return newTags;
+    }
 
-	private void createMetric(String metricName, Map<String, String> tags, long timestamp, int value) {
-		String key = metricName + tags.toString() + " " + timestamp;
-		GenericMetricEntity entity = appMetricEntities.get(key);
-		if (entity == null) {
-			entity = new GenericMetricEntity();
-			entity.setTags(tags);
-			entity.setTimestamp(timestamp);
-			entity.setPrefix(metricName);
-			entity.setValue(new double[]{0.0});
-			appMetricEntities.put(key, entity);
-		}
-		double lastValue = entity.getValue()[0];
-		entity.setValue(new double[]{lastValue + value});
-	}
+    private void createMetric(String metricName, Map<String, String> tags, long timestamp, int value) {
+        String key = metricName + tags.toString() + " " + timestamp;
+        GenericMetricEntity entity = appMetricEntities.get(key);
+        if (entity == null) {
+            entity = new GenericMetricEntity();
+            entity.setTags(tags);
+            entity.setTimestamp(timestamp);
+            entity.setPrefix(metricName);
+            entity.setValue(new double[] {0.0});
+            appMetricEntities.put(key, entity);
+        }
+        double lastValue = entity.getValue()[0];
+        entity.setValue(new double[] {lastValue + value});
+    }
 
-	public void onMetric(Apps apps, long timestamp) throws Exception {
-		timestamp = timestamp / AGGREGATE_INTERVAL * AGGREGATE_INTERVAL;
-		for (App app : apps.getApp()) {
-			Map<String, String> tags = new HashMap<>();
-			tags.put(HadoopClusterConstants.TAG_USER, app.getUser());
-			tags.put(HadoopClusterConstants.TAG_QUEUE, app.getQueue());
-			for (AggLevel level : AggLevel.values()) {
-				Map<String, String> newTags = buildMetricTags(level, tags);
-				for (java.util.Map.Entry<String, String> entry : metrics.entrySet()) {
-					Method method = App.class.getMethod(entry.getValue());
-					Integer value = (Integer) method.invoke(app);
-					String metricName = String.format(entry.getKey(), level.name);
-					createMetric(metricName, newTags, timestamp, value);
-				}
-			}
-		}
-	}
+    public void onMetric(Apps apps, long timestamp) throws Exception {
+        timestamp = timestamp / AGGREGATE_INTERVAL * AGGREGATE_INTERVAL;
+        for (App app : apps.getApp()) {
+            Map<String, String> tags = new HashMap<>();
+            tags.put(HadoopClusterConstants.TAG_USER, app.getUser());
+            tags.put(HadoopClusterConstants.TAG_QUEUE, app.getQueue());
+            for (AggLevel level : AggLevel.values()) {
+                Map<String, String> newTags = buildMetricTags(level, tags);
+                for (java.util.Map.Entry<String, String> entry : metrics.entrySet()) {
+                    Method method = App.class.getMethod(entry.getValue());
+                    Integer value = (Integer) method.invoke(app);
+                    String metricName = String.format(entry.getKey(), level.name);
+                    createMetric(metricName, newTags, timestamp, value);
+                }
+            }
+        }
+    }
 
     private enum AggLevel {
         CLUSTER(HadoopClusterConstants.TAG_CLUSTER, ""),
-		QUEUE(HadoopClusterConstants.TAG_QUEUE, HadoopClusterConstants.TAG_QUEUE),
-		USER(HadoopClusterConstants.TAG_USER, HadoopClusterConstants.TAG_USER);
+        QUEUE(HadoopClusterConstants.TAG_QUEUE, HadoopClusterConstants.TAG_QUEUE),
+        USER(HadoopClusterConstants.TAG_USER, HadoopClusterConstants.TAG_USER);
 
         private String name;
         private String level;
-		AggLevel(String name, String level) {
+
+        AggLevel(String name, String level) {
             this.name = name;
             this.level = level;
         }
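
The listener maps metric-name templates to App getter names and invokes them reflectively. A self-contained sketch of that pattern; the App stub below is hypothetical and far smaller than the real model class:

    import java.lang.reflect.Method;
    import java.util.HashMap;
    import java.util.Map;

    public class ReflectiveMetricSketch {
        // Tiny stand-in for org.apache.eagle.hadoop.queue.model.applications.App.
        public static class App {
            public Integer getAllocatedMB() {
                return 2048;
            }
        }

        public static void main(String[] args) throws Exception {
            Map<String, String> metrics = new HashMap<>();
            metrics.put("hadoop.%s.allocatedmb", "getAllocatedMB");

            App app = new App();
            for (Map.Entry<String, String> entry : metrics.entrySet()) {
                // Same lookup-and-invoke pattern as RunningAppParseListener.onMetric.
                Method getter = App.class.getMethod(entry.getValue());
                Integer value = (Integer) getter.invoke(app);
                System.out.println(String.format(entry.getKey(), "cluster") + " = " + value);
            }
        }
    }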

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppsCrawler.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppsCrawler.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppsCrawler.java
index 55ba41b..3ffd371 100755
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppsCrawler.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/RunningAppsCrawler.java
@@ -18,11 +18,11 @@
 
 package org.apache.eagle.hadoop.queue.crawler;
 
-import backtype.storm.spout.SpoutOutputCollector;
-import org.apache.eagle.hadoop.queue.common.YarnClusterResourceURLBuilder;
-import org.apache.eagle.hadoop.queue.model.applications.Apps;
 import org.apache.eagle.hadoop.queue.common.HadoopYarnResourceUtils;
+import org.apache.eagle.hadoop.queue.common.YarnClusterResourceURLBuilder;
 import org.apache.eagle.hadoop.queue.model.applications.AppsWrapper;
+
+import backtype.storm.spout.SpoutOutputCollector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,37 +30,37 @@ import java.io.IOException;
 
 public class RunningAppsCrawler implements Runnable {
 
-	private final static Logger logger = LoggerFactory.getLogger(RunningAppsCrawler.class);
+    private static final Logger logger = LoggerFactory.getLogger(RunningAppsCrawler.class);
 
-	private RunningAppParseListener listener;
-	private String urlString;
+    private RunningAppParseListener listener;
+    private String urlString;
 
-	public RunningAppsCrawler(String site, String baseUrl, SpoutOutputCollector collector){
-		this.urlString = YarnClusterResourceURLBuilder.buildRunningAppsURL(baseUrl);
-		//this.urlString = YarnClusterResourceURLBuilder.buildFinishedAppsURL(baseUrl);
-		listener = new RunningAppParseListener(site, collector);
-	}
+    public RunningAppsCrawler(String site, String baseUrl, SpoutOutputCollector collector) {
+        this.urlString = YarnClusterResourceURLBuilder.buildRunningAppsURL(baseUrl);
+        //this.urlString = YarnClusterResourceURLBuilder.buildFinishedAppsURL(baseUrl);
+        listener = new RunningAppParseListener(site, collector);
+    }
 
-	@Override
-	public void run() {
-		try {
-			logger.info("Start to crawl app metrics from " + this.urlString);
-			AppsWrapper appsWrapper = (AppsWrapper) HadoopYarnResourceUtils.getObjectFromStreamWithGzip(urlString, AppsWrapper.class);
-			if(appsWrapper == null || appsWrapper.getApps() == null) {
-				logger.error("Failed to crawl running applications with api = " + urlString);
-			} else {
-				long currentTimestamp = System.currentTimeMillis();
-				listener.onMetric(appsWrapper.getApps(), currentTimestamp);
-			}
-		} catch (IOException e) {
-			logger.error(e.getMessage());
-			if(logger.isDebugEnabled()) {
-				logger.trace(e.getMessage(), e);
-			}
-		} catch (Exception e) {
-			logger.error(e.getMessage(), e);
-		} finally {
-			listener.flush();
-		}
-	}
+    @Override
+    public void run() {
+        try {
+            logger.info("Start to crawl app metrics from " + this.urlString);
+            AppsWrapper appsWrapper = (AppsWrapper) HadoopYarnResourceUtils.getObjectFromStreamWithGzip(urlString, AppsWrapper.class);
+            if (appsWrapper == null || appsWrapper.getApps() == null) {
+                logger.error("Failed to crawl running applications with api = " + urlString);
+            } else {
+                long currentTimestamp = System.currentTimeMillis();
+                listener.onMetric(appsWrapper.getApps(), currentTimestamp);
+            }
+        } catch (IOException e) {
+            logger.error(e.getMessage());
+            if (logger.isDebugEnabled()) {
+                logger.trace(e.getMessage(), e);
+            }
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        } finally {
+            listener.flush();
+        }
+    }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoCrawler.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoCrawler.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoCrawler.java
index f7cd5b4..16e644a 100755
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoCrawler.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoCrawler.java
@@ -18,10 +18,11 @@
 
 package org.apache.eagle.hadoop.queue.crawler;
 
-import backtype.storm.spout.SpoutOutputCollector;
-import org.apache.eagle.hadoop.queue.common.YarnClusterResourceURLBuilder;
-import org.apache.eagle.hadoop.queue.model.scheduler.*;
 import org.apache.eagle.hadoop.queue.common.HadoopYarnResourceUtils;
+import org.apache.eagle.hadoop.queue.common.YarnClusterResourceURLBuilder;
+import org.apache.eagle.hadoop.queue.model.scheduler.SchedulerInfo;
+import org.apache.eagle.hadoop.queue.model.scheduler.SchedulerWrapper;
+import backtype.storm.spout.SpoutOutputCollector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -29,38 +30,38 @@ import java.io.IOException;
 
 public class SchedulerInfoCrawler implements Runnable {
 
-	private final static Logger logger = LoggerFactory.getLogger(SchedulerInfoCrawler.class);
+    private static final Logger logger = LoggerFactory.getLogger(SchedulerInfoCrawler.class);
 
-	private SchedulerInfoParseListener listener;
+    private SchedulerInfoParseListener listener;
     private String urlString;
 
-	public SchedulerInfoCrawler(String site, String baseUrl, SpoutOutputCollector collector) {
-		this.urlString = YarnClusterResourceURLBuilder.buildSchedulerInfoURL(baseUrl);
-		this.listener = new SchedulerInfoParseListener(site, collector);
-	}
+    public SchedulerInfoCrawler(String site, String baseUrl, SpoutOutputCollector collector) {
+        this.urlString = YarnClusterResourceURLBuilder.buildSchedulerInfoURL(baseUrl);
+        this.listener = new SchedulerInfoParseListener(site, collector);
+    }
 
-	@Override
-	public void run() {
-		try {
-			//https://some.server.address:50030/ws/v1/cluster/scheduler?anonymous=true
-			logger.info("Start to crawl cluster scheduler queues from " + this.urlString);
-			SchedulerWrapper schedulerWrapper = (SchedulerWrapper) HadoopYarnResourceUtils.getObjectFromStreamWithGzip(urlString, SchedulerWrapper.class);
-			if (schedulerWrapper == null || schedulerWrapper.getScheduler() == null) {
-				logger.error("Failed to crawl scheduler info with url = " + this.urlString);
-			} else {
-				SchedulerInfo scheduler = schedulerWrapper.getScheduler().getSchedulerInfo();
-				logger.info("Crawled " + scheduler.getQueues().getQueue().size() + " queues");
-				long currentTimestamp = System.currentTimeMillis();
-				listener.onMetric(scheduler, currentTimestamp);
-			}
-		} catch (IOException e) {
-			logger.error("Got IO exception while connecting to "+this.urlString + " : "+ e.getMessage());
-		} catch (Exception e) {
-			logger.error("Got exception while crawling queues:" + e.getMessage(), e);
-		} catch (Throwable e) {
-			logger.error("Got throwable exception while crawling queues:" + e.getMessage(), e);
-		} finally {
-			listener.flush();
-		}
-	}
+    @Override
+    public void run() {
+        try {
+            //https://some.server.address:50030/ws/v1/cluster/scheduler?anonymous=true
+            logger.info("Start to crawl cluster scheduler queues from " + this.urlString);
+            SchedulerWrapper schedulerWrapper = (SchedulerWrapper) HadoopYarnResourceUtils.getObjectFromStreamWithGzip(urlString, SchedulerWrapper.class);
+            if (schedulerWrapper == null || schedulerWrapper.getScheduler() == null) {
+                logger.error("Failed to crawl scheduler info with url = " + this.urlString);
+            } else {
+                SchedulerInfo scheduler = schedulerWrapper.getScheduler().getSchedulerInfo();
+                logger.info("Crawled " + scheduler.getQueues().getQueue().size() + " queues");
+                long currentTimestamp = System.currentTimeMillis();
+                listener.onMetric(scheduler, currentTimestamp);
+            }
+        } catch (IOException e) {
+            logger.error("Got IO exception while connecting to " + this.urlString + " : " + e.getMessage());
+        } catch (Exception e) {
+            logger.error("Got exception while crawling queues:" + e.getMessage(), e);
+        } catch (Throwable e) {
+            logger.error("Got throwable exception while crawling queues:" + e.getMessage(), e);
+        } finally {
+            listener.flush();
+        }
+    }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoParseListener.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoParseListener.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoParseListener.java
index 82f433e..214e7f6 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoParseListener.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/crawler/SchedulerInfoParseListener.java
@@ -18,23 +18,26 @@
 
 package org.apache.eagle.hadoop.queue.crawler;
 
-import backtype.storm.spout.SpoutOutputCollector;
 import org.apache.eagle.dataproc.impl.storm.ValuesArray;
 import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants;
 import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.MetricName;
 import org.apache.eagle.hadoop.queue.model.scheduler.*;
-import org.apache.eagle.hadoop.queue.model.scheduler.Queue;
 import org.apache.eagle.hadoop.queue.storm.HadoopQueueMessageId;
 import org.apache.eagle.log.entity.GenericMetricEntity;
+
+import backtype.storm.spout.SpoutOutputCollector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 
 public class SchedulerInfoParseListener {
 
-    private final static Logger LOG = LoggerFactory.getLogger(SchedulerInfoParseListener.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SchedulerInfoParseListener.class);
     //private final static long AGGREGATE_INTERVAL = DateTimeUtil.ONEMINUTE;
     //private int MAX_CACHE_COUNT = 1000;
 
@@ -50,10 +53,10 @@ public class SchedulerInfoParseListener {
     }
 
     public void onMetric(SchedulerInfo scheduler, long currentTimestamp) throws Exception {
-        Map<String,String> tags = buildMetricTags(null, null);
+        Map<String, String> tags = buildMetricTags(null, null);
         createMetric(MetricName.HADOOP_CLUSTER_CAPACITY, tags, currentTimestamp, scheduler.getCapacity());
         createMetric(MetricName.HADOOP_CLUSTER_USED_CAPACITY, tags, currentTimestamp, scheduler.getUsedCapacity());
-        for(Queue queue : scheduler.getQueues().getQueue()) {
+        for (Queue queue : scheduler.getQueues().getQueue()) {
             createQueues(queue, currentTimestamp, scheduler, null);
         }
     }
@@ -74,7 +77,7 @@ public class SchedulerInfoParseListener {
     }
 
     private Map<String, String> buildMetricTags(String queueName, String parentQueueName) {
-        Map<String,String> tags = new HashMap<>();
+        Map<String, String> tags = new HashMap<>();
         tags.put(HadoopClusterConstants.TAG_SITE, this.site);
         if (queueName != null) {
             tags.put(HadoopClusterConstants.TAG_QUEUE, queueName);
@@ -85,12 +88,12 @@ public class SchedulerInfoParseListener {
         return tags;
     }
 
-    private void createMetric(String metricName, Map<String,String> tags,long timestamp, double value) throws Exception {
+    private void createMetric(String metricName, Map<String, String> tags, long timestamp, double value) throws Exception {
         GenericMetricEntity e = new GenericMetricEntity();
         e.setPrefix(metricName);
         e.setTimestamp(timestamp);
         e.setTags(tags);
-        e.setValue(new double[]{value});
+        e.setValue(new double[] {value});
         this.metricEntities.add(e);
     }
 
@@ -111,7 +114,7 @@ public class SchedulerInfoParseListener {
         _entity.setTimestamp(currentTimestamp);
 
         List<UserWrapper> userList = new ArrayList<>();
-        if (queue.getUsers() != null && queue.getUsers().getUser() != null )  {
+        if (queue.getUsers() != null && queue.getUsers().getUser() != null) {
             for (User user : queue.getUsers().getUser()) {
                 UserWrapper newUser = new UserWrapper(user);
                 userList.add(newUser);
@@ -122,20 +125,21 @@ public class SchedulerInfoParseListener {
         runningQueueAPIEntities.add(_entity);
 
         createMetric(MetricName.HADOOP_QUEUE_NUMPENDING_JOBS, _tags, currentTimestamp, queue.getNumPendingApplications());
-        createMetric(MetricName.HADOOP_QUEUE_USED_CAPACITY,_tags,currentTimestamp,queue.getAbsoluteUsedCapacity());
-        if(queue.getAbsoluteCapacity() == 0) {
-            createMetric(MetricName.HADOOP_QUEUE_USED_CAPACITY_RATIO,_tags,currentTimestamp,0);
+        createMetric(MetricName.HADOOP_QUEUE_USED_CAPACITY, _tags, currentTimestamp, queue.getAbsoluteUsedCapacity());
+        if (queue.getAbsoluteCapacity() == 0) {
+            createMetric(MetricName.HADOOP_QUEUE_USED_CAPACITY_RATIO, _tags, currentTimestamp, 0);
         } else {
-            createMetric(MetricName.HADOOP_QUEUE_USED_CAPACITY_RATIO,_tags,currentTimestamp,queue.getAbsoluteUsedCapacity()/queue.getAbsoluteCapacity());
+            createMetric(MetricName.HADOOP_QUEUE_USED_CAPACITY_RATIO, _tags, currentTimestamp, queue.getAbsoluteUsedCapacity() / queue.getAbsoluteCapacity());
         }
 
-        if (queue.getUsers() != null && queue.getUsers().getUser() != null )  {
+        if (queue.getUsers() != null && queue.getUsers().getUser() != null) {
             for (User user : queue.getUsers().getUser()) {
                 Map<String, String> userTags = new HashMap<>(_tags);
                 userTags.put(HadoopClusterConstants.TAG_USER, user.getUsername());
-                createMetric(HadoopClusterConstants.MetricName.HADOOP_USER_NUMPENDING_JOBS,userTags,currentTimestamp,user.getNumPendingApplications());
-                createMetric(HadoopClusterConstants.MetricName.HADOOP_USER_USED_MEMORY,userTags,currentTimestamp,user.getResourcesUsed().getMemory());
-                createMetric(HadoopClusterConstants.MetricName.HADOOP_USER_USED_MEMORY_RATIO,userTags,currentTimestamp,((double)user.getResourcesUsed().getMemory()) / queue.getResourcesUsed().getMemory());
+                createMetric(HadoopClusterConstants.MetricName.HADOOP_USER_NUMPENDING_JOBS, userTags, currentTimestamp, user.getNumPendingApplications());
+                createMetric(HadoopClusterConstants.MetricName.HADOOP_USER_USED_MEMORY, userTags, currentTimestamp, user.getResourcesUsed().getMemory());
+                createMetric(HadoopClusterConstants.MetricName.HADOOP_USER_USED_MEMORY_RATIO, userTags, currentTimestamp,
+                    ((double) user.getResourcesUsed().getMemory()) / queue.getResourcesUsed().getMemory());
             }
         }
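
Note the asymmetry above: the queue-level capacity ratio is guarded against a zero denominator, while the user-level memory ratio is not. Since the latter is a double division, a queue reporting zero used memory yields Infinity (or NaN for 0.0 / 0), not an exception. A short illustration with assumed values:

    double used = 25.0;
    double capacity = 0.0;
    double queueRatio = capacity == 0 ? 0 : used / capacity; // guarded -> 0
    double userRatio = 1024.0 / 0.0;                         // unguarded -> Infinity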
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/exceptions/HadoopQueueFetcherException.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/exceptions/HadoopQueueFetcherException.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/exceptions/HadoopQueueFetcherException.java
index 0bce87d..416bbf3 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/exceptions/HadoopQueueFetcherException.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/exceptions/HadoopQueueFetcherException.java
@@ -20,17 +20,17 @@ package org.apache.eagle.hadoop.queue.exceptions;
 
 public class HadoopQueueFetcherException extends Exception {
 
-	private static final long serialVersionUID = -2425311876734366496L;
+    private static final long serialVersionUID = -2425311876734366496L;
 
-	/**
-	 * Default constructor of FeederException
-	 */
+    /**
+     * Default constructor of FeederException.
+     */
     public HadoopQueueFetcherException() {
         super();
     }
 
     /**
-     * Constructor of FeederException
+     * Constructor of FeederException.
      *
      * @param message error message
      */
@@ -39,18 +39,17 @@ public class HadoopQueueFetcherException extends Exception {
     }
 
     /**
-     * Constructor of FeederException
+     * Constructor of FeederException.
      *
      * @param message error message
-     * @param cause the cause of the exception
-     *
+     * @param cause   the cause of the exception
      */
     public HadoopQueueFetcherException(String message, Throwable cause) {
         super(message, cause);
     }
 
     /**
-     * Constructor of FeederException
+     * Constructor of FeederException.
      *
      * @param cause the cause of the exception
      */

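The (message, cause) constructor documented above is the one call sites generally want, since it preserves the underlying REST failure when a ResourceManager fetch goes wrong. A hedged usage sketch — fetchSchedulerInfo() is a hypothetical stand-in, not a method from this patch:

    try {
        fetchSchedulerInfo(); // e.g. a call through HadoopYarnResourceUtils
    } catch (java.io.IOException e) {
        throw new HadoopQueueFetcherException("failed to fetch scheduler info from RM", e);
    }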
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/HadoopQueueEntityRepository.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/HadoopQueueEntityRepository.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/HadoopQueueEntityRepository.java
index 728c81e..f598779 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/HadoopQueueEntityRepository.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/HadoopQueueEntityRepository.java
@@ -21,7 +21,7 @@ import org.apache.eagle.hadoop.queue.model.scheduler.RunningQueueAPIEntity;
 import org.apache.eagle.log.entity.repo.EntityRepository;
 
 public class HadoopQueueEntityRepository extends EntityRepository {
-	public HadoopQueueEntityRepository() {
-		this.registerEntity(RunningQueueAPIEntity.class);
-	}
+    public HadoopQueueEntityRepository() {
+        this.registerEntity(RunningQueueAPIEntity.class);
+    }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/App.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/App.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/App.java
index e0a2b61..b741086 100755
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/App.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/App.java
@@ -17,206 +17,206 @@
  */
 
 /**
- * 
+ *
  */
 package org.apache.eagle.hadoop.queue.model.applications;
 
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
 
-/*
- *	App model for Yarn Resource http://<rm http address:port>/ws/v1/cluster/apps
+/**
+ * App model for Yarn Resource http://[rm http address:port]/ws/v1/cluster/apps.
  */
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class App {
-	private String id;
-	private String user;
-	private String name;
-	private String queue;
-	private String state;
-	private String finalStatus;
-	private double progress;
-	private String trackingUI;
-	private String trackingUrl;
-	private String diagnostics;
-	private long clusterId;
-	private String applicationType;
-	private String applicationTags;
-	private long startedTime;
-	private long finishedTime;
-	private long elapsedTime;
-	private String amContainerLogs;
-	private String amHostHttpAddress;
-	private int allocatedMB;
-	private int allocatedVCores;
-	private int runningContainers;
-
-	public String getId() {
-		return id;
-	}
-	
-	public void setId(String id) {
-		this.id = id;
-	}
-	
-	public String getUser() {
-		return user;
-	}
-
-	public void setUser(String user) {
-		this.user = user;
-	}
-
-	public String getName() {
-		return name;
-	}
-
-	public void setName(String name) {
-		this.name = name;
-	}
-
-	public String getQueue() {
-		return queue;
-	}
-
-	public void setQueue(String queue) {
-		this.queue = queue;
-	}
-
-	public String getState() {
-		return state;
-	}
-
-	public void setState(String state) {
-		this.state = state;
-	}
-
-	public String getFinalStatus() {
-		return finalStatus;
-	}
-
-	public void setFinalStatus(String finalStatus) {
-		this.finalStatus = finalStatus;
-	}
-
-	public double getProgress() {
-		return progress;
-	}
-
-	public void setProgress(double progress) {
-		this.progress = progress;
-	}
-
-	public String getTrackingUI() {
-		return trackingUI;
-	}
-
-	public void setTrackingUI(String trackingUI) {
-		this.trackingUI = trackingUI;
-	}
-
-	public String getTrackingUrl() {
-		return trackingUrl;
-	}
-
-	public void setTrackingUrl(String trackingUrl) {
-		this.trackingUrl = trackingUrl;
-	}
-
-	public String getDiagnostics() {
-		return diagnostics;
-	}
-
-	public void setDiagnostics(String diagnostics) {
-		this.diagnostics = diagnostics;
-	}
-
-	public long getClusterId() {
-		return clusterId;
-	}
-
-	public void setClusterId(long clusterId) {
-		this.clusterId = clusterId;
-	}
-
-	public long getStartedTime() {
-		return startedTime;
-	}
-
-	public void setStartedTime(long startedTime) {
-		this.startedTime = startedTime;
-	}
-
-	public long getFinishedTime() {
-		return finishedTime;
-	}
-
-	public void setFinishedTime(long finishedTime) {
-		this.finishedTime = finishedTime;
-	}
-
-	public long getElapsedTime() {
-		return elapsedTime;
-	}
-
-	public void setElapsedTime(long elapsedTime) {
-		this.elapsedTime = elapsedTime;
-	}
-	
-	public String getApplicationType() {
-		return applicationType;
-	}
-
-	public void setApplicationType(String applicationType) {
-		this.applicationType = applicationType;
-	}
-
-	public String getApplicationTags() {
-		return applicationTags;
-	}
-
-	public void setApplicationTags(String applicationTags) {
-		this.applicationTags = applicationTags;
-	}
-
-	public String getAmContainerLogs() {
-		return amContainerLogs;
-	}
-
-	public void setAmContainerLogs(String amContainerLogs) {
-		this.amContainerLogs = amContainerLogs;
-	}
-
-	public String getAmHostHttpAddress() {
-		return amHostHttpAddress;
-	}
-
-	public void setAmHostHttpAddress(String amHostHttpAddress) {
-		this.amHostHttpAddress = amHostHttpAddress;
-	}
-
-	public int getAllocatedMB() {
-		return allocatedMB;
-	}
-
-	public void setAllocatedMB(int allocatedMB) {
-		this.allocatedMB = allocatedMB;
-	}
-
-	public int getAllocatedVCores() {
-		return allocatedVCores;
-	}
-
-	public void setAllocatedVCores(int allocatedVCores) {
-		this.allocatedVCores = allocatedVCores;
-	}
-	
-	public int getRunningContainers() {
-		return runningContainers;
-	}
-
-	public void setRunningContainers(int runningContainers) {
-		this.runningContainers = runningContainers;
-	}
+    private String id;
+    private String user;
+    private String name;
+    private String queue;
+    private String state;
+    private String finalStatus;
+    private double progress;
+    private String trackingUI;
+    private String trackingUrl;
+    private String diagnostics;
+    private long clusterId;
+    private String applicationType;
+    private String applicationTags;
+    private long startedTime;
+    private long finishedTime;
+    private long elapsedTime;
+    private String amContainerLogs;
+    private String amHostHttpAddress;
+    private int allocatedMB;
+    private int allocatedVCores;
+    private int runningContainers;
+
+    public String getId() {
+        return id;
+    }
+
+    public void setId(String id) {
+        this.id = id;
+    }
+
+    public String getUser() {
+        return user;
+    }
+
+    public void setUser(String user) {
+        this.user = user;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public String getQueue() {
+        return queue;
+    }
+
+    public void setQueue(String queue) {
+        this.queue = queue;
+    }
+
+    public String getState() {
+        return state;
+    }
+
+    public void setState(String state) {
+        this.state = state;
+    }
+
+    public String getFinalStatus() {
+        return finalStatus;
+    }
+
+    public void setFinalStatus(String finalStatus) {
+        this.finalStatus = finalStatus;
+    }
+
+    public double getProgress() {
+        return progress;
+    }
+
+    public void setProgress(double progress) {
+        this.progress = progress;
+    }
+
+    public String getTrackingUI() {
+        return trackingUI;
+    }
+
+    public void setTrackingUI(String trackingUI) {
+        this.trackingUI = trackingUI;
+    }
+
+    public String getTrackingUrl() {
+        return trackingUrl;
+    }
+
+    public void setTrackingUrl(String trackingUrl) {
+        this.trackingUrl = trackingUrl;
+    }
+
+    public String getDiagnostics() {
+        return diagnostics;
+    }
+
+    public void setDiagnostics(String diagnostics) {
+        this.diagnostics = diagnostics;
+    }
+
+    public long getClusterId() {
+        return clusterId;
+    }
+
+    public void setClusterId(long clusterId) {
+        this.clusterId = clusterId;
+    }
+
+    public long getStartedTime() {
+        return startedTime;
+    }
+
+    public void setStartedTime(long startedTime) {
+        this.startedTime = startedTime;
+    }
+
+    public long getFinishedTime() {
+        return finishedTime;
+    }
+
+    public void setFinishedTime(long finishedTime) {
+        this.finishedTime = finishedTime;
+    }
+
+    public long getElapsedTime() {
+        return elapsedTime;
+    }
+
+    public void setElapsedTime(long elapsedTime) {
+        this.elapsedTime = elapsedTime;
+    }
+
+    public String getApplicationType() {
+        return applicationType;
+    }
+
+    public void setApplicationType(String applicationType) {
+        this.applicationType = applicationType;
+    }
+
+    public String getApplicationTags() {
+        return applicationTags;
+    }
+
+    public void setApplicationTags(String applicationTags) {
+        this.applicationTags = applicationTags;
+    }
+
+    public String getAmContainerLogs() {
+        return amContainerLogs;
+    }
+
+    public void setAmContainerLogs(String amContainerLogs) {
+        this.amContainerLogs = amContainerLogs;
+    }
+
+    public String getAmHostHttpAddress() {
+        return amHostHttpAddress;
+    }
+
+    public void setAmHostHttpAddress(String amHostHttpAddress) {
+        this.amHostHttpAddress = amHostHttpAddress;
+    }
+
+    public int getAllocatedMB() {
+        return allocatedMB;
+    }
+
+    public void setAllocatedMB(int allocatedMB) {
+        this.allocatedMB = allocatedMB;
+    }
+
+    public int getAllocatedVCores() {
+        return allocatedVCores;
+    }
+
+    public void setAllocatedVCores(int allocatedVCores) {
+        this.allocatedVCores = allocatedVCores;
+    }
+
+    public int getRunningContainers() {
+        return runningContainers;
+    }
+
+    public void setRunningContainers(int runningContainers) {
+        this.runningContainers = runningContainers;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/Apps.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/Apps.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/Apps.java
index 649f17b..ecd1836 100755
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/Apps.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/Apps.java
@@ -17,7 +17,7 @@
  */
 
 /**
- * 
+ *
  */
 package org.apache.eagle.hadoop.queue.model.applications;
 
@@ -26,16 +26,16 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 
 import java.util.List;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class Apps {
-	private List<App> app;
+    private List<App> app;
 
-	public List<App> getApp() {
-		return app;
-	}
+    public List<App> getApp() {
+        return app;
+    }
 
-	public void setApp(List<App> app) {
-		this.app = app;
-	}
+    public void setApp(List<App> app) {
+        this.app = app;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/AppsWrapper.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/AppsWrapper.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/AppsWrapper.java
index c204638..5584226 100755
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/AppsWrapper.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/applications/AppsWrapper.java
@@ -21,16 +21,16 @@ package org.apache.eagle.hadoop.queue.model.applications;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class AppsWrapper {
-	private Apps apps;
+    private Apps apps;
 
-	public Apps getApps() {
-		return apps;
-	}
+    public Apps getApps() {
+        return apps;
+    }
 
-	public void setApps(Apps apps) {
-		this.apps = apps;
-	}
+    public void setApps(Apps apps) {
+        this.apps = apps;
+    }
 }

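App, Apps, and AppsWrapper mirror the nesting of the ResourceManager's JSON ({"apps":{"app":[...]}}), so a whole response binds in one call, and @JsonIgnoreProperties(ignoreUnknown = true) drops any fields these models do not declare. A minimal sketch assuming a Jackson 1.x ObjectMapper and an invented sample payload; in the project itself the URL comes from YarnClusterResourceURLBuilder:

    import org.codehaus.jackson.map.ObjectMapper;

    public class AppsParseSketch {
        public static void main(String[] args) throws Exception {
            // Shaped like the RM's /ws/v1/cluster/apps response; values are made up.
            String json = "{\"apps\":{\"app\":[{\"id\":\"application_1_0001\","
                    + "\"user\":\"hadoop\",\"state\":\"RUNNING\",\"allocatedMB\":2048}]}}";
            ObjectMapper mapper = new ObjectMapper();
            AppsWrapper wrapper = mapper.readValue(json, AppsWrapper.class);
            App first = wrapper.getApps().getApp().get(0);
            System.out.println(first.getId() + " uses " + first.getAllocatedMB() + " MB");
        }
    }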
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/cluster/ClusterMetrics.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/cluster/ClusterMetrics.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/cluster/ClusterMetrics.java
new file mode 100644
index 0000000..8bf8917
--- /dev/null
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/cluster/ClusterMetrics.java
@@ -0,0 +1,198 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ */
+
+package org.apache.eagle.hadoop.queue.model.cluster;
+
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class ClusterMetrics {
+    private int appsSubmitted;
+    private int appsCompleted;
+    private int appsPending;
+    private int appsRunning;
+    private int appsFailed;
+    private int appsKilled;
+    private long reservedMB;
+    private long availableMB;
+    private long allocatedMB;
+    private int containersAllocated;
+    private int containersReserved;
+    private int containersPending;
+    private long totalMB;
+    private int totalNodes;
+    private int lostNodes;
+    private int unhealthyNodes;
+    private int decommissionedNodes;
+    private int rebootedNodes;
+    private int activeNodes;
+
+    public int getAppsSubmitted() {
+        return appsSubmitted;
+    }
+
+    public void setAppsSubmitted(int appsSubmitted) {
+        this.appsSubmitted = appsSubmitted;
+    }
+
+    public int getAppsCompleted() {
+        return appsCompleted;
+    }
+
+    public void setAppsCompleted(int appsCompleted) {
+        this.appsCompleted = appsCompleted;
+    }
+
+    public int getAppsPending() {
+        return appsPending;
+    }
+
+    public void setAppsPending(int appsPending) {
+        this.appsPending = appsPending;
+    }
+
+    public int getAppsRunning() {
+        return appsRunning;
+    }
+
+    public void setAppsRunning(int appsRunning) {
+        this.appsRunning = appsRunning;
+    }
+
+    public int getAppsFailed() {
+        return appsFailed;
+    }
+
+    public void setAppsFailed(int appsFailed) {
+        this.appsFailed = appsFailed;
+    }
+
+    public int getAppsKilled() {
+        return appsKilled;
+    }
+
+    public void setAppsKilled(int appsKilled) {
+        this.appsKilled = appsKilled;
+    }
+
+    public long getReservedMB() {
+        return reservedMB;
+    }
+
+    public void setReservedMB(long reservedMB) {
+        this.reservedMB = reservedMB;
+    }
+
+    public long getAvailableMB() {
+        return availableMB;
+    }
+
+    public void setAvailableMB(long availableMB) {
+        this.availableMB = availableMB;
+    }
+
+    public long getAllocatedMB() {
+        return allocatedMB;
+    }
+
+    public void setAllocatedMB(long allocatedMB) {
+        this.allocatedMB = allocatedMB;
+    }
+
+    public int getContainersAllocated() {
+        return containersAllocated;
+    }
+
+    public void setContainersAllocated(int containersAllocated) {
+        this.containersAllocated = containersAllocated;
+    }
+
+    public int getContainersReserved() {
+        return containersReserved;
+    }
+
+    public void setContainersReserved(int containersReserved) {
+        this.containersReserved = containersReserved;
+    }
+
+    public int getContainersPending() {
+        return containersPending;
+    }
+
+    public void setContainersPending(int containersPending) {
+        this.containersPending = containersPending;
+    }
+
+    public long getTotalMB() {
+        return totalMB;
+    }
+
+    public void setTotalMB(long totalMB) {
+        this.totalMB = totalMB;
+    }
+
+    public int getTotalNodes() {
+        return totalNodes;
+    }
+
+    public void setTotalNodes(int totalNodes) {
+        this.totalNodes = totalNodes;
+    }
+
+    public int getLostNodes() {
+        return lostNodes;
+    }
+
+    public void setLostNodes(int lostNodes) {
+        this.lostNodes = lostNodes;
+    }
+
+    public int getUnhealthyNodes() {
+        return unhealthyNodes;
+    }
+
+    public void setUnhealthyNodes(int unhealthyNodes) {
+        this.unhealthyNodes = unhealthyNodes;
+    }
+
+    public int getDecommissionedNodes() {
+        return decommissionedNodes;
+    }
+
+    public void setDecommissionedNodes(int decommissionedNodes) {
+        this.decommissionedNodes = decommissionedNodes;
+    }
+
+    public int getRebootedNodes() {
+        return rebootedNodes;
+    }
+
+    public void setRebootedNodes(int rebootedNodes) {
+        this.rebootedNodes = rebootedNodes;
+    }
+
+    public int getActiveNodes() {
+        return activeNodes;
+    }
+
+    public void setActiveNodes(int activeNodes) {
+        this.activeNodes = activeNodes;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/cluster/ClusterMetricsWrapper.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/cluster/ClusterMetricsWrapper.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/cluster/ClusterMetricsWrapper.java
new file mode 100644
index 0000000..bbefd99
--- /dev/null
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/cluster/ClusterMetricsWrapper.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ */
+
+package org.apache.eagle.hadoop.queue.model.cluster;
+
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class ClusterMetricsWrapper {
+    public ClusterMetrics getClusterMetrics() {
+        return clusterMetrics;
+    }
+
+    public void setClusterMetrics(ClusterMetrics clusterMetrics) {
+        this.clusterMetrics = clusterMetrics;
+    }
+
+    private ClusterMetrics clusterMetrics;
+
+}

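The two files above are reformatted moves of the model/clusterMetrics classes deleted next; the rename to model.cluster is apparently what the package-name check requires, since a camelCase segment like clusterMetrics trips it once failOnViolation is enabled. The wrapper binds the RM's /ws/v1/cluster/metrics envelope the same way AppsWrapper binds apps; a hedged sketch deriving a utilization figure from the bound fields (sample values invented, URL handling omitted):

    import org.codehaus.jackson.map.ObjectMapper;

    public class ClusterUtilizationSketch {
        public static void main(String[] args) throws Exception {
            // Shaped like the RM's /ws/v1/cluster/metrics response.
            String json = "{\"clusterMetrics\":{\"allocatedMB\":786432,"
                    + "\"totalMB\":1048576,\"activeNodes\":64}}";
            ClusterMetricsWrapper wrapper = new ObjectMapper()
                    .readValue(json, ClusterMetricsWrapper.class);
            ClusterMetrics m = wrapper.getClusterMetrics();
            // Same double-division care as the queue ratio metrics earlier.
            double utilization = m.getTotalMB() == 0L
                    ? 0.0 : ((double) m.getAllocatedMB()) / m.getTotalMB();
            System.out.println("memory utilization = " + utilization); // 0.75
        }
    }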
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/clusterMetrics/ClusterMetrics.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/clusterMetrics/ClusterMetrics.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/clusterMetrics/ClusterMetrics.java
deleted file mode 100644
index b254ebf..0000000
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/clusterMetrics/ClusterMetrics.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *
- */
-
-package org.apache.eagle.hadoop.queue.model.clusterMetrics;
-
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
-@JsonIgnoreProperties(ignoreUnknown = true)
-public class ClusterMetrics {
-	private int appsSubmitted;
-	private int appsCompleted;
-	private int appsPending;
-	private int appsRunning;
-	private int appsFailed;
-	private int appsKilled;
-	private long reservedMB;
-	private long availableMB;
-	private long allocatedMB;
-	private int containersAllocated;
-	private int containersReserved;
-	private int containersPending;
-	private long totalMB;
-	private int totalNodes;
-	private int lostNodes;
-	private int unhealthyNodes;
-	private int decommissionedNodes;
-	private int rebootedNodes;
-	private int activeNodes;
-
-	public int getAppsSubmitted() {
-		return appsSubmitted;
-	}
-
-	public void setAppsSubmitted(int appsSubmitted) {
-		this.appsSubmitted = appsSubmitted;
-	}
-
-	public int getAppsCompleted() {
-		return appsCompleted;
-	}
-
-	public void setAppsCompleted(int appsCompleted) {
-		this.appsCompleted = appsCompleted;
-	}
-
-	public int getAppsPending() {
-		return appsPending;
-	}
-
-	public void setAppsPending(int appsPending) {
-		this.appsPending = appsPending;
-	}
-
-	public int getAppsRunning() {
-		return appsRunning;
-	}
-
-	public void setAppsRunning(int appsRunning) {
-		this.appsRunning = appsRunning;
-	}
-
-	public int getAppsFailed() {
-		return appsFailed;
-	}
-
-	public void setAppsFailed(int appsFailed) {
-		this.appsFailed = appsFailed;
-	}
-
-	public int getAppsKilled() {
-		return appsKilled;
-	}
-
-	public void setAppsKilled(int appsKilled) {
-		this.appsKilled = appsKilled;
-	}
-
-	public long getReservedMB() {
-		return reservedMB;
-	}
-
-	public void setReservedMB(long reservedMB) {
-		this.reservedMB = reservedMB;
-	}
-
-	public long getAvailableMB() {
-		return availableMB;
-	}
-
-	public void setAvailableMB(long availableMB) {
-		this.availableMB = availableMB;
-	}
-
-	public long getAllocatedMB() {
-		return allocatedMB;
-	}
-
-	public void setAllocatedMB(long allocatedMB) {
-		this.allocatedMB = allocatedMB;
-	}
-
-	public int getContainersAllocated() {
-		return containersAllocated;
-	}
-
-	public void setContainersAllocated(int containersAllocated) {
-		this.containersAllocated = containersAllocated;
-	}
-
-	public int getContainersReserved() {
-		return containersReserved;
-	}
-
-	public void setContainersReserved(int containersReserved) {
-		this.containersReserved = containersReserved;
-	}
-
-	public int getContainersPending() {
-		return containersPending;
-	}
-
-	public void setContainersPending(int containersPending) {
-		this.containersPending = containersPending;
-	}
-
-	public long getTotalMB() {
-		return totalMB;
-	}
-
-	public void setTotalMB(long totalMB) {
-		this.totalMB = totalMB;
-	}
-
-	public int getTotalNodes() {
-		return totalNodes;
-	}
-
-	public void setTotalNodes(int totalNodes) {
-		this.totalNodes = totalNodes;
-	}
-
-	public int getLostNodes() {
-		return lostNodes;
-	}
-
-	public void setLostNodes(int lostNodes) {
-		this.lostNodes = lostNodes;
-	}
-
-	public int getUnhealthyNodes() {
-		return unhealthyNodes;
-	}
-
-	public void setUnhealthyNodes(int unhealthyNodes) {
-		this.unhealthyNodes = unhealthyNodes;
-	}
-
-	public int getDecommissionedNodes() {
-		return decommissionedNodes;
-	}
-
-	public void setDecommissionedNodes(int decommissionedNodes) {
-		this.decommissionedNodes = decommissionedNodes;
-	}
-
-	public int getRebootedNodes() {
-		return rebootedNodes;
-	}
-
-	public void setRebootedNodes(int rebootedNodes) {
-		this.rebootedNodes = rebootedNodes;
-	}
-
-	public int getActiveNodes() {
-		return activeNodes;
-	}
-
-	public void setActiveNodes(int activeNodes) {
-		this.activeNodes = activeNodes;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/clusterMetrics/ClusterMetricsWrapper.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/clusterMetrics/ClusterMetricsWrapper.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/clusterMetrics/ClusterMetricsWrapper.java
deleted file mode 100644
index a9d1a9a..0000000
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/clusterMetrics/ClusterMetricsWrapper.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *
- */
-
-package org.apache.eagle.hadoop.queue.model.clusterMetrics;
-
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
-@JsonIgnoreProperties(ignoreUnknown = true)
-public class ClusterMetricsWrapper {
-	public ClusterMetrics getClusterMetrics() {
-		return clusterMetrics;
-	}
-
-	public void setClusterMetrics(ClusterMetrics clusterMetrics) {
-		this.clusterMetrics = clusterMetrics;
-	}
-
-	private ClusterMetrics clusterMetrics;
-
-}

