eagle-commits mailing list archives

From h..@apache.org
Subject [1/2] incubator-eagle git commit: [EAGLE-532] Fix checkstyle on eagle-jpm and enable failOnViolation
Date Thu, 08 Sep 2016 07:28:20 GMT
Repository: incubator-eagle
Updated Branches:
  refs/heads/master a0fc87084 -> ecbdec8ef


http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queue.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queue.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queue.java
index ace5879..550c9d5 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queue.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queue.java
@@ -21,204 +21,209 @@ package org.apache.eagle.hadoop.queue.model.scheduler;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class Queue {
-	private String type;
-	private double capacity;
-	private double usedCapacity;
-	private double maxCapacity;
-	private double absoluteCapacity;
-	private double absoluteMaxCapacity;
-	private double absoluteUsedCapacity;
-
-	private ResourcesUsed resourcesUsed;
-	private String usedResources;
-	private String queueName;
-	private String state;
-	private Users users;
-
-	private int numApplications;
-	private int numPendingApplications;
-	private int numContainers;
-	private int maxApplications;
-	private int maxApplicationsPerUser;
-	private int maxActiveApplications;
-	private int maxActiveApplicationsPerUser;
-	private int userLimit;
-	private int userLimitFactor;
-	private Queues queues;
-
-	public String getUsedResources() {
-		return usedResources;
-	}
-
-	public void setUsedResources(String usedResources) {
-		this.usedResources = usedResources;
-	}
-
-	public int getMaxActiveApplicationsPerUser() {
-		return maxActiveApplicationsPerUser;
-	}
-
-	public void setMaxActiveApplicationsPerUser(int maxActiveApplicationsPerUser) {
-		this.maxActiveApplicationsPerUser = maxActiveApplicationsPerUser;
-	}
-
-	public int getNumPendingApplications() {
-		return numPendingApplications;
-	}
-
-	public void setNumPendingApplications(int numPendingApplications) {
-		this.numPendingApplications = numPendingApplications;
-	}
-
-	public int getNumContainers() {
-		return numContainers;
-	}
-
-	public void setNumContainers(int numContainers) {
-		this.numContainers = numContainers;
-	}
-
-	public int getMaxApplications() {
-		return maxApplications;
-	}
-
-	public void setMaxApplications(int maxApplications) {
-		this.maxApplications = maxApplications;
-	}
-
-	public int getMaxApplicationsPerUser() {
-		return maxApplicationsPerUser;
-	}
-
-	public void setMaxApplicationsPerUser(int maxApplicationsPerUser) {
-		this.maxApplicationsPerUser = maxApplicationsPerUser;
-	}
-
-	public int getMaxActiveApplications() {
-		return maxActiveApplications;
-	}
-
-	public void setMaxActiveApplications(int maxActiveApplications) {
-		this.maxActiveApplications = maxActiveApplications;
-	}
-
-	public int getUserLimit() {
-		return userLimit;
-	}
-
-	public void setUserLimit(int userLimit) {
-		this.userLimit = userLimit;
-	}
-
-	public int getUserLimitFactor() {
-		return userLimitFactor;
-	}
-
-	public void setUserLimitFactor(int userLimitFactor) {
-		this.userLimitFactor = userLimitFactor;
-	}
-
-
-	public String getType() {
-		return type;
-	}
-
-	public void setType(String type) {
-		this.type = type;
-	}
-	public ResourcesUsed getResourcesUsed() {
-		return resourcesUsed;
-	}
-
-	public void setResourcesUsed(ResourcesUsed resourcesUsed) {
-		this.resourcesUsed = resourcesUsed;
-	}
+    private String type;
+    private double capacity;
+    private double usedCapacity;
+    private double maxCapacity;
+    private double absoluteCapacity;
+    private double absoluteMaxCapacity;
+    private double absoluteUsedCapacity;
+
+    private ResourcesUsed resourcesUsed;
+    private String usedResources;
+    private String queueName;
+    private String state;
+    private Users users;
+
+    private int numApplications;
+    private int numPendingApplications;
+    private int numContainers;
+    private int maxApplications;
+    private int maxApplicationsPerUser;
+    private int maxActiveApplications;
+    private int maxActiveApplicationsPerUser;
+    private int userLimit;
+    private int userLimitFactor;
+    private Queues queues;
+
+    public String getUsedResources() {
+        return usedResources;
+    }
+
+    public void setUsedResources(String usedResources) {
+        this.usedResources = usedResources;
+    }
+
+    public int getMaxActiveApplicationsPerUser() {
+        return maxActiveApplicationsPerUser;
+    }
+
+    public void setMaxActiveApplicationsPerUser(int maxActiveApplicationsPerUser) {
+        this.maxActiveApplicationsPerUser = maxActiveApplicationsPerUser;
+    }
+
+    public int getNumPendingApplications() {
+        return numPendingApplications;
+    }
+
+    public void setNumPendingApplications(int numPendingApplications) {
+        this.numPendingApplications = numPendingApplications;
+    }
+
+    public int getNumContainers() {
+        return numContainers;
+    }
+
+    public void setNumContainers(int numContainers) {
+        this.numContainers = numContainers;
+    }
+
+    public int getMaxApplications() {
+        return maxApplications;
+    }
+
+    public void setMaxApplications(int maxApplications) {
+        this.maxApplications = maxApplications;
+    }
+
+    public int getMaxApplicationsPerUser() {
+        return maxApplicationsPerUser;
+    }
+
+    public void setMaxApplicationsPerUser(int maxApplicationsPerUser) {
+        this.maxApplicationsPerUser = maxApplicationsPerUser;
+    }
+
+    public int getMaxActiveApplications() {
+        return maxActiveApplications;
+    }
+
+    public void setMaxActiveApplications(int maxActiveApplications) {
+        this.maxActiveApplications = maxActiveApplications;
+    }
+
+    public int getUserLimit() {
+        return userLimit;
+    }
+
+    public void setUserLimit(int userLimit) {
+        this.userLimit = userLimit;
+    }
+
+    public int getUserLimitFactor() {
+        return userLimitFactor;
+    }
+
+    public void setUserLimitFactor(int userLimitFactor) {
+        this.userLimitFactor = userLimitFactor;
+    }
+
+
+    public String getType() {
+        return type;
+    }
+
+    public void setType(String type) {
+        this.type = type;
+    }
+
+    public ResourcesUsed getResourcesUsed() {
+        return resourcesUsed;
+    }
+
+    public void setResourcesUsed(ResourcesUsed resourcesUsed) {
+        this.resourcesUsed = resourcesUsed;
+    }
 
 
-	public Users getUsers() { return users; }
+    public Users getUsers() {
+        return users;
+    }
+
+    public void setUsers(Users users) {
+        this.users = users;
+    }
 
-	public void setUsers(Users users) { this.users = users; }
+    public double getAbsoluteUsedCapacity() {
+        return absoluteUsedCapacity;
+    }
 
-	public double getAbsoluteUsedCapacity() {
-		return absoluteUsedCapacity;
-	}
+    public void setAbsoluteUsedCapacity(double absoluteUsedCapacity) {
+        this.absoluteUsedCapacity = absoluteUsedCapacity;
+    }
 
-	public void setAbsoluteUsedCapacity(double absoluteUsedCapacity) {
-		this.absoluteUsedCapacity = absoluteUsedCapacity;
-	}
+    public double getCapacity() {
+        return capacity;
+    }
 
-	public double getCapacity() {
-		return capacity;
-	}
+    public void setCapacity(double capacity) {
+        this.capacity = capacity;
+    }
 
-	public void setCapacity(double capacity) {
-		this.capacity = capacity;
-	}
+    public double getUsedCapacity() {
+        return usedCapacity;
+    }
 
-	public double getUsedCapacity() {
-		return usedCapacity;
-	}
+    public void setUsedCapacity(double usedCapacity) {
+        this.usedCapacity = usedCapacity;
+    }
 
-	public void setUsedCapacity(double usedCapacity) {
-		this.usedCapacity = usedCapacity;
-	}
+    public double getMaxCapacity() {
+        return maxCapacity;
+    }
 
-	public double getMaxCapacity() {
-		return maxCapacity;
-	}
+    public void setMaxCapacity(double maxCapacity) {
+        this.maxCapacity = maxCapacity;
+    }
 
-	public void setMaxCapacity(double maxCapacity) {
-		this.maxCapacity = maxCapacity;
-	}
+    public double getAbsoluteCapacity() {
+        return absoluteCapacity;
+    }
 
-	public double getAbsoluteCapacity() {
-		return absoluteCapacity;
-	}
+    public void setAbsoluteCapacity(double absoluteCapacity) {
+        this.absoluteCapacity = absoluteCapacity;
+    }
 
-	public void setAbsoluteCapacity(double absoluteCapacity) {
-		this.absoluteCapacity = absoluteCapacity;
-	}
+    public double getAbsoluteMaxCapacity() {
+        return absoluteMaxCapacity;
+    }
 
-	public double getAbsoluteMaxCapacity() {
-		return absoluteMaxCapacity;
-	}
+    public void setAbsoluteMaxCapacity(double absoluteMaxCapacity) {
+        this.absoluteMaxCapacity = absoluteMaxCapacity;
+    }
 
-	public void setAbsoluteMaxCapacity(double absoluteMaxCapacity) {
-		this.absoluteMaxCapacity = absoluteMaxCapacity;
-	}
+    public int getNumApplications() {
+        return numApplications;
+    }
 
-	public int getNumApplications() {
-		return numApplications;
-	}
+    public void setNumApplications(int numApplications) {
+        this.numApplications = numApplications;
+    }
 
-	public void setNumApplications(int numApplications) {
-		this.numApplications = numApplications;
-	}
+    public String getQueueName() {
+        return queueName;
+    }
 
-	public String getQueueName() {
-		return queueName;
-	}
+    public void setQueueName(String queueName) {
+        this.queueName = queueName;
+    }
 
-	public void setQueueName(String queueName) {
-		this.queueName = queueName;
-	}
+    public String getState() {
+        return state;
+    }
 
-	public String getState() {
-		return state;
-	}
+    public void setState(String state) {
+        this.state = state;
+    }
 
-	public void setState(String state) {
-		this.state = state;
-	}
+    public Queues getQueues() {
+        return queues;
+    }
 
-	public Queues getQueues() {
-		return queues;
-	}
-
-	public void setQueues(Queues queues) {
-		this.queues = queues;
-	}	
+    public void setQueues(Queues queues) {
+        this.queues = queues;
+    }
 }
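
For context, the reformatted Queue above is a plain Jackson 1.x POJO used to bind the YARN scheduler REST payload. A minimal sketch of that binding follows (assumed usage, not part of this patch; the class name and sample JSON are illustrative only):

    // Hypothetical sketch: bind scheduler JSON to the Queue POJO with the
    // Jackson 1.x ObjectMapper that matches the org.codehaus.jackson imports above.
    import org.apache.eagle.hadoop.queue.model.scheduler.Queue;
    import org.codehaus.jackson.map.ObjectMapper;

    public class QueueBindingSketch {
        public static void main(String[] args) throws Exception {
            // Unknown JSON properties are ignored (@JsonIgnoreProperties above);
            // absent fields keep their Java defaults.
            String json = "{\"queueName\":\"default\",\"capacity\":50.0,\"state\":\"RUNNING\"}";
            Queue queue = new ObjectMapper().readValue(json, Queue.class);
            System.out.println(queue.getQueueName() + " capacity=" + queue.getCapacity());
        }
    }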

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queues.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queues.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queues.java
index 09b4eb0..fdf0f99 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queues.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Queues.java
@@ -23,16 +23,16 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 
 import java.util.List;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class Queues {
-	public List<Queue> getQueue() {
-		return queue;
-	}
+    public List<Queue> getQueue() {
+        return queue;
+    }
 
-	public void setQueue(List<Queue> queue) {
-		this.queue = queue;
-	}
+    public void setQueue(List<Queue> queue) {
+        this.queue = queue;
+    }
 
-	private List<Queue> queue;
+    private List<Queue> queue;
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/ResourcesUsed.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/ResourcesUsed.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/ResourcesUsed.java
index cb80ff6..49e6a8c 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/ResourcesUsed.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/ResourcesUsed.java
@@ -21,28 +21,28 @@ package org.apache.eagle.hadoop.queue.model.scheduler;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class ResourcesUsed {
 
-	private long memory;
+    private long memory;
 
-	private long vCores;
+    private long vCores;
 
-	public long getvCores() {
-		return vCores;
-	}
+    public long getvCores() {
+        return vCores;
+    }
 
-	public void setvCores(long vCores) {
-		this.vCores = vCores;
-	}
+    public void setvCores(long vCores) {
+        this.vCores = vCores;
+    }
 
-	public long getMemory() {
-		return memory;
-	}
+    public long getMemory() {
+        return memory;
+    }
 
-	public void setMemory(long memory) {
-		this.memory = memory;
-	}
+    public void setMemory(long memory) {
+        this.memory = memory;
+    }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/RunningQueueAPIEntity.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/RunningQueueAPIEntity.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/RunningQueueAPIEntity.java
index a9b9b3a..38e8043 100755
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/RunningQueueAPIEntity.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/RunningQueueAPIEntity.java
@@ -25,134 +25,134 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 
 import java.util.List;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @Table("running_queue")
 @ColumnFamily("f")
 @Prefix("rqueue")
 @Service(HadoopClusterConstants.RUNNING_QUEUE_SERVICE_NAME)
 @TimeSeries(true)
-@Partition({"site"})
+@Partition( {"site"})
 public class RunningQueueAPIEntity extends TaggedLogAPIEntity {
-	@Column("a")
-	private String state;
-	@Column("b")
-	private double absoluteCapacity;
-	@Column("c")
-	private double absoluteMaxCapacity;
-	@Column("d")
-	private double absoluteUsedCapacity;
-	@Column("e")
-	private long memory;
-	@Column("f")
-	private long vcores;
-	@Column("g")
-	private int numActiveApplications;
-	@Column("h")
-	private int numPendingApplications;
-	@Column("i")
-	private int maxActiveApplications;
-	@Column("j")
-	private String scheduler;
-	@Column("k")
-	private List<UserWrapper> users;
-
-	public String getScheduler() {
-		return scheduler;
-	}
-
-	public void setScheduler(String scheduler) {
-		this.scheduler = scheduler;
-		valueChanged("scheduler");
-	}
-
-	public int getMaxActiveApplications() {
-		return maxActiveApplications;
-	}
-
-	public void setMaxActiveApplications(int maxActiveApplications) {
-		this.maxActiveApplications = maxActiveApplications;
-		valueChanged("maxActiveApplications");
-	}
-
-	public String getState() {
-		return state;
-	}
-
-	public void setState(String state) {
-		this.state = state;
-		valueChanged("state");
-
-	}
-
-	public double getAbsoluteCapacity() {
-		return absoluteCapacity;
-	}
-
-	public void setAbsoluteCapacity(double absoluteCapacity) {
-		this.absoluteCapacity = absoluteCapacity;
-		valueChanged("absoluteCapacity");
-	}
-
-	public double getAbsoluteMaxCapacity() {
-		return absoluteMaxCapacity;
-	}
-
-	public void setAbsoluteMaxCapacity(double absoluteMaxCapacity) {
-		this.absoluteMaxCapacity = absoluteMaxCapacity;
-		valueChanged("absoluteMaxCapacity");
-	}
-
-	public double getAbsoluteUsedCapacity() {
-		return absoluteUsedCapacity;
-	}
-
-	public void setAbsoluteUsedCapacity(double absoluteUsedCapacity) {
-		this.absoluteUsedCapacity = absoluteUsedCapacity;
-		valueChanged("absoluteUsedCapacity");
-	}
-
-	public long getMemory() {
-		return memory;
-	}
-
-	public void setMemory(long memory) {
-		this.memory = memory;
-		valueChanged("memory");
-	}
-
-	public long getVcores() {
-		return vcores;
-	}
-
-	public void setVcores(long vcores) {
-		this.vcores = vcores;
-		valueChanged("vcores");
-	}
-
-	public int getNumActiveApplications() {
-		return numActiveApplications;
-	}
-
-	public void setNumActiveApplications(int numActiveApplications) {
-		this.numActiveApplications = numActiveApplications;
-		valueChanged("numActiveApplications");
-	}
-
-	public int getNumPendingApplications() {
-		return numPendingApplications;
-	}
-
-	public void setNumPendingApplications(int numPendingApplications) {
-		this.numPendingApplications = numPendingApplications;
-		valueChanged("numPendingApplications");
-	}
-
-	public List<UserWrapper> getUsers() {
-		return users;
-	}
-
-	public void setUsers(List<UserWrapper> users) {
-		this.users = users;
-		valueChanged("numPendingApplications");
-	}
+    @Column("a")
+    private String state;
+    @Column("b")
+    private double absoluteCapacity;
+    @Column("c")
+    private double absoluteMaxCapacity;
+    @Column("d")
+    private double absoluteUsedCapacity;
+    @Column("e")
+    private long memory;
+    @Column("f")
+    private long vcores;
+    @Column("g")
+    private int numActiveApplications;
+    @Column("h")
+    private int numPendingApplications;
+    @Column("i")
+    private int maxActiveApplications;
+    @Column("j")
+    private String scheduler;
+    @Column("k")
+    private List<UserWrapper> users;
+
+    public String getScheduler() {
+        return scheduler;
+    }
+
+    public void setScheduler(String scheduler) {
+        this.scheduler = scheduler;
+        valueChanged("scheduler");
+    }
+
+    public int getMaxActiveApplications() {
+        return maxActiveApplications;
+    }
+
+    public void setMaxActiveApplications(int maxActiveApplications) {
+        this.maxActiveApplications = maxActiveApplications;
+        valueChanged("maxActiveApplications");
+    }
+
+    public String getState() {
+        return state;
+    }
+
+    public void setState(String state) {
+        this.state = state;
+        valueChanged("state");
+
+    }
+
+    public double getAbsoluteCapacity() {
+        return absoluteCapacity;
+    }
+
+    public void setAbsoluteCapacity(double absoluteCapacity) {
+        this.absoluteCapacity = absoluteCapacity;
+        valueChanged("absoluteCapacity");
+    }
+
+    public double getAbsoluteMaxCapacity() {
+        return absoluteMaxCapacity;
+    }
+
+    public void setAbsoluteMaxCapacity(double absoluteMaxCapacity) {
+        this.absoluteMaxCapacity = absoluteMaxCapacity;
+        valueChanged("absoluteMaxCapacity");
+    }
+
+    public double getAbsoluteUsedCapacity() {
+        return absoluteUsedCapacity;
+    }
+
+    public void setAbsoluteUsedCapacity(double absoluteUsedCapacity) {
+        this.absoluteUsedCapacity = absoluteUsedCapacity;
+        valueChanged("absoluteUsedCapacity");
+    }
+
+    public long getMemory() {
+        return memory;
+    }
+
+    public void setMemory(long memory) {
+        this.memory = memory;
+        valueChanged("memory");
+    }
+
+    public long getVcores() {
+        return vcores;
+    }
+
+    public void setVcores(long vcores) {
+        this.vcores = vcores;
+        valueChanged("vcores");
+    }
+
+    public int getNumActiveApplications() {
+        return numActiveApplications;
+    }
+
+    public void setNumActiveApplications(int numActiveApplications) {
+        this.numActiveApplications = numActiveApplications;
+        valueChanged("numActiveApplications");
+    }
+
+    public int getNumPendingApplications() {
+        return numPendingApplications;
+    }
+
+    public void setNumPendingApplications(int numPendingApplications) {
+        this.numPendingApplications = numPendingApplications;
+        valueChanged("numPendingApplications");
+    }
+
+    public List<UserWrapper> getUsers() {
+        return users;
+    }
+
+    public void setUsers(List<UserWrapper> users) {
+        this.users = users;
+        valueChanged("numPendingApplications");
+    }
 }
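
For context, each setter in the reformatted RunningQueueAPIEntity calls valueChanged(...) so the entity framework can track which columns were modified. A minimal usage sketch populating the entity through those setters (hypothetical, not part of this patch; the surrounding class and values are illustrative only):

    // Hypothetical sketch: build a RunningQueueAPIEntity via its setters, which
    // record modified fields through valueChanged() as shown in the diff above.
    import org.apache.eagle.hadoop.queue.model.scheduler.RunningQueueAPIEntity;

    public class RunningQueueEntitySketch {
        public static void main(String[] args) {
            RunningQueueAPIEntity entity = new RunningQueueAPIEntity();
            entity.setState("RUNNING");
            entity.setAbsoluteCapacity(25.0);
            entity.setAbsoluteUsedCapacity(12.5);
            entity.setMemory(8192L);
            entity.setNumActiveApplications(3);
            // The populated entity would then be written to the running_queue
            // table by the persist bolt (assumed; that path is outside this hunk).
        }
    }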

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Scheduler.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Scheduler.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Scheduler.java
index 3d26d9a..09164ca 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Scheduler.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/model/scheduler/Scheduler.java
@@ -21,16 +21,16 @@ package org.apache.eagle.hadoop.queue.model.scheduler;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class Scheduler {
-	public SchedulerInfo getSchedulerInfo() {
-		return schedulerInfo;
-	}
+    public SchedulerInfo getSchedulerInfo() {
+        return schedulerInfo;
+    }
 
-	public void setSchedulerInfo(SchedulerInfo schedulerInfo) {
-		this.schedulerInfo = schedulerInfo;
-	}
+    public void setSchedulerInfo(SchedulerInfo schedulerInfo) {
+        this.schedulerInfo = schedulerInfo;
+    }
 
-	private SchedulerInfo schedulerInfo;
+    private SchedulerInfo schedulerInfo;
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMessageId.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMessageId.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMessageId.java
index f4b9a90..85c82d3 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMessageId.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMessageId.java
@@ -18,10 +18,10 @@
 
 package org.apache.eagle.hadoop.queue.storm;
 
-import com.google.common.base.Objects;
-import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.DataSource;
 import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.DataType;
+import com.google.common.base.Objects;
+import org.apache.commons.lang.builder.HashCodeBuilder;
 
 public class HadoopQueueMessageId {
     private String dataType;
@@ -41,8 +41,8 @@ public class HadoopQueueMessageId {
         }
         final HadoopQueueMessageId other = (HadoopQueueMessageId) obj;
         return Objects.equal(this.dataType, other.dataType)
-                && Objects.equal(this.dataSource, other.dataSource)
-                && Objects.equal(this.timestamp, other.timestamp);
+            && Objects.equal(this.dataSource, other.dataSource)
+            && Objects.equal(this.timestamp, other.timestamp);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMetricPersistBolt.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMetricPersistBolt.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMetricPersistBolt.java
index db61841..c6c204a 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMetricPersistBolt.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueMetricPersistBolt.java
@@ -34,7 +34,6 @@ import backtype.storm.tuple.Tuple;
 import com.typesafe.config.Config;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import java.util.List;
 import java.util.Map;
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueRunningSpout.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueRunningSpout.java b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueRunningSpout.java
index 2fc85b6..7053a09 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueRunningSpout.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/main/java/org/apache/eagle/hadoop/queue/storm/HadoopQueueRunningSpout.java
@@ -18,14 +18,15 @@
 
 package org.apache.eagle.hadoop.queue.storm;
 
+import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants;
+import org.apache.eagle.hadoop.queue.common.HadoopYarnResourceUtils;
+
 import backtype.storm.spout.SpoutOutputCollector;
 import backtype.storm.task.TopologyContext;
 import backtype.storm.topology.OutputFieldsDeclarer;
 import backtype.storm.topology.base.BaseRichSpout;
 import backtype.storm.tuple.Fields;
 import com.typesafe.config.Config;
-import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants;
-import org.apache.eagle.hadoop.queue.common.HadoopYarnResourceUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -33,9 +34,9 @@ import java.util.Map;
 
 public class HadoopQueueRunningSpout extends BaseRichSpout {
 
-    private final static Logger LOG = LoggerFactory.getLogger(HadoopQueueRunningSpout.class);
-    private final static String FETCH_INTERVAL_CONF = "dataSourceConfig.FetchIntervalSec";
-    private final static String DEFAULT_FETCH_INTERVAL_SECONDS = "10";
+    private static final Logger LOG = LoggerFactory.getLogger(HadoopQueueRunningSpout.class);
+    private static final String FETCH_INTERVAL_CONF = "dataSourceConfig.FetchIntervalSec";
+    private static final String DEFAULT_FETCH_INTERVAL_SECONDS = "10";
 
     private long fetchIntervalSec;
     private long lastFetchTime = 0;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-hadoop-queue/src/test/java/org/apache/eagle/hadoop/queue/TestHadoopYarnResourceUtils.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-hadoop-queue/src/test/java/org/apache/eagle/hadoop/queue/TestHadoopYarnResourceUtils.java b/eagle-jpm/eagle-hadoop-queue/src/test/java/org/apache/eagle/hadoop/queue/TestHadoopYarnResourceUtils.java
index 27bdc3b..06af0f3 100644
--- a/eagle-jpm/eagle-hadoop-queue/src/test/java/org/apache/eagle/hadoop/queue/TestHadoopYarnResourceUtils.java
+++ b/eagle-jpm/eagle-hadoop-queue/src/test/java/org/apache/eagle/hadoop/queue/TestHadoopYarnResourceUtils.java
@@ -23,7 +23,7 @@ import com.typesafe.config.ConfigFactory;
 import org.apache.eagle.hadoop.queue.common.HadoopYarnResourceUtils;
 import org.apache.eagle.hadoop.queue.common.YarnClusterResourceURLBuilder;
 import org.apache.eagle.hadoop.queue.model.applications.AppsWrapper;
-import org.apache.eagle.hadoop.queue.model.clusterMetrics.ClusterMetricsWrapper;
+import org.apache.eagle.hadoop.queue.model.cluster.ClusterMetricsWrapper;
 import org.apache.eagle.hadoop.queue.model.scheduler.SchedulerWrapper;
 import org.junit.Assert;
 import org.junit.Ignore;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplication.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplication.java b/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplication.java
index 68b7eff..ba5f9d9 100644
--- a/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplication.java
+++ b/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplication.java
@@ -32,6 +32,7 @@ import java.util.Arrays;
 import java.util.Map;
 
 public class JPMApplication extends StormApplication {
+
     @Override
     public StormTopology execute(Config config, StormEnvironment environment) {
         TopologyBuilder builder = new TopologyBuilder();
@@ -43,6 +44,7 @@ public class JPMApplication extends StormApplication {
 
     private class RandomEventSpout extends BaseRichSpout {
         private SpoutOutputCollector collector;
+
         @Override
         public void open(Map map, TopologyContext topologyContext, SpoutOutputCollector spoutOutputCollector) {
             collector = spoutOutputCollector;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplicationProvider.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplicationProvider.java b/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplicationProvider.java
index 685a104..5d2c2a9 100644
--- a/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplicationProvider.java
+++ b/eagle-jpm/eagle-jpm-app/src/main/java/org/apache/eagle/app/jpm/JPMApplicationProvider.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -19,7 +19,7 @@ package org.apache.eagle.app.jpm;
 import org.apache.eagle.app.spi.AbstractApplicationProvider;
 
 /**
- * Define application provider pragmatically
+ * Define application provider pragmatically.
  */
 public class JPMApplicationProvider extends AbstractApplicationProvider<JPMApplication> {
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/crawler/JHFCrawlerDriverImpl.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/crawler/JHFCrawlerDriverImpl.java b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/crawler/JHFCrawlerDriverImpl.java
index 55ffc19..ae9df37 100644
--- a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/crawler/JHFCrawlerDriverImpl.java
+++ b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/crawler/JHFCrawlerDriverImpl.java
@@ -20,15 +20,12 @@ package org.apache.eagle.jpm.mr.history.crawler;
 
 import org.apache.eagle.jpm.mr.history.MRHistoryJobConfig;
 import org.apache.eagle.jpm.mr.history.metrics.JobCountMetricsGenerator;
-import org.apache.eagle.jpm.mr.history.parser.EagleJobStatus;
 import org.apache.eagle.jpm.mr.history.zkres.JobHistoryZKStateManager;
-import org.apache.eagle.jpm.mr.historyentity.JobCountEntity;
 import org.apache.eagle.jpm.util.JobIdFilter;
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.eagle.service.client.IEagleServiceClient;
-import org.apache.eagle.service.client.impl.EagleServiceClientImpl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobCountMetricsGenerator.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobCountMetricsGenerator.java b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobCountMetricsGenerator.java
index 642170d..a6f9d56 100644
--- a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobCountMetricsGenerator.java
+++ b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobCountMetricsGenerator.java
@@ -18,7 +18,6 @@
 
 package org.apache.eagle.jpm.mr.history.metrics;
 
-import org.apache.commons.lang3.tuple.Pair;
 import org.apache.eagle.jpm.mr.history.MRHistoryJobConfig;
 import org.apache.eagle.jpm.mr.history.parser.EagleJobStatus;
 import org.apache.eagle.jpm.mr.history.zkres.JobHistoryZKStateManager;
@@ -26,6 +25,7 @@ import org.apache.eagle.jpm.util.Constants;
 import org.apache.eagle.log.entity.GenericMetricEntity;
 import org.apache.eagle.service.client.IEagleServiceClient;
 import org.apache.eagle.service.client.impl.EagleServiceClientImpl;
+import org.apache.commons.lang3.tuple.Pair;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -42,7 +42,7 @@ public class JobCountMetricsGenerator {
 
     public void flush(String date, int year, int month, int day) throws Exception {
         List<Pair<String, String>> jobs = JobHistoryZKStateManager.instance().getProcessedJobs(date);
-        int total = jobs.size();
+        final int total = jobs.size();
         int fail = 0;
         for (Pair<String, String> job : jobs) {
             if (!job.getRight().equals(EagleJobStatus.SUCCEEDED.toString())) {
@@ -50,7 +50,7 @@ public class JobCountMetricsGenerator {
             }
         }
 
-        IEagleServiceClient client = new EagleServiceClientImpl(
+        final IEagleServiceClient client = new EagleServiceClientImpl(
             MRHistoryJobConfig.get().getEagleServiceConfig().eagleServiceHost,
             MRHistoryJobConfig.get().getEagleServiceConfig().eagleServicePort,
             MRHistoryJobConfig.get().getEagleServiceConfig().username,
@@ -62,7 +62,7 @@ public class JobCountMetricsGenerator {
         GenericMetricEntity metricEntity = new GenericMetricEntity();
         metricEntity.setTimestamp(cal.getTimeInMillis());
         metricEntity.setPrefix(Constants.JOB_COUNT_PER_DAY);
-        metricEntity.setValue(new double[]{total, fail});
+        metricEntity.setValue(new double[] {total, fail});
         @SuppressWarnings("serial")
         Map<String, String> baseTags = new HashMap<String, String>() {
             {

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobExecutionMetricsCreationListener.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobExecutionMetricsCreationListener.java b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobExecutionMetricsCreationListener.java
index d7e8fcc..ce788a3 100644
--- a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobExecutionMetricsCreationListener.java
+++ b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/metrics/JobExecutionMetricsCreationListener.java
@@ -20,8 +20,8 @@ package org.apache.eagle.jpm.mr.history.metrics;
 
 import org.apache.eagle.jpm.mr.historyentity.JobExecutionAPIEntity;
 import org.apache.eagle.jpm.util.Constants;
-import org.apache.eagle.log.entity.GenericMetricEntity;
 import org.apache.eagle.jpm.util.metrics.AbstractMetricsCreationListener;
+import org.apache.eagle.log.entity.GenericMetricEntity;
 
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/parser/JHFMRVer2Parser.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/parser/JHFMRVer2Parser.java b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/parser/JHFMRVer2Parser.java
index f93f942..d03356d 100644
--- a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/parser/JHFMRVer2Parser.java
+++ b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/parser/JHFMRVer2Parser.java
@@ -44,7 +44,7 @@ public class JHFMRVer2Parser implements JHFParserBase {
     public void parse(InputStream is) throws Exception {
         int eventCtr = 0;
         try {
-            long start = System.currentTimeMillis();
+            final long start = System.currentTimeMillis();
             DataInputStream in = new DataInputStream(is);
             String version = in.readLine();
             if (!"Avro-Json".equals(version)) {

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/storm/JobHistorySpout.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/storm/JobHistorySpout.java b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/storm/JobHistorySpout.java
index da98e0d..8ad3284 100644
--- a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/storm/JobHistorySpout.java
+++ b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/storm/JobHistorySpout.java
@@ -18,7 +18,6 @@
 
 package org.apache.eagle.jpm.mr.history.storm;
 
-import com.typesafe.config.Config;
 import org.apache.eagle.jpm.mr.history.MRHistoryJobConfig;
 import org.apache.eagle.jpm.mr.history.crawler.*;
 import org.apache.eagle.jpm.mr.history.zkres.JobHistoryZKStateManager;
@@ -28,11 +27,11 @@ import org.apache.eagle.jpm.util.JobIdFilterByPartition;
 import org.apache.eagle.jpm.util.JobIdPartitioner;
 import org.apache.eagle.service.client.IEagleServiceClient;
 import org.apache.eagle.service.client.impl.EagleServiceClientImpl;
-
 import backtype.storm.spout.SpoutOutputCollector;
 import backtype.storm.task.TopologyContext;
 import backtype.storm.topology.OutputFieldsDeclarer;
 import backtype.storm.topology.base.BaseRichSpout;
+import com.typesafe.config.Config;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/zkres/JobHistoryZKStateManager.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/zkres/JobHistoryZKStateManager.java b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/zkres/JobHistoryZKStateManager.java
index 2e64da3..78a906a 100644
--- a/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/zkres/JobHistoryZKStateManager.java
+++ b/eagle-jpm/eagle-jpm-mr-history/src/main/java/org/apache/eagle/jpm/mr/history/zkres/JobHistoryZKStateManager.java
@@ -18,9 +18,9 @@
 
 package org.apache.eagle.jpm.mr.history.zkres;
 
-import org.apache.commons.lang3.tuple.Pair;
 import org.apache.eagle.jpm.mr.history.MRHistoryJobConfig.ZKStateConfig;
 
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.framework.recipes.locks.InterProcessMutex;
@@ -37,7 +37,7 @@ import java.util.List;
 public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public static final Logger LOG = LoggerFactory.getLogger(JobHistoryZKStateManager.class);
     private String zkRoot;
-    private CuratorFramework _curator;
+    private CuratorFramework curator;
     public static final String ZNODE_LOCK_FOR_ENSURE_JOB_PARTITIONS = "lockForEnsureJobPartitions";
     public static final String ZNODE_FORCE_START_FROM = "forceStartFrom";
     public static final String ZNODE_PARTITIONS = "partitions";
@@ -66,23 +66,23 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
         this.zkRoot = config.zkRoot;
 
         try {
-            _curator = newCurator(config);
-            _curator.start();
+            curator = newCurator(config);
+            curator.start();
         } catch (Exception e) {
             throw new RuntimeException(e);
         }
     }
 
     public void close() {
-        _curator.close();
-        _curator = null;
+        curator.close();
+        curator = null;
     }
 
     private String readForceStartFrom() {
         String path = zkRoot + "/" + ZNODE_FORCE_START_FROM;
         try {
-            if (_curator.checkExists().forPath(path) != null) {
-                return new String(_curator.getData().forPath(path), "UTF-8");
+            if (curator.checkExists().forPath(path) != null) {
+                return new String(curator.getData().forPath(path), "UTF-8");
             }
         } catch (Exception ex) {
             LOG.error("fail reading forceStartFrom znode", ex);
@@ -93,8 +93,8 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     private void deleteForceStartFrom() {
         String path = zkRoot + "/" + ZNODE_FORCE_START_FROM;
         try {
-            if (_curator.checkExists().forPath(path) != null) {
-                _curator.delete().forPath(path);
+            if (curator.checkExists().forPath(path) != null) {
+                curator.delete().forPath(path);
             }
         } catch (Exception ex) {
             LOG.error("fail reading forceStartFrom znode", ex);
@@ -139,7 +139,7 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public void ensureJobPartitions(int numTotalPartitions) {
         // lock before rebuild job partitions
         String lockForEnsureJobPartitions = zkRoot + "/" + ZNODE_LOCK_FOR_ENSURE_JOB_PARTITIONS;
-        InterProcessMutex lock = new InterProcessMutex(_curator, lockForEnsureJobPartitions);
+        InterProcessMutex lock = new InterProcessMutex(curator, lockForEnsureJobPartitions);
         String path = zkRoot + "/" + ZNODE_PARTITIONS;
         try {
             lock.acquire();
@@ -153,10 +153,10 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
                     throw new IllegalStateException();
                 }
             } else {
-                boolean pathExists = _curator.checkExists().forPath(path) == null ? false : true;
+                boolean pathExists = curator.checkExists().forPath(path) == null ? false : true;
                 boolean structureChanged = true;
                 if (pathExists) {
-                    int currentCount = _curator.getChildren().forPath(path).size();
+                    int currentCount = curator.getChildren().forPath(path).size();
                     if (numTotalPartitions == currentCount) {
                         structureChanged = false;
                         LOG.info("znode partitions structure is unchanged");
@@ -169,9 +169,9 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
                 }
 
                 if (pathExists) {
-                    List<String> partitions = _curator.getChildren().forPath(path);
+                    List<String> partitions = curator.getChildren().forPath(path);
                     for (String partition : partitions) {
-                        String date = new String(_curator.getData().forPath(path + "/" + partition), "UTF-8");
+                        String date = new String(curator.getData().forPath(path + "/" + partition), "UTF-8");
                         int tmp = Integer.valueOf(date);
                         if (tmp < minDate) {
                             minDate = tmp;
@@ -202,12 +202,12 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
         LOG.info("rebuild job partitions with numTotalPartitions " + numTotalPartitions + " with starting date " + startingDate);
         String path = zkRoot + "/" + ZNODE_PARTITIONS;
         // truncate all existing partitions
-        if (_curator.checkExists().forPath(path) != null) {
-            _curator.delete().deletingChildrenIfNeeded().forPath(path);
+        if (curator.checkExists().forPath(path) != null) {
+            curator.delete().deletingChildrenIfNeeded().forPath(path);
         }
 
         for (int i = 0; i < numTotalPartitions; i++) {
-            _curator.create()
+            curator.create()
                 .creatingParentsIfNeeded()
                 .withMode(CreateMode.PERSISTENT)
                 .forPath(path + "/" + i, startingDate.getBytes("UTF-8"));
@@ -218,8 +218,8 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public String readProcessedDate(int partitionId) {
         String path = zkRoot + "/" + ZNODE_PARTITIONS + "/" + partitionId;
         try {
-            if (_curator.checkExists().forPath(path) != null) {
-                return new String(_curator.getData().forPath(path), "UTF-8");
+            if (curator.checkExists().forPath(path) != null) {
+                return new String(curator.getData().forPath(path), "UTF-8");
             } else {
                 return null;
             }
@@ -233,13 +233,13 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public void updateProcessedDate(int partitionId, String date) {
         String path = zkRoot + "/" + ZNODE_PARTITIONS + "/" + partitionId;
         try {
-            if (_curator.checkExists().forPath(path) == null) {
-                _curator.create()
+            if (curator.checkExists().forPath(path) == null) {
+                curator.create()
                     .creatingParentsIfNeeded()
                     .withMode(CreateMode.PERSISTENT)
                     .forPath(path, date.getBytes("UTF-8"));
             } else {
-                _curator.setData().forPath(path, date.getBytes("UTF-8"));
+                curator.setData().forPath(path, date.getBytes("UTF-8"));
             }
         } catch (Exception e) {
             LOG.error("fail update processed date", e);
@@ -251,13 +251,13 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public void addProcessedJob(String date, String jobId) {
         String path = zkRoot + "/" + ZNODE_JOBS + "/" + date + "/" + jobId;
         try {
-            if (_curator.checkExists().forPath(path) == null) {
-                _curator.create()
+            if (curator.checkExists().forPath(path) == null) {
+                curator.create()
                     .creatingParentsIfNeeded()
                     .withMode(CreateMode.PERSISTENT)
                     .forPath(path);
             } else {
-                _curator.setData().forPath(path);
+                curator.setData().forPath(path);
             }
         } catch (Exception e) {
             LOG.error("fail adding processed jobs", e);
@@ -270,17 +270,17 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
         LOG.info("trying to truncate all data for day " + date);
         // we need lock before we do truncate
         String path = zkRoot + "/" + ZNODE_JOBS + "/" + date;
-        InterProcessMutex lock = new InterProcessMutex(_curator, path);
+        InterProcessMutex lock = new InterProcessMutex(curator, path);
         try {
             lock.acquire();
-            if (_curator.checkExists().forPath(path) != null) {
-                _curator.delete().deletingChildrenIfNeeded().forPath(path);
+            if (curator.checkExists().forPath(path) != null) {
+                curator.delete().deletingChildrenIfNeeded().forPath(path);
                 LOG.info("really truncated all data for day " + date);
             }
 
             String jobIdPath = zkRoot + "/" + ZNODE_JOB_IDS + "/" + date;
-            if (_curator.checkExists().forPath(jobIdPath) != null) {
-                _curator.delete().deletingChildrenIfNeeded().forPath(jobIdPath);
+            if (curator.checkExists().forPath(jobIdPath) != null) {
+                curator.delete().deletingChildrenIfNeeded().forPath(jobIdPath);
                 LOG.info("really truncated all jobIds for day " + date);
             }
         } catch (Exception e) {
@@ -300,8 +300,8 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public List<String> readProcessedJobs(String date) {
         String path = zkRoot + "/" + ZNODE_JOBS + "/" + date;
         try {
-            if (_curator.checkExists().forPath(path) != null) {
-                return _curator.getChildren().forPath(path);
+            if (curator.checkExists().forPath(path) != null) {
+                return curator.getChildren().forPath(path);
             } else {
                 return null;
             }
@@ -315,8 +315,8 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public void truncateEverything() {
         String path = zkRoot;
         try {
-            if (_curator.checkExists().forPath(path) != null) {
-                _curator.delete().deletingChildrenIfNeeded().forPath(path);
+            if (curator.checkExists().forPath(path) != null) {
+                curator.delete().deletingChildrenIfNeeded().forPath(path);
             }
         } catch (Exception ex) {
             LOG.error("fail truncating verything", ex);
@@ -328,14 +328,14 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public long readProcessedTimeStamp(int partitionId) {
         String path = zkRoot + "/" + ZNODE_PARTITIONS + "/" + partitionId + "/" + ZNODE_TIMESTAMPS;
         try {
-            if (_curator.checkExists().forPath(path) == null) {
-                _curator.create()
+            if (curator.checkExists().forPath(path) == null) {
+                curator.create()
                     .creatingParentsIfNeeded()
                     .withMode(CreateMode.PERSISTENT)
                     .forPath(path);
                 return 0L;
             } else {
-                return Long.parseLong(new String(_curator.getData().forPath(path), "UTF-8"));
+                return Long.parseLong(new String(curator.getData().forPath(path), "UTF-8"));
             }
         } catch (Exception e) {
             LOG.error("fail to read timeStamp for partition " + partitionId, e);
@@ -347,14 +347,14 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public void updateProcessedTimeStamp(int partitionId, long timeStamp) {
         String path = zkRoot + "/" + ZNODE_PARTITIONS + "/" + partitionId + "/" + ZNODE_TIMESTAMPS;
         try {
-            if (_curator.checkExists().forPath(path) == null) {
-                _curator.create()
+            if (curator.checkExists().forPath(path) == null) {
+                curator.create()
                     .creatingParentsIfNeeded()
                     .withMode(CreateMode.PERSISTENT)
                     .forPath(path);
             }
 
-            _curator.setData().forPath(path, (timeStamp + "").getBytes("UTF-8"));
+            curator.setData().forPath(path, (timeStamp + "").getBytes("UTF-8"));
         } catch (Exception e) {
             LOG.error("fail to update timeStamp for partition " + partitionId, e);
             throw new RuntimeException(e);
@@ -366,11 +366,11 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
         List<Pair<String, String>> result = new ArrayList<>();
         String path = zkRoot + "/" + ZNODE_JOB_IDS + "/" + date;
         try {
-            if (_curator.checkExists().forPath(path) != null) {
-                List<String> jobs = _curator.getChildren().forPath(path);
+            if (curator.checkExists().forPath(path) != null) {
+                List<String> jobs = curator.getChildren().forPath(path);
                 for (String job : jobs) {
                     String jobPath = path + "/" + job;
-                    String status = new String(_curator.getData().forPath(jobPath), "UTF-8");
+                    String status = new String(curator.getData().forPath(jobPath), "UTF-8");
                     result.add(Pair.of(job, status));
                 }
             }
@@ -385,13 +385,13 @@ public class JobHistoryZKStateManager implements JobHistoryZKStateLCM {
     public void updateProcessedJob(String date, String jobId, String status) {
         String path = zkRoot + "/" + ZNODE_JOB_IDS + "/" + date + "/" + jobId;
         try {
-            if (_curator.checkExists().forPath(path) == null) {
-                _curator.create()
+            if (curator.checkExists().forPath(path) == null) {
+                curator.create()
                     .creatingParentsIfNeeded()
                     .withMode(CreateMode.PERSISTENT)
                     .forPath(path);
             }
-            _curator.setData().forPath(path, status.getBytes("UTF-8"));
+            curator.setData().forPath(path, status.getBytes("UTF-8"));
         } catch (Exception e) {
             LOG.error("fail adding processed jobs", e);
             throw new RuntimeException(e);

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/MRRunningJobApplication.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/MRRunningJobApplication.java b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/MRRunningJobApplication.java
index 21ee1d9..66e0f37 100644
--- a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/MRRunningJobApplication.java
+++ b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/MRRunningJobApplication.java
@@ -16,15 +16,15 @@
  */
 package org.apache.eagle.jpm.mr.running;
 
-import backtype.storm.generated.StormTopology;
-import backtype.storm.topology.TopologyBuilder;
-import backtype.storm.tuple.Fields;
-import com.typesafe.config.Config;
 import org.apache.eagle.app.StormApplication;
 import org.apache.eagle.app.environment.impl.StormEnvironment;
 import org.apache.eagle.jpm.mr.running.storm.MRRunningJobFetchSpout;
 import org.apache.eagle.jpm.mr.running.storm.MRRunningJobParseBolt;
 import org.apache.eagle.jpm.util.Constants;
+import backtype.storm.generated.StormTopology;
+import backtype.storm.topology.TopologyBuilder;
+import backtype.storm.tuple.Fields;
+import com.typesafe.config.Config;
 
 import java.util.List;
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/JobExecutionMetricsCreationListener.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/JobExecutionMetricsCreationListener.java b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/JobExecutionMetricsCreationListener.java
index 8b30d45..3dd1291 100644
--- a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/JobExecutionMetricsCreationListener.java
+++ b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/JobExecutionMetricsCreationListener.java
@@ -20,8 +20,8 @@ package org.apache.eagle.jpm.mr.running.parser.metrics;
 
 import org.apache.eagle.jpm.mr.runningentity.JobExecutionAPIEntity;
 import org.apache.eagle.jpm.util.Constants;
-import org.apache.eagle.log.entity.GenericMetricEntity;
 import org.apache.eagle.jpm.util.metrics.AbstractMetricsCreationListener;
+import org.apache.eagle.log.entity.GenericMetricEntity;
 
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/TaskExecutionMetricsCreationListener.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/TaskExecutionMetricsCreationListener.java b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/TaskExecutionMetricsCreationListener.java
index 9f22a7f..51d16b3 100644
--- a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/TaskExecutionMetricsCreationListener.java
+++ b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/parser/metrics/TaskExecutionMetricsCreationListener.java
@@ -20,8 +20,8 @@ package org.apache.eagle.jpm.mr.running.parser.metrics;
 
 import org.apache.eagle.jpm.mr.runningentity.TaskExecutionAPIEntity;
 import org.apache.eagle.jpm.util.Constants;
-import org.apache.eagle.log.entity.GenericMetricEntity;
 import org.apache.eagle.jpm.util.metrics.AbstractMetricsCreationListener;
+import org.apache.eagle.log.entity.GenericMetricEntity;
 
 import java.util.ArrayList;
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/recover/MRRunningJobManager.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/recover/MRRunningJobManager.java b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/recover/MRRunningJobManager.java
index 50b4726..20a8701 100644
--- a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/recover/MRRunningJobManager.java
+++ b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/recover/MRRunningJobManager.java
@@ -21,9 +21,9 @@ package org.apache.eagle.jpm.mr.running.recover;
 import org.apache.eagle.jpm.mr.running.MRRunningJobConfig;
 import org.apache.eagle.jpm.mr.runningentity.JobExecutionAPIEntity;
 import org.apache.eagle.jpm.util.jobrecover.RunningJobManager;
+import org.apache.eagle.jpm.util.resourcefetch.model.AppInfo;
 
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.eagle.jpm.util.resourcefetch.model.AppInfo;
 
 import java.io.Serializable;
 import java.util.HashMap;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/storm/MRRunningJobParseBolt.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/storm/MRRunningJobParseBolt.java b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/storm/MRRunningJobParseBolt.java
index e918597..4e0cdbc 100644
--- a/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/storm/MRRunningJobParseBolt.java
+++ b/eagle-jpm/eagle-jpm-mr-running/src/main/java/org/apache/eagle/jpm/mr/running/storm/MRRunningJobParseBolt.java
@@ -26,12 +26,11 @@ import org.apache.eagle.jpm.util.Constants;
 import org.apache.eagle.jpm.util.resourcefetch.RMResourceFetcher;
 import org.apache.eagle.jpm.util.resourcefetch.ResourceFetcher;
 import org.apache.eagle.jpm.util.resourcefetch.model.AppInfo;
-
 import backtype.storm.task.OutputCollector;
 import backtype.storm.task.TopologyContext;
 import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
 import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Tuple;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobApp.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobApp.java b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobApp.java
index 180b1e8..446eb4e 100644
--- a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobApp.java
+++ b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobApp.java
@@ -17,13 +17,13 @@
 
 package org.apache.eagle.jpm.spark.history;
 
-import backtype.storm.generated.StormTopology;
-import backtype.storm.topology.TopologyBuilder;
-import com.typesafe.config.Config;
 import org.apache.eagle.app.StormApplication;
 import org.apache.eagle.app.environment.impl.StormEnvironment;
-import org.apache.eagle.jpm.spark.history.storm.SparkHistoryJobSpout;
 import org.apache.eagle.jpm.spark.history.storm.SparkHistoryJobParseBolt;
+import org.apache.eagle.jpm.spark.history.storm.SparkHistoryJobSpout;
+import backtype.storm.generated.StormTopology;
+import backtype.storm.topology.TopologyBuilder;
+import com.typesafe.config.Config;
 
 public class SparkHistoryJobApp extends StormApplication {
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobAppConfig.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobAppConfig.java b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobAppConfig.java
index 284eeee..58571cb 100644
--- a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobAppConfig.java
+++ b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/SparkHistoryJobAppConfig.java
@@ -20,13 +20,12 @@
 package org.apache.eagle.jpm.spark.history;
 
 import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
 
 import java.io.Serializable;
 
 public class SparkHistoryJobAppConfig implements Serializable {
-    final static String SPARK_HISTORY_JOB_FETCH_SPOUT_NAME = "sparkHistoryJobFetchSpout";
-    final static String SPARK_HISTORY_JOB_PARSE_BOLT_NAME = "sparkHistoryJobParseBolt";
+    static final String SPARK_HISTORY_JOB_FETCH_SPOUT_NAME = "sparkHistoryJobFetchSpout";
+    static final String SPARK_HISTORY_JOB_PARSE_BOLT_NAME = "sparkHistoryJobParseBolt";
 
     public ZKStateConfig zkStateConfig;
     public JobHistoryEndpointConfig jobHistoryConfig;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/crawl/JHFSparkEventReader.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/crawl/JHFSparkEventReader.java b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/crawl/JHFSparkEventReader.java
index 571620a..76f560e 100644
--- a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/crawl/JHFSparkEventReader.java
+++ b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/crawl/JHFSparkEventReader.java
@@ -17,15 +17,15 @@
 
 package org.apache.eagle.jpm.spark.history.crawl;
 
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import org.apache.commons.lang.ArrayUtils;
 import org.apache.eagle.jpm.spark.entity.*;
 import org.apache.eagle.jpm.util.*;
 import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
 import org.apache.eagle.service.client.EagleServiceClientException;
 import org.apache.eagle.service.client.impl.EagleServiceBaseClient;
 import org.apache.eagle.service.client.impl.EagleServiceClientImpl;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import org.apache.commons.lang.ArrayUtils;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
@@ -116,7 +116,7 @@ public class JHFSparkEventReader {
         String[] props = {"spark.yarn.app.id", "spark.executor.memory", "spark.driver.host", "spark.driver.port",
             "spark.driver.memory", "spark.scheduler.pool", "spark.executor.cores", "spark.yarn.am.memory",
             "spark.yarn.am.cores", "spark.yarn.executor.memoryOverhead", "spark.yarn.driver.memoryOverhead", "spark.yarn.am.memoryOverhead", "spark.master"};
-        String[] jobConf = (String[])ArrayUtils.addAll(additionalJobConf, props);
+        String[] jobConf = (String[]) ArrayUtils.addAll(additionalJobConf, props);
         for (String prop : jobConf) {
             if (sparkProps.containsKey(prop)) {
                 app.getConfig().getConfig().put(prop, (String) sparkProps.get(prop));
@@ -481,14 +481,14 @@ public class JHFSparkEventReader {
         if (fieldValue != null) {
             result = Utils.parseMemory(fieldValue + "m");
             if (result == 0L) {
-               result = Utils.parseMemory(fieldValue);
+                result = Utils.parseMemory(fieldValue);
             }
         }
 
         if (result == 0L) {
             result = Math.max(
-                    Utils.parseMemory(conf.getString("spark.defaultVal.spark.yarn.overhead.min")),
-                    executorMemory * conf.getInt("spark.defaultVal." + fieldName + ".factor") / 100);
+                Utils.parseMemory(conf.getString("spark.defaultVal.spark.yarn.overhead.min")),
+                executorMemory * conf.getInt("spark.defaultVal." + fieldName + ".factor") / 100);
         }
         return result;
     }
@@ -621,7 +621,7 @@ public class JHFSparkEventReader {
         int stageAttemptId = Integer.parseInt(stage.getTags().get(SparkJobTagName.SPARK_STAGE_ATTEMPT_ID.toString()));
         for (int i = 0; i < stageAttemptId; i++) {
             SparkStage previousStage = stages.get(this.generateStageKey(
-                    stage.getTags().get(SparkJobTagName.SPARK_SATGE_ID.toString()), Integer.toString(i)));
+                stage.getTags().get(SparkJobTagName.SPARK_SATGE_ID.toString()), Integer.toString(i)));
             if (previousStage.getStatus().equalsIgnoreCase(SparkEntityConstant.SparkStageStatus.COMPLETE.toString())) {
                 return true;
             }
@@ -644,8 +644,8 @@ public class JHFSparkEventReader {
         stage.setName(name);
         stage.setNumActiveTasks(0);
         stage.setNumTasks(numTasks);
-        stage.setSchedulingPool(this.app.getConfig().getConfig().get("spark.scheduler.pool") == null ?
-                "default" : this.app.getConfig().getConfig().get("spark.scheduler.pool"));
+        stage.setSchedulingPool(this.app.getConfig().getConfig().get("spark.scheduler.pool") == null
+            ? "default" : this.app.getConfig().getConfig().get("spark.scheduler.pool"));
 
         String stageKey = this.generateStageKey(Integer.toString(stageId), Integer.toString(stageAttemptId));
         stages.put(stageKey, stage);

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/status/JobHistoryZKStateManager.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/status/JobHistoryZKStateManager.java b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/status/JobHistoryZKStateManager.java
index 0bb65df..4f98996 100644
--- a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/status/JobHistoryZKStateManager.java
+++ b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/status/JobHistoryZKStateManager.java
@@ -19,8 +19,8 @@
 
 package org.apache.eagle.jpm.spark.history.status;
 
-import org.apache.eagle.jpm.spark.history.crawl.SparkApplicationInfo;
 import org.apache.eagle.jpm.spark.history.SparkHistoryJobAppConfig;
+import org.apache.eagle.jpm.spark.history.crawl.SparkApplicationInfo;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.framework.api.transaction.CuratorTransactionBridge;
@@ -35,9 +35,9 @@ import java.util.Iterator;
 import java.util.List;
 
 public class JobHistoryZKStateManager {
-    private final static Logger LOG = LoggerFactory.getLogger(JobHistoryZKStateManager.class);
+    private static final Logger LOG = LoggerFactory.getLogger(JobHistoryZKStateManager.class);
 
-    private final static String START_TIMESTAMP = "lastAppTime";
+    private static final String START_TIMESTAMP = "lastAppTime";
     private String zkRoot;
     private CuratorFramework curator;
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobParseBolt.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobParseBolt.java b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobParseBolt.java
index 0351de3..82f26c2 100644
--- a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobParseBolt.java
+++ b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobParseBolt.java
@@ -19,16 +19,17 @@
 
 package org.apache.eagle.jpm.spark.history.storm;
 
+import org.apache.eagle.jpm.spark.history.SparkHistoryJobAppConfig;
 import org.apache.eagle.jpm.spark.history.crawl.JHFInputStreamReader;
 import org.apache.eagle.jpm.spark.history.crawl.SparkApplicationInfo;
 import org.apache.eagle.jpm.spark.history.crawl.SparkFilesystemInputStreamReaderImpl;
-import org.apache.eagle.jpm.spark.history.SparkHistoryJobAppConfig;
 import org.apache.eagle.jpm.spark.history.status.JobHistoryZKStateManager;
 import org.apache.eagle.jpm.spark.history.status.ZKStateConstant;
 import org.apache.eagle.jpm.util.HDFSUtil;
 import org.apache.eagle.jpm.util.resourcefetch.ResourceFetcher;
 import org.apache.eagle.jpm.util.resourcefetch.SparkHistoryServerResourceFetcher;
 import org.apache.eagle.jpm.util.resourcefetch.model.SparkApplication;
+
 import backtype.storm.task.OutputCollector;
 import backtype.storm.task.TopologyContext;
 import backtype.storm.topology.OutputFieldsDeclarer;
@@ -163,11 +164,11 @@ public class SparkHistoryJobParseBolt extends BaseRichBolt {
 
                 // For Yarn version >= 2.7,
                 // log name: "application_1468625664674_0003_appattempt_1468625664674_0003_000001"
-//                String attemptIdFormatted = String.format("%06d", attemptId);
-//
-//                // remove "application_" to get the number part of appID.
-//                String sparkAppIdNum = appId.substring(12);
-//                String attemptIdString = "appattempt_" + sparkAppIdNum + "_" + attemptIdFormatted;
+                // String attemptIdFormatted = String.format("%06d", attemptId);
+                //
+                // // remove "application_" to get the number part of appID.
+                // String sparkAppIdNum = appId.substring(12);
+                // String attemptIdString = "appattempt_" + sparkAppIdNum + "_" + attemptIdFormatted;
 
                 String appAttemptLogName = this.getAppAttemptLogName(appId, attemptIdString);
                 LOG.info("Attempt ID: {}, App Attempt Log: {}", attemptIdString, appAttemptLogName);

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobSpout.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobSpout.java b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobSpout.java
index 4c50607..afc0b90 100644
--- a/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobSpout.java
+++ b/eagle-jpm/eagle-jpm-spark-history/src/main/java/org/apache/eagle/jpm/spark/history/storm/SparkHistoryJobSpout.java
@@ -19,13 +19,6 @@
 
 package org.apache.eagle.jpm.spark.history.storm;
 
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichSpout;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.Utils;
 import org.apache.eagle.jpm.spark.history.SparkHistoryJobAppConfig;
 import org.apache.eagle.jpm.spark.history.status.JobHistoryZKStateManager;
 import org.apache.eagle.jpm.spark.history.status.ZKStateConstant;
@@ -33,6 +26,14 @@ import org.apache.eagle.jpm.util.Constants;
 import org.apache.eagle.jpm.util.resourcefetch.RMResourceFetcher;
 import org.apache.eagle.jpm.util.resourcefetch.ResourceFetcher;
 import org.apache.eagle.jpm.util.resourcefetch.model.AppInfo;
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichSpout;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Values;
+import backtype.storm.utils.Utils;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/eagle-jpm-util/src/main/java/org/apache/eagle/jpm/util/JobNameNormalization.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-util/src/main/java/org/apache/eagle/jpm/util/JobNameNormalization.java b/eagle-jpm/eagle-jpm-util/src/main/java/org/apache/eagle/jpm/util/JobNameNormalization.java
index 4e67f89..b3673ea 100644
--- a/eagle-jpm/eagle-jpm-util/src/main/java/org/apache/eagle/jpm/util/JobNameNormalization.java
+++ b/eagle-jpm/eagle-jpm-util/src/main/java/org/apache/eagle/jpm/util/JobNameNormalization.java
@@ -38,7 +38,7 @@ public class JobNameNormalization {
      * source string is regular expression, for example ^(.*)[0-9]{4}/[0-9]{2}/[0-9]{2}/[0-9]{2}$
      * target string is parameterized string, for example $1, $2
      */
-    private List<JobNameNormalizationRule> _rules = new ArrayList<JobNameNormalizationRule>();
+    private List<JobNameNormalizationRule> rules = new ArrayList<JobNameNormalizationRule>();
 
     private enum NormalizationOp {
         REPLACE("=>");
@@ -97,7 +97,7 @@ public class JobNameNormalization {
             r.pattern = Pattern.compile(elements[0].trim());
             r.op = op;
             r.target = elements[1].trim();
-            _rules.add(r);
+            rules.add(r);
             break;  //once one Op is matched, exit
         }
 
@@ -106,7 +106,7 @@ public class JobNameNormalization {
     public String normalize(String jobName) {
         String normalizedJobName = jobName;
         // go through each rules and do actions
-        for (JobNameNormalizationRule rule : _rules) {
+        for (JobNameNormalizationRule rule : rules) {
             Pattern p = rule.pattern;
             Matcher m = p.matcher(jobName);
             if (m.find()) {

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/ecbdec8e/eagle-jpm/pom.xml
----------------------------------------------------------------------
diff --git a/eagle-jpm/pom.xml b/eagle-jpm/pom.xml
index ae7a182..a96604d 100644
--- a/eagle-jpm/pom.xml
+++ b/eagle-jpm/pom.xml
@@ -56,4 +56,17 @@
             <version>${project.version}</version>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-checkstyle-plugin</artifactId>
+                <configuration>
+                    <failOnViolation>true</failOnViolation>
+                    <failsOnError>true</failsOnError>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
 </project>
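
Note: with failOnViolation and failsOnError turned on in the eagle-jpm parent pom, any remaining checkstyle violation in the eagle-jpm submodules will now break the build. A minimal way to verify this locally (assuming the checkstyle plugin and rules are inherited from the root pom as in the rest of the project; the exact invocation is illustrative, not part of this commit):

    # run from the repository root; checks only the eagle-jpm modules
    cd eagle-jpm && mvn checkstyle:check
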

