helix-commits mailing list archives

From zzh...@apache.org
Subject [42/51] [partial] [HELIX-198] Unify helix code style, rb=13710
Date Wed, 21 Aug 2013 20:43:55 GMT
http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdate.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdate.java b/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdate.java
index 22113a0..deef748 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdate.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdate.java
@@ -29,68 +29,55 @@ import org.codehaus.jackson.annotate.JsonProperty;
 /**
  * Unit of transfered ZNRecord updates. Contains the ZNRecord Value, zkPath
  * to store the update value, and the property type (used to merge the ZNRecord)
- * For ZNRecord subtraction, it is currently not supported yet. 
- * */
-public class ZNRecordUpdate
-{
-  public enum OpCode
-  {
+ * For ZNRecord subtraction, it is currently not supported yet.
+ */
+public class ZNRecordUpdate {
+  public enum OpCode {
     // TODO: create is not supported; but update will create if not exist
     CREATE,
     UPDATE,
     SET
   }
+
   final String _path;
   ZNRecord _record;
   final OpCode _code;
 
   @JsonCreator
-  public ZNRecordUpdate(@JsonProperty("path")String path, 
-                        @JsonProperty("opcode")OpCode code, 
-                        @JsonProperty("record")ZNRecord record)
-  {
+  public ZNRecordUpdate(@JsonProperty("path") String path, @JsonProperty("opcode") OpCode code,
+      @JsonProperty("record") ZNRecord record) {
     _path = path;
     _record = record;
     _code = code;
   }
-  
-  public String getPath()
-  {
+
+  public String getPath() {
     return _path;
   }
-  
-  public ZNRecord getRecord()
-  {
+
+  public ZNRecord getRecord() {
     return _record;
   }
-  
-  public OpCode getOpcode()
-  {
+
+  public OpCode getOpcode() {
     return _code;
   }
 
   @JsonIgnore(true)
-  public DataUpdater<ZNRecord> getZNRecordUpdater()
-  {
-    if(_code == OpCode.SET)
+  public DataUpdater<ZNRecord> getZNRecordUpdater() {
+    if (_code == OpCode.SET)
 
     {
-      return new ZNRecordUpdater(_record)
-      {
+      return new ZNRecordUpdater(_record) {
         @Override
-        public ZNRecord update(ZNRecord current)
-        {
+        public ZNRecord update(ZNRecord current) {
           return _record;
         }
       };
-    }
-    else if ((_code == OpCode.UPDATE))
-    {
+    } else if ((_code == OpCode.UPDATE)) {
       return new ZNRecordUpdater(_record);
-    }
-    else
-    {
+    } else {
       throw new UnsupportedOperationException("Not supported : " + _code);
     }
   }
-}
\ No newline at end of file
+}
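
For context on the API reformatted above, a minimal usage sketch of ZNRecordUpdate (not taken from the patch): the zkPath and field values are illustrative, and ZNRecord's single-argument id constructor is assumed.

import org.apache.helix.ZNRecord;
import org.apache.helix.controller.restlet.ZNRecordUpdate;
import org.apache.helix.controller.restlet.ZNRecordUpdate.OpCode;

public class ZNRecordUpdateSketch {
  public static void main(String[] args) {
    // Record carrying the fields to merge at the target zkPath (path and values are illustrative).
    ZNRecord record = new ZNRecord("TestDB");
    record.setSimpleField("key", "value");

    // UPDATE merges into the stored node via ZNRecordUpdater; SET replaces it outright.
    ZNRecordUpdate update =
        new ZNRecordUpdate("/CLUSTER/PROPERTYSTORE/TestDB", OpCode.UPDATE, record);

    // The updater returned here encodes the merge semantics chosen by the OpCode.
    ZNRecord stored = new ZNRecord("TestDB"); // stands in for the value currently in ZooKeeper
    ZNRecord merged = update.getZNRecordUpdater().update(stored);
    System.out.println(update.getPath() + " -> " + merged);
  }
}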

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdateResource.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdateResource.java b/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdateResource.java
index 52a3e7a..f704f54 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdateResource.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/restlet/ZNRecordUpdateResource.java
@@ -33,65 +33,56 @@ import org.restlet.resource.Representation;
 import org.restlet.resource.Resource;
 
 /**
- * REST resource for ZkPropertyTransfer server to receive PUT requests 
+ * REST resource for ZkPropertyTransfer server to receive PUT requests
  * that submits ZNRecordUpdates
- * */
-public class ZNRecordUpdateResource  extends Resource
-{
+ */
+public class ZNRecordUpdateResource extends Resource {
   public static final String UPDATEKEY = "ZNRecordUpdate";
   private static Logger LOG = Logger.getLogger(ZNRecordUpdateResource.class);
+
   @Override
-  public boolean allowGet()
-  {
+  public boolean allowGet() {
     return false;
   }
 
   @Override
-  public boolean allowPost()
-  {
+  public boolean allowPost() {
     return false;
   }
 
   @Override
-  public boolean allowPut()
-  {
+  public boolean allowPut() {
     return true;
   }
 
   @Override
-  public boolean allowDelete()
-  {
+  public boolean allowDelete() {
     return false;
   }
-  
+
   @Override
-  public void storeRepresentation(Representation entity)
-  {
-    try
-    {
+  public void storeRepresentation(Representation entity) {
+    try {
       ZKPropertyTransferServer server = ZKPropertyTransferServer.getInstance();
-      
+
       Form form = new Form(entity);
       String jsonPayload = form.getFirstValue(UPDATEKEY, true);
-      
+
       // Parse the map from zkPath --> ZNRecordUpdate from the payload
       StringReader sr = new StringReader(jsonPayload);
       ObjectMapper mapper = new ObjectMapper();
       TypeReference<TreeMap<String, ZNRecordUpdate>> typeRef =
-          new TypeReference<TreeMap<String, ZNRecordUpdate>>()
-          {
+          new TypeReference<TreeMap<String, ZNRecordUpdate>>() {
           };
       Map<String, ZNRecordUpdate> holderMap = mapper.readValue(sr, typeRef);
       // Enqueue the ZNRecordUpdate for sending
-      for(ZNRecordUpdate holder : holderMap.values())
-      {
+      for (ZNRecordUpdate holder : holderMap.values()) {
         server.enqueueData(holder);
-        LOG.info("Received " + holder.getPath() + " from " + getRequest().getClientInfo().getAddress());
+        LOG.info("Received " + holder.getPath() + " from "
+            + getRequest().getClientInfo().getAddress());
       }
       getResponse().setStatus(Status.SUCCESS_OK);
-    }
-    catch(Exception e)
-    {
+    } catch (Exception e) {
       LOG.error("", e);
       getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
     }

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferApplication.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferApplication.java b/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferApplication.java
index edd0ff6..f345fe2 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferApplication.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferApplication.java
@@ -26,24 +26,20 @@ import org.restlet.Router;
 
 /**
  * Restlet application for ZkPropertyTransfer server
- * */
-public class ZkPropertyTransferApplication extends Application
-{
-  public ZkPropertyTransferApplication()
-  {
+ */
+public class ZkPropertyTransferApplication extends Application {
+  public ZkPropertyTransferApplication() {
     super();
   }
 
-  public ZkPropertyTransferApplication(Context context)
-  {
+  public ZkPropertyTransferApplication(Context context) {
     super(context);
   }
-  
+
   @Override
-  public Restlet createRoot()
-  {
+  public Restlet createRoot() {
     Router router = new Router(getContext());
     router.attach("/" + ZKPropertyTransferServer.RESTRESOURCENAME, ZNRecordUpdateResource.class);
     return router;
   }
-}
\ No newline at end of file
+}
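
A rough sketch of how this Restlet application could be hosted in a Component; the port and the exact wiring (normally done inside ZKPropertyTransferServer) are assumptions for illustration, not taken from this patch.

import org.restlet.Component;
import org.restlet.Context;
import org.restlet.data.Protocol;

import org.apache.helix.controller.restlet.ZkPropertyTransferApplication;

public class TransferServerSketch {
  public static void main(String[] args) throws Exception {
    Component component = new Component();
    component.getServers().add(Protocol.HTTP, 8025); // port is illustrative

    // Give the application a child context of the component.
    Context context = component.getContext().createChildContext();
    component.getDefaultHost().attach(new ZkPropertyTransferApplication(context));

    component.start(); // ZNRecordUpdateResource is now reachable under the attached route
  }
}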

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferClient.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferClient.java b/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferClient.java
index 475eefd..ed1a114 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferClient.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/restlet/ZkPropertyTransferClient.java
@@ -43,151 +43,126 @@ import org.restlet.data.Request;
 import org.restlet.data.Response;
 import org.restlet.data.Status;
 
-public class ZkPropertyTransferClient
-{
+public class ZkPropertyTransferClient {
   private static Logger LOG = Logger.getLogger(ZkPropertyTransferClient.class);
   public static final int DEFAULT_MAX_CONCURRENTTASKS = 2;
   public static int SEND_PERIOD = 10 * 1000;
-  
+
   public static final String USE_PROPERTYTRANSFER = "UsePropertyTransfer";
-  
+
   int _maxConcurrentTasks;
   ExecutorService _executorService;
   Client[] _clients;
   AtomicInteger _requestCount = new AtomicInteger(0);
-  
+
   // ZNRecord update buffer: key is the zkPath, value is the ZNRecordUpdate
-  AtomicReference<ConcurrentHashMap<String, ZNRecordUpdate>> _dataBufferRef
-    = new AtomicReference<ConcurrentHashMap<String, ZNRecordUpdate>>();
+  AtomicReference<ConcurrentHashMap<String, ZNRecordUpdate>> _dataBufferRef =
+      new AtomicReference<ConcurrentHashMap<String, ZNRecordUpdate>>();
   Timer _timer;
   volatile String _webServiceUrl = "";
-  
-  public ZkPropertyTransferClient(int maxConcurrentTasks)
-  {
+
+  public ZkPropertyTransferClient(int maxConcurrentTasks) {
     _maxConcurrentTasks = maxConcurrentTasks;
     _executorService = Executors.newFixedThreadPool(_maxConcurrentTasks);
     _clients = new Client[_maxConcurrentTasks];
-    for(int i = 0; i< _clients.length; i++)
-    {
+    for (int i = 0; i < _clients.length; i++) {
       _clients[i] = new Client(Protocol.HTTP);
     }
     _timer = new Timer(true);
     _timer.schedule(new SendZNRecordTimerTask(), SEND_PERIOD, SEND_PERIOD);
     _dataBufferRef.getAndSet(new ConcurrentHashMap<String, ZNRecordUpdate>());
   }
-  
-  class SendZNRecordTimerTask extends TimerTask
-  {
+
+  class SendZNRecordTimerTask extends TimerTask {
     @Override
-    public void run()
-    { 
+    public void run() {
       sendUpdateBatch();
     }
   }
-  
-  public void enqueueZNRecordUpdate(ZNRecordUpdate update, String webserviceUrl)
-  {
-    try
-    {
-      LOG.info("Enqueue update to " + update.getPath() + " opcode: " + update.getOpcode() + " to " + webserviceUrl);
+
+  public void enqueueZNRecordUpdate(ZNRecordUpdate update, String webserviceUrl) {
+    try {
+      LOG.info("Enqueue update to " + update.getPath() + " opcode: " + update.getOpcode() + " to "
+          + webserviceUrl);
       _webServiceUrl = webserviceUrl;
       update.getRecord().setSimpleField(USE_PROPERTYTRANSFER, "true");
-      synchronized(_dataBufferRef)
-      {
-        if(_dataBufferRef.get().containsKey(update._path))
-        {
+      synchronized (_dataBufferRef) {
+        if (_dataBufferRef.get().containsKey(update._path)) {
           ZNRecord oldVal = _dataBufferRef.get().get(update.getPath()).getRecord();
           oldVal = update.getZNRecordUpdater().update(oldVal);
           _dataBufferRef.get().get(update.getPath())._record = oldVal;
-        }
-        else
-        {
+        } else {
           _dataBufferRef.get().put(update.getPath(), update);
         }
       }
-    }
-    catch(Exception e)
-    {
+    } catch (Exception e) {
       LOG.error("", e);
     }
   }
-  
-  void sendUpdateBatch()
-  {
-    LOG.debug("Actual sending update with " + _dataBufferRef.get().size() + " updates to " + _webServiceUrl);
-    Map<String, ZNRecordUpdate> updateCache  = null;
-    
-    synchronized(_dataBufferRef)
-    {
+
+  void sendUpdateBatch() {
+    LOG.debug("Actual sending update with " + _dataBufferRef.get().size() + " updates to "
+        + _webServiceUrl);
+    Map<String, ZNRecordUpdate> updateCache = null;
+
+    synchronized (_dataBufferRef) {
       updateCache = _dataBufferRef.getAndSet(new ConcurrentHashMap<String, ZNRecordUpdate>());
     }
-    
-    if(updateCache != null && updateCache.size() > 0)
-    {
-      ZNRecordUpdateUploadTask task = new ZNRecordUpdateUploadTask(updateCache, _webServiceUrl, _clients[_requestCount.intValue() % _maxConcurrentTasks]);
+
+    if (updateCache != null && updateCache.size() > 0) {
+      ZNRecordUpdateUploadTask task =
+          new ZNRecordUpdateUploadTask(updateCache, _webServiceUrl,
+              _clients[_requestCount.intValue() % _maxConcurrentTasks]);
       _requestCount.incrementAndGet();
       _executorService.submit(task);
-      LOG.trace("Queue size :" + ((ThreadPoolExecutor)_executorService).getQueue().size());
+      LOG.trace("Queue size :" + ((ThreadPoolExecutor) _executorService).getQueue().size());
     }
   }
-  
-  public void shutdown()
-  {
+
+  public void shutdown() {
     LOG.info("Shutting down ZkPropertyTransferClient");
     _executorService.shutdown();
     _timer.cancel();
-    for(Client client: _clients)
-    {
-      try
-      {
+    for (Client client : _clients) {
+      try {
         client.stop();
-      }
-      catch (Exception e)
-      {
+      } catch (Exception e) {
         LOG.error("", e);
       }
     }
   }
-  
-  class ZNRecordUpdateUploadTask implements Callable<Void>
-  {
+
+  class ZNRecordUpdateUploadTask implements Callable<Void> {
     Map<String, ZNRecordUpdate> _updateMap;
     String _webServiceUrl;
     Client _client;
-    
-    ZNRecordUpdateUploadTask(Map<String, ZNRecordUpdate> update, String webserviceUrl, Client client)
-    {
+
+    ZNRecordUpdateUploadTask(Map<String, ZNRecordUpdate> update, String webserviceUrl, Client client) {
       _updateMap = update;
       _webServiceUrl = webserviceUrl;
       _client = client;
     }
-    
+
     @Override
-    public Void call() throws Exception
-    {
+    public Void call() throws Exception {
       LOG.debug("Actual sending update with " + _updateMap.size() + " updates to " + _webServiceUrl);
       long time = System.currentTimeMillis();
       Reference resourceRef = new Reference(_webServiceUrl);
       Request request = new Request(Method.PUT, resourceRef);
-      
+
       ObjectMapper mapper = new ObjectMapper();
       StringWriter sw = new StringWriter();
-      try
-      {
+      try {
         mapper.writeValue(sw, _updateMap);
-      }
-      catch (Exception e)
-      {
-        LOG.error("",e);
+      } catch (Exception e) {
+        LOG.error("", e);
       }
 
-      request.setEntity(
-          ZNRecordUpdateResource.UPDATEKEY + "=" + sw, MediaType.APPLICATION_ALL);
+      request.setEntity(ZNRecordUpdateResource.UPDATEKEY + "=" + sw, MediaType.APPLICATION_ALL);
       // This is a sync call. See com.noelios.restlet.http.StreamClientCall.sendRequest()
       Response response = _client.handle(request);
-      
-      if(response.getStatus().getCode() != Status.SUCCESS_OK.getCode())
-      {
+
+      if (response.getStatus().getCode() != Status.SUCCESS_OK.getCode()) {
         LOG.error("Status : " + response.getStatus());
       }
       LOG.info("Using time : " + (System.currentTimeMillis() - time));
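
A minimal sketch of driving this client; the web service URL, zkPath, sleep, and ZNRecord's single-argument constructor are assumptions for illustration.

import org.apache.helix.ZNRecord;
import org.apache.helix.controller.restlet.ZNRecordUpdate;
import org.apache.helix.controller.restlet.ZNRecordUpdate.OpCode;
import org.apache.helix.controller.restlet.ZkPropertyTransferClient;

public class TransferClientSketch {
  public static void main(String[] args) throws InterruptedException {
    ZkPropertyTransferClient client =
        new ZkPropertyTransferClient(ZkPropertyTransferClient.DEFAULT_MAX_CONCURRENTTASKS);

    ZNRecord record = new ZNRecord("statusUpdate");
    record.setSimpleField("key", "value");
    ZNRecordUpdate update =
        new ZNRecordUpdate("/CLUSTER/STATUSUPDATES/statusUpdate", OpCode.UPDATE, record);

    // Updates are buffered per zkPath and flushed by the timer every SEND_PERIOD ms.
    client.enqueueZNRecordUpdate(update, "http://localhost:8025/ZNRecordUpdates");

    Thread.sleep(ZkPropertyTransferClient.SEND_PERIOD + 1000); // allow at least one flush
    client.shutdown();
  }
}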

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/AttributeName.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/AttributeName.java b/helix-core/src/main/java/org/apache/helix/controller/stages/AttributeName.java
index 16e7c97..ae0278b 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/AttributeName.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/AttributeName.java
@@ -19,8 +19,7 @@ package org.apache.helix.controller.stages;
  * under the License.
  */
 
-public enum AttributeName
-{
+public enum AttributeName {
   RESOURCES,
   BEST_POSSIBLE_STATE,
   CURRENT_STATE,

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateCalcStage.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateCalcStage.java b/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateCalcStage.java
index 6c6fe08..598c318 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateCalcStage.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateCalcStage.java
@@ -34,49 +34,38 @@ import org.apache.helix.model.IdealState.RebalanceMode;
 import org.apache.helix.util.HelixUtil;
 import org.apache.log4j.Logger;
 
-
 /**
  * For partition compute best possible (instance,state) pair based on
  * IdealState,StateModel,LiveInstance
- *
  */
-public class BestPossibleStateCalcStage extends AbstractBaseStage
-{
-  private static final Logger logger =
-      Logger.getLogger(BestPossibleStateCalcStage.class.getName());
+public class BestPossibleStateCalcStage extends AbstractBaseStage {
+  private static final Logger logger = Logger.getLogger(BestPossibleStateCalcStage.class.getName());
 
   @Override
-  public void process(ClusterEvent event) throws Exception
-  {
+  public void process(ClusterEvent event) throws Exception {
     long startTime = System.currentTimeMillis();
     logger.info("START BestPossibleStateCalcStage.process()");
 
     CurrentStateOutput currentStateOutput =
         event.getAttribute(AttributeName.CURRENT_STATE.toString());
-    Map<String, Resource> resourceMap =
-        event.getAttribute(AttributeName.RESOURCES.toString());
+    Map<String, Resource> resourceMap = event.getAttribute(AttributeName.RESOURCES.toString());
     ClusterDataCache cache = event.getAttribute("ClusterDataCache");
 
-    if (currentStateOutput == null || resourceMap == null || cache == null)
-    {
+    if (currentStateOutput == null || resourceMap == null || cache == null) {
       throw new StageException("Missing attributes in event:" + event
           + ". Requires CURRENT_STATE|RESOURCES|DataCache");
     }
 
     BestPossibleStateOutput bestPossibleStateOutput =
         compute(event, resourceMap, currentStateOutput);
-    event.addAttribute(AttributeName.BEST_POSSIBLE_STATE.toString(),
-                       bestPossibleStateOutput);
+    event.addAttribute(AttributeName.BEST_POSSIBLE_STATE.toString(), bestPossibleStateOutput);
 
     long endTime = System.currentTimeMillis();
-    logger.info("END BestPossibleStateCalcStage.process(). took: "
-        + (endTime - startTime) + " ms");
+    logger.info("END BestPossibleStateCalcStage.process(). took: " + (endTime - startTime) + " ms");
   }
 
-  private BestPossibleStateOutput compute(ClusterEvent event,
-                                          Map<String, Resource> resourceMap,
-                                          CurrentStateOutput currentStateOutput)
-  {
+  private BestPossibleStateOutput compute(ClusterEvent event, Map<String, Resource> resourceMap,
+      CurrentStateOutput currentStateOutput) {
     // for each ideal state
     // read the state model def
     // for each resource
@@ -86,8 +75,7 @@ public class BestPossibleStateCalcStage extends AbstractBaseStage
 
     BestPossibleStateOutput output = new BestPossibleStateOutput();
 
-    for (String resourceName : resourceMap.keySet())
-    {
+    for (String resourceName : resourceMap.keySet()) {
       logger.debug("Processing resource:" + resourceName);
 
       Resource resource = resourceMap.get(resourceName);
@@ -95,54 +83,40 @@ public class BestPossibleStateCalcStage extends AbstractBaseStage
       // from the current state
       IdealState idealState = cache.getIdealState(resourceName);
 
-      if (idealState == null)
-      {
+      if (idealState == null) {
         // if ideal state is deleted, use an empty one
         logger.info("resource:" + resourceName + " does not exist anymore");
         idealState = new IdealState(resourceName);
       }
 
       Rebalancer rebalancer = null;
-      if(idealState.getRebalanceMode() == RebalanceMode.USER_DEFINED
-          && idealState.getRebalancerClassName() != null)
-      {
+      if (idealState.getRebalanceMode() == RebalanceMode.USER_DEFINED
+          && idealState.getRebalancerClassName() != null) {
         String rebalancerClassName = idealState.getRebalancerClassName();
-        logger.info("resource " + resourceName + " use idealStateRebalancer " + rebalancerClassName);
-        try
-        {
-          rebalancer = (Rebalancer) (HelixUtil.loadClass(
-              getClass(), rebalancerClassName).newInstance());
-        }
-        catch(Exception e)
-        {
-          logger.warn("Exception while invoking custom rebalancer class:" + rebalancerClassName , e);
+        logger
+            .info("resource " + resourceName + " use idealStateRebalancer " + rebalancerClassName);
+        try {
+          rebalancer =
+              (Rebalancer) (HelixUtil.loadClass(getClass(), rebalancerClassName).newInstance());
+        } catch (Exception e) {
+          logger.warn("Exception while invoking custom rebalancer class:" + rebalancerClassName, e);
         }
       }
-      if (rebalancer == null)
-      {
-        if (idealState.getRebalanceMode() == RebalanceMode.FULL_AUTO)
-        {
+      if (rebalancer == null) {
+        if (idealState.getRebalanceMode() == RebalanceMode.FULL_AUTO) {
           rebalancer = new AutoRebalancer();
-        }
-        else if (idealState.getRebalanceMode() == RebalanceMode.SEMI_AUTO)
-        {
+        } else if (idealState.getRebalanceMode() == RebalanceMode.SEMI_AUTO) {
           rebalancer = new SemiAutoRebalancer();
-        }
-        else
-        {
+        } else {
           rebalancer = new CustomRebalancer();
         }
       }
 
-      ResourceMapping partitionStateAssignment
-        = rebalancer.computeBestPossiblePartitionState(cache,
-                                                       idealState,
-                                                       resource,
-                                                       currentStateOutput);
-      for (Partition partition : resource.getPartitions())
-      {
-        Map<String, String> newStateMap =
-            partitionStateAssignment.getInstanceStateMap(partition);
+      ResourceMapping partitionStateAssignment =
+          rebalancer.computeBestPossiblePartitionState(cache, idealState, resource,
+              currentStateOutput);
+      for (Partition partition : resource.getPartitions()) {
+        Map<String, String> newStateMap = partitionStateAssignment.getInstanceStateMap(partition);
         output.setState(resourceName, partition, newStateMap);
       }
     }
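
A sketch of how a stage like this is driven; in the real controller pipeline the cache, resource map and current-state output come from the earlier read/compute stages rather than being passed in like this.

import java.util.Map;

import org.apache.helix.controller.stages.AttributeName;
import org.apache.helix.controller.stages.BestPossibleStateCalcStage;
import org.apache.helix.controller.stages.BestPossibleStateOutput;
import org.apache.helix.controller.stages.ClusterDataCache;
import org.apache.helix.controller.stages.ClusterEvent;
import org.apache.helix.controller.stages.CurrentStateOutput;
import org.apache.helix.model.Resource;

public class StageDriverSketch {
  static BestPossibleStateOutput runBestPossible(ClusterDataCache cache,
      Map<String, Resource> resourceMap, CurrentStateOutput currentStateOutput) throws Exception {
    ClusterEvent event = new ClusterEvent("sketch");
    // The stage throws a StageException if any of these three attributes is missing.
    event.addAttribute("ClusterDataCache", cache);
    event.addAttribute(AttributeName.RESOURCES.toString(), resourceMap);
    event.addAttribute(AttributeName.CURRENT_STATE.toString(), currentStateOutput);

    new BestPossibleStateCalcStage().process(event);

    BestPossibleStateOutput output =
        event.getAttribute(AttributeName.BEST_POSSIBLE_STATE.toString());
    return output;
  }
}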

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateOutput.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateOutput.java b/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateOutput.java
index 18eca4f..3da9bef 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateOutput.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/BestPossibleStateOutput.java
@@ -25,57 +25,45 @@ import java.util.Map;
 
 import org.apache.helix.model.Partition;
 
-
-public class BestPossibleStateOutput
-{
+public class BestPossibleStateOutput {
   // resource->partition->instance->state
   Map<String, Map<Partition, Map<String, String>>> _dataMap;
 
-  public BestPossibleStateOutput()
-  {
+  public BestPossibleStateOutput() {
     _dataMap = new HashMap<String, Map<Partition, Map<String, String>>>();
   }
 
   public void setState(String resourceName, Partition resource,
-      Map<String, String> bestInstanceStateMappingForResource)
-  {
-    if (!_dataMap.containsKey(resourceName))
-    {
-      _dataMap.put(resourceName,
-          new HashMap<Partition, Map<String, String>>());
+      Map<String, String> bestInstanceStateMappingForResource) {
+    if (!_dataMap.containsKey(resourceName)) {
+      _dataMap.put(resourceName, new HashMap<Partition, Map<String, String>>());
     }
     Map<Partition, Map<String, String>> map = _dataMap.get(resourceName);
     map.put(resource, bestInstanceStateMappingForResource);
   }
 
-  public Map<String, String> getInstanceStateMap(String resourceName,
-      Partition resource)
-  {
+  public Map<String, String> getInstanceStateMap(String resourceName, Partition resource) {
     Map<Partition, Map<String, String>> map = _dataMap.get(resourceName);
-    if (map != null)
-    {
+    if (map != null) {
       return map.get(resource);
     }
     return Collections.emptyMap();
   }
 
-  public Map<Partition, Map<String, String>> getResourceMap(String resourceName)
-  {
+  public Map<Partition, Map<String, String>> getResourceMap(String resourceName) {
     Map<Partition, Map<String, String>> map = _dataMap.get(resourceName);
-    if (map != null)
-    {
+    if (map != null) {
       return map;
     }
     return Collections.emptyMap();
   }
-  
+
   public Map<String, Map<Partition, Map<String, String>>> getStateMap() {
     return _dataMap;
   }
 
   @Override
-  public String toString()
-  {
+  public String toString() {
     return _dataMap.toString();
   }
 }
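
A small sketch of the resource -> partition -> (instance -> state) structure this class wraps; the Partition(String) constructor and the instance/state names are assumptions for illustration.

import java.util.HashMap;
import java.util.Map;

import org.apache.helix.controller.stages.BestPossibleStateOutput;
import org.apache.helix.model.Partition;

public class BestPossibleOutputSketch {
  public static void main(String[] args) {
    BestPossibleStateOutput output = new BestPossibleStateOutput();

    Partition partition = new Partition("TestDB_0");
    Map<String, String> instanceStates = new HashMap<String, String>();
    instanceStates.put("localhost_12918", "MASTER");
    instanceStates.put("localhost_12919", "SLAVE");

    // resource -> partition -> (instance -> state)
    output.setState("TestDB", partition, instanceStates);

    System.out.println(output.getInstanceStateMap("TestDB", partition)); // instance -> state
    System.out.println(output.getResourceMap("UnknownDB")); // empty map rather than null
  }
}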

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterDataCache.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterDataCache.java b/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterDataCache.java
index f4e20a0..b90880e 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterDataCache.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterDataCache.java
@@ -38,23 +38,19 @@ import org.apache.helix.model.StateModelDefinition;
 import org.apache.helix.model.ClusterConstraints.ConstraintType;
 import org.apache.log4j.Logger;
 
-
 /**
  * Reads the data from the cluster using data accessor. This output ClusterData which
  * provides useful methods to search/lookup properties
- * 
- * 
  */
-public class ClusterDataCache
-{
+public class ClusterDataCache {
 
-  Map<String, LiveInstance>                           _liveInstanceMap;
-  Map<String, IdealState>                             _idealStateMap;
-  Map<String, StateModelDefinition>                   _stateModelDefMap;
-  Map<String, InstanceConfig>                         _instanceConfigMap;
-  Map<String, ClusterConstraints>                     _constraintMap;
+  Map<String, LiveInstance> _liveInstanceMap;
+  Map<String, IdealState> _idealStateMap;
+  Map<String, StateModelDefinition> _stateModelDefMap;
+  Map<String, InstanceConfig> _instanceConfigMap;
+  Map<String, ClusterConstraints> _constraintMap;
   Map<String, Map<String, Map<String, CurrentState>>> _currentStateMap;
-  Map<String, Map<String, Message>>                   _messageMap;
+  Map<String, Map<String, Message>> _messageMap;
 
   // Map<String, Map<String, HealthStat>> _healthStatMap;
   // private HealthStat _globalStats; // DON'T THINK I WILL USE THIS ANYMORE
@@ -62,111 +58,93 @@ public class ClusterDataCache
   // private Alerts _alerts;
   // private AlertStatus _alertStatus;
 
-  private static final Logger                         LOG =
-                                                              Logger.getLogger(ClusterDataCache.class.getName());
+  private static final Logger LOG = Logger.getLogger(ClusterDataCache.class.getName());
 
   /**
    * This refreshes the cluster data by re-fetching the data from zookeeper in
    * an efficient way
-   * 
    * @param accessor
    * @return
    */
-  public boolean refresh(HelixDataAccessor accessor)
-  {
+  public boolean refresh(HelixDataAccessor accessor) {
     Builder keyBuilder = accessor.keyBuilder();
     _idealStateMap = accessor.getChildValuesMap(keyBuilder.idealStates());
     _liveInstanceMap = accessor.getChildValuesMap(keyBuilder.liveInstances());
 
-    for (LiveInstance instance : _liveInstanceMap.values())
-    {
-      LOG.trace("live instance: " + instance.getInstanceName() + " "
-          + instance.getSessionId());
+    for (LiveInstance instance : _liveInstanceMap.values()) {
+      LOG.trace("live instance: " + instance.getInstanceName() + " " + instance.getSessionId());
     }
 
     _stateModelDefMap = accessor.getChildValuesMap(keyBuilder.stateModelDefs());
     _instanceConfigMap = accessor.getChildValuesMap(keyBuilder.instanceConfigs());
-    _constraintMap =
-        accessor.getChildValuesMap(keyBuilder.constraints());
+    _constraintMap = accessor.getChildValuesMap(keyBuilder.constraints());
 
-    Map<String, Map<String, Message>> msgMap =
-        new HashMap<String, Map<String, Message>>();
-    for (String instanceName : _liveInstanceMap.keySet())
-    {
-      Map<String, Message> map =
-          accessor.getChildValuesMap(keyBuilder.messages(instanceName));
+    Map<String, Map<String, Message>> msgMap = new HashMap<String, Map<String, Message>>();
+    for (String instanceName : _liveInstanceMap.keySet()) {
+      Map<String, Message> map = accessor.getChildValuesMap(keyBuilder.messages(instanceName));
       msgMap.put(instanceName, map);
     }
     _messageMap = Collections.unmodifiableMap(msgMap);
 
     Map<String, Map<String, Map<String, CurrentState>>> allCurStateMap =
         new HashMap<String, Map<String, Map<String, CurrentState>>>();
-    for (String instanceName : _liveInstanceMap.keySet())
-    {
+    for (String instanceName : _liveInstanceMap.keySet()) {
       LiveInstance liveInstance = _liveInstanceMap.get(instanceName);
       String sessionId = liveInstance.getSessionId();
-      if (!allCurStateMap.containsKey(instanceName))
-      {
+      if (!allCurStateMap.containsKey(instanceName)) {
         allCurStateMap.put(instanceName, new HashMap<String, Map<String, CurrentState>>());
       }
-      Map<String, Map<String, CurrentState>> curStateMap =
-          allCurStateMap.get(instanceName);
+      Map<String, Map<String, CurrentState>> curStateMap = allCurStateMap.get(instanceName);
       Map<String, CurrentState> map =
           accessor.getChildValuesMap(keyBuilder.currentStates(instanceName, sessionId));
       curStateMap.put(sessionId, map);
     }
 
-    for (String instance : allCurStateMap.keySet())
-    {
-      allCurStateMap.put(instance,
-                         Collections.unmodifiableMap(allCurStateMap.get(instance)));
+    for (String instance : allCurStateMap.keySet()) {
+      allCurStateMap.put(instance, Collections.unmodifiableMap(allCurStateMap.get(instance)));
     }
     _currentStateMap = Collections.unmodifiableMap(allCurStateMap);
 
     return true;
   }
+
   /**
    * Retrieves the idealstates for all resources
    * @return
    */
-  public Map<String, IdealState> getIdealStates()
-  {
+  public Map<String, IdealState> getIdealStates() {
     return _idealStateMap;
   }
+
   /**
    * Returns the LiveInstances for each of the instances that are curretnly up and running
    * @return
    */
-  public Map<String, LiveInstance> getLiveInstances()
-  {
+  public Map<String, LiveInstance> getLiveInstances() {
     return _liveInstanceMap;
   }
+
   /**
-   * Provides the current state of the node for a given session id, 
+   * Provides the current state of the node for a given session id,
    * the sessionid can be got from LiveInstance
    * @param instanceName
    * @param clientSessionId
    * @return
    */
-  public Map<String, CurrentState> getCurrentState(String instanceName,
-                                                   String clientSessionId)
-  {
+  public Map<String, CurrentState> getCurrentState(String instanceName, String clientSessionId) {
     return _currentStateMap.get(instanceName).get(clientSessionId);
   }
+
   /**
    * Provides a list of current outstanding transitions on a given instance.
    * @param instanceName
    * @return
    */
-  public Map<String, Message> getMessages(String instanceName)
-  {
+  public Map<String, Message> getMessages(String instanceName) {
     Map<String, Message> map = _messageMap.get(instanceName);
-    if (map != null)
-    {
+    if (map != null) {
       return map;
-    }
-    else
-    {
+    } else {
       return Collections.emptyMap();
     }
   }
@@ -202,87 +180,73 @@ public class ClusterDataCache
   // return Collections.emptyMap();
   // }
   // }
- /**
-  * Provides the state model definition for a given state model
-  * @param stateModelDefRef
-  * @return
-  */
-  public StateModelDefinition getStateModelDef(String stateModelDefRef)
-  {
+  /**
+   * Provides the state model definition for a given state model
+   * @param stateModelDefRef
+   * @return
+   */
+  public StateModelDefinition getStateModelDef(String stateModelDefRef) {
 
     return _stateModelDefMap.get(stateModelDefRef);
   }
+
   /**
    * Provides the idealstate for a given resource
    * @param resourceName
    * @return
    */
-  public IdealState getIdealState(String resourceName)
-  {
+  public IdealState getIdealState(String resourceName) {
     return _idealStateMap.get(resourceName);
   }
+
   /**
    * Returns the instance config map
    * @return
    */
-  public Map<String, InstanceConfig> getInstanceConfigMap()
-  {
+  public Map<String, InstanceConfig> getInstanceConfigMap() {
     return _instanceConfigMap;
   }
-  
+
   /**
-   * Some partitions might be disabled on specific nodes. 
+   * Some partitions might be disabled on specific nodes.
    * This method allows one to fetch the set of nodes where a given partition is disabled
    * @param partition
    * @return
    */
-  public Set<String> getDisabledInstancesForPartition(String partition)
-  {
+  public Set<String> getDisabledInstancesForPartition(String partition) {
     Set<String> disabledInstancesSet = new HashSet<String>();
-    for (String instance : _instanceConfigMap.keySet())
-    {
+    for (String instance : _instanceConfigMap.keySet()) {
       InstanceConfig config = _instanceConfigMap.get(instance);
       if (config.getInstanceEnabled() == false
-          || config.getInstanceEnabledForPartition(partition) == false)
-      {
+          || config.getInstanceEnabledForPartition(partition) == false) {
         disabledInstancesSet.add(instance);
       }
     }
     return disabledInstancesSet;
   }
+
   /**
    * Returns the number of replicas for a given resource.
    * @param resourceName
    * @return
    */
-  public int getReplicas(String resourceName)
-  {
+  public int getReplicas(String resourceName) {
     int replicas = -1;
 
-    if (_idealStateMap.containsKey(resourceName))
-    {
+    if (_idealStateMap.containsKey(resourceName)) {
       String replicasStr = _idealStateMap.get(resourceName).getReplicas();
 
-      if (replicasStr != null)
-      {
-        if (replicasStr.equals(StateModelToken.ANY_LIVEINSTANCE.toString()))
-        {
+      if (replicasStr != null) {
+        if (replicasStr.equals(StateModelToken.ANY_LIVEINSTANCE.toString())) {
           replicas = _liveInstanceMap.size();
-        }
-        else
-        {
-          try
-          {
+        } else {
+          try {
             replicas = Integer.parseInt(replicasStr);
-          }
-          catch (Exception e)
-          {
+          } catch (Exception e) {
             LOG.error("invalid replicas string: " + replicasStr);
           }
         }
-      }
-      else
-      {
+      } else {
         LOG.error("idealState for resource: " + resourceName + " does NOT have replicas");
       }
     }
@@ -294,21 +258,18 @@ public class ClusterDataCache
    * @param type
    * @return
    */
-  public ClusterConstraints getConstraint(ConstraintType type)
-  {
-    if (_constraintMap != null)
-    {
+  public ClusterConstraints getConstraint(ConstraintType type) {
+    if (_constraintMap != null) {
       return _constraintMap.get(type.toString());
     }
     return null;
   }
-  
+
   /**
    * toString method to print the entire cluster state
    */
   @Override
-  public String toString()
-  {
+  public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("liveInstaceMap:" + _liveInstanceMap).append("\n");
     sb.append("idealStateMap:" + _idealStateMap).append("\n");
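
A sketch of how the cache is refreshed and queried; obtaining the accessor from a HelixManager and the resource name used below are assumptions for illustration.

import java.util.Map;

import org.apache.helix.HelixDataAccessor;
import org.apache.helix.HelixManager;
import org.apache.helix.controller.stages.ClusterDataCache;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.LiveInstance;

public class CacheSketch {
  static void dump(HelixManager manager) {
    HelixDataAccessor accessor = manager.getHelixDataAccessor();

    ClusterDataCache cache = new ClusterDataCache();
    cache.refresh(accessor); // one batched read of ideal states, live instances, configs, messages

    Map<String, LiveInstance> liveInstances = cache.getLiveInstances();
    System.out.println("live instances: " + liveInstances.keySet());

    IdealState idealState = cache.getIdealState("TestDB"); // resource name is illustrative
    if (idealState != null) {
      System.out.println("replicas for TestDB: " + cache.getReplicas("TestDB"));
    }
  }
}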

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterEvent.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterEvent.java b/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterEvent.java
index 14b480d..66957df 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterEvent.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/ClusterEvent.java
@@ -24,52 +24,43 @@ import java.util.Map;
 
 import org.apache.log4j.Logger;
 
-public class ClusterEvent
-{
-  private static final Logger logger = Logger.getLogger(ClusterEvent.class
-      .getName());
+public class ClusterEvent {
+  private static final Logger logger = Logger.getLogger(ClusterEvent.class.getName());
   private final String _eventName;
   private final Map<String, Object> _eventAttributeMap;
 
-  public ClusterEvent(String name)
-  {
+  public ClusterEvent(String name) {
     _eventName = name;
     _eventAttributeMap = new HashMap<String, Object>();
   }
 
-  public void addAttribute(String attrName, Object attrValue)
-  {
-    if (logger.isTraceEnabled())
-    {
+  public void addAttribute(String attrName, Object attrValue) {
+    if (logger.isTraceEnabled()) {
       logger.trace("Adding attribute:" + attrName);
       logger.trace(" attribute value:" + attrValue);
     }
-   
+
     _eventAttributeMap.put(attrName, attrValue);
   }
 
-  public String getName()
-  {
+  public String getName() {
     return _eventName;
   }
 
   @SuppressWarnings("unchecked")
-  public <T extends Object> T getAttribute(String attrName)
-  {
+  public <T extends Object> T getAttribute(String attrName) {
     Object ret = _eventAttributeMap.get(attrName);
-    if (ret != null)
-    {
+    if (ret != null) {
       return (T) ret;
     }
     return null;
   }
-  
+
   @Override
-  public String toString()
-  {
+  public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("name:"+ _eventName).append("\n");
-    for(String key:_eventAttributeMap.keySet()){
+    sb.append("name:" + _eventName).append("\n");
+    for (String key : _eventAttributeMap.keySet()) {
       sb.append(key).append(":").append(_eventAttributeMap.get(key)).append("\n");
     }
     return sb.toString();

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/CompatibilityCheckStage.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/CompatibilityCheckStage.java b/helix-core/src/main/java/org/apache/helix/controller/stages/CompatibilityCheckStage.java
index b7dbc24..d8f98ed 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/CompatibilityCheckStage.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/CompatibilityCheckStage.java
@@ -29,38 +29,32 @@ import org.apache.helix.model.LiveInstance;
 import org.apache.log4j.Logger;
 
 /**
- * controller checks if participant version is compatible 
- *
+ * controller checks if participant version is compatible
  */
 public class CompatibilityCheckStage extends AbstractBaseStage {
-  private static final Logger LOG = Logger
-      .getLogger(CompatibilityCheckStage.class.getName());
+  private static final Logger LOG = Logger.getLogger(CompatibilityCheckStage.class.getName());
 
   @Override
-  public void process(ClusterEvent event) throws Exception
-  {
+  public void process(ClusterEvent event) throws Exception {
     HelixManager manager = event.getAttribute("helixmanager");
     ClusterDataCache cache = event.getAttribute("ClusterDataCache");
-    if (manager == null || cache == null)
-    {
+    if (manager == null || cache == null) {
       throw new StageException("Missing attributes in event:" + event
           + ". Requires HelixManager | DataCache");
     }
 
     HelixManagerProperties properties = manager.getProperties();
     Map<String, LiveInstance> liveInstanceMap = cache.getLiveInstances();
-    for (LiveInstance liveInstance : liveInstanceMap.values())
-    {
+    for (LiveInstance liveInstance : liveInstanceMap.values()) {
       String participantVersion = liveInstance.getHelixVersion();
-      if (!properties.isParticipantCompatible(participantVersion))
-      {
-        String errorMsg = "incompatible participant. pipeline will not continue. "
-                        + "controller: " + manager.getInstanceName() 
-                        + ", controllerVersion: " + properties.getVersion()
-                        + ", minimumSupportedParticipantVersion: " 
-                        + properties.getProperty("miminum_supported_version.participant")
-                        + ", participant: " + liveInstance.getInstanceName() 
-                        + ", participantVersion: " + participantVersion;
+      if (!properties.isParticipantCompatible(participantVersion)) {
+        String errorMsg =
+            "incompatible participant. pipeline will not continue. " + "controller: "
+                + manager.getInstanceName() + ", controllerVersion: " + properties.getVersion()
+                + ", minimumSupportedParticipantVersion: "
+                + properties.getProperty("miminum_supported_version.participant")
+                + ", participant: " + liveInstance.getInstanceName() + ", participantVersion: "
+                + participantVersion;
         LOG.error(errorMsg);
         throw new StageException(errorMsg);
       }

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateComputationStage.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateComputationStage.java b/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateComputationStage.java
index 6612ee0..6097432 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateComputationStage.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateComputationStage.java
@@ -31,25 +31,18 @@ import org.apache.helix.model.Partition;
 import org.apache.helix.model.Resource;
 import org.apache.helix.model.Message.MessageType;
 
-
 /**
  * For each LiveInstances select currentState and message whose sessionId matches
  * sessionId from LiveInstance Get Partition,State for all the resources computed in
  * previous State [ResourceComputationStage]
- * 
- * 
  */
-public class CurrentStateComputationStage extends AbstractBaseStage
-{
+public class CurrentStateComputationStage extends AbstractBaseStage {
   @Override
-  public void process(ClusterEvent event) throws Exception
-  {
+  public void process(ClusterEvent event) throws Exception {
     ClusterDataCache cache = event.getAttribute("ClusterDataCache");
-    Map<String, Resource> resourceMap =
-        event.getAttribute(AttributeName.RESOURCES.toString());
+    Map<String, Resource> resourceMap = event.getAttribute(AttributeName.RESOURCES.toString());
 
-    if (cache == null || resourceMap == null)
-    {
+    if (cache == null || resourceMap == null) {
       throw new StageException("Missing attributes in event:" + event
           + ". Requires DataCache|RESOURCE");
     }
@@ -57,61 +50,40 @@ public class CurrentStateComputationStage extends AbstractBaseStage
     Map<String, LiveInstance> liveInstances = cache.getLiveInstances();
     CurrentStateOutput currentStateOutput = new CurrentStateOutput();
 
-    for (LiveInstance instance : liveInstances.values())
-    {
+    for (LiveInstance instance : liveInstances.values()) {
       String instanceName = instance.getInstanceName();
       Map<String, Message> instanceMessages = cache.getMessages(instanceName);
-      for (Message message : instanceMessages.values())
-      {
-        if (!MessageType.STATE_TRANSITION.toString()
-                                         .equalsIgnoreCase(message.getMsgType()))
-        {
+      for (Message message : instanceMessages.values()) {
+        if (!MessageType.STATE_TRANSITION.toString().equalsIgnoreCase(message.getMsgType())) {
           continue;
         }
-        if (!instance.getSessionId().equals(message.getTgtSessionId()))
-        {
+        if (!instance.getSessionId().equals(message.getTgtSessionId())) {
           continue;
         }
         String resourceName = message.getResourceName();
         Resource resource = resourceMap.get(resourceName);
-        if (resource == null)
-        {
+        if (resource == null) {
           continue;
         }
 
-        if (!message.getBatchMessageMode())
-        {
+        if (!message.getBatchMessageMode()) {
           String partitionName = message.getPartitionName();
           Partition partition = resource.getPartition(partitionName);
-          if (partition != null)
-          {
-            currentStateOutput.setPendingState(resourceName,
-                                               partition,
-                                               instanceName,
-                                               message.getToState());
-          }
-          else
-          {
+          if (partition != null) {
+            currentStateOutput.setPendingState(resourceName, partition, instanceName,
+                message.getToState());
+          } else {
             // log
           }
-        }
-        else
-        {
+        } else {
           List<String> partitionNames = message.getPartitionNames();
-          if (!partitionNames.isEmpty())
-          {
-            for (String partitionName : partitionNames)
-            {
+          if (!partitionNames.isEmpty()) {
+            for (String partitionName : partitionNames) {
               Partition partition = resource.getPartition(partitionName);
-              if (partition != null)
-              {
-                currentStateOutput.setPendingState(resourceName,
-                                                   partition,
-                                                   instanceName,
-                                                   message.getToState());
-              }
-              else
-              {
+              if (partition != null) {
+                currentStateOutput.setPendingState(resourceName, partition, instanceName,
+                    message.getToState());
+              } else {
                 // log
               }
             }
@@ -119,48 +91,37 @@ public class CurrentStateComputationStage extends AbstractBaseStage
         }
       }
     }
-    for (LiveInstance instance : liveInstances.values())
-    {
+    for (LiveInstance instance : liveInstances.values()) {
       String instanceName = instance.getInstanceName();
 
       String clientSessionId = instance.getSessionId();
       Map<String, CurrentState> currentStateMap =
           cache.getCurrentState(instanceName, clientSessionId);
-      for (CurrentState currentState : currentStateMap.values())
-      {
+      for (CurrentState currentState : currentStateMap.values()) {
 
-        if (!instance.getSessionId().equals(currentState.getSessionId()))
-        {
+        if (!instance.getSessionId().equals(currentState.getSessionId())) {
           continue;
         }
         String resourceName = currentState.getResourceName();
         String stateModelDefName = currentState.getStateModelDefRef();
         Resource resource = resourceMap.get(resourceName);
-        if (resource == null)
-        {
+        if (resource == null) {
           continue;
         }
-        if (stateModelDefName != null)
-        {
+        if (stateModelDefName != null) {
           currentStateOutput.setResourceStateModelDef(resourceName, stateModelDefName);
         }
 
         currentStateOutput.setBucketSize(resourceName, currentState.getBucketSize());
 
         Map<String, String> partitionStateMap = currentState.getPartitionStateMap();
-        for (String partitionName : partitionStateMap.keySet())
-        {
+        for (String partitionName : partitionStateMap.keySet()) {
           Partition partition = resource.getPartition(partitionName);
-          if (partition != null)
-          {
-            currentStateOutput.setCurrentState(resourceName,
-                                               partition,
-                                               instanceName,
-                                               currentState.getState(partitionName));
+          if (partition != null) {
+            currentStateOutput.setCurrentState(resourceName, partition, instanceName,
+                currentState.getState(partitionName));
 
-          }
-          else
-          {
+          } else {
             // log
           }
         }

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateOutput.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateOutput.java b/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateOutput.java
index a7a3702..b41f14b 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateOutput.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/CurrentStateOutput.java
@@ -26,16 +26,13 @@ import java.util.Map;
 import org.apache.helix.model.CurrentState;
 import org.apache.helix.model.Partition;
 
-
-public class CurrentStateOutput
-{
+public class CurrentStateOutput {
   private final Map<String, Map<Partition, Map<String, String>>> _currentStateMap;
   private final Map<String, Map<Partition, Map<String, String>>> _pendingStateMap;
-  private final Map<String, String>                              _resourceStateModelMap;
-  private final Map<String, CurrentState>                        _curStateMetaMap;
+  private final Map<String, String> _resourceStateModelMap;
+  private final Map<String, CurrentState> _curStateMetaMap;
 
-  public CurrentStateOutput()
-  {
+  public CurrentStateOutput() {
     _currentStateMap = new HashMap<String, Map<Partition, Map<String, String>>>();
     _pendingStateMap = new HashMap<String, Map<Partition, Map<String, String>>>();
     _resourceStateModelMap = new HashMap<String, String>();
@@ -43,66 +40,50 @@ public class CurrentStateOutput
 
   }
 
-  public void setResourceStateModelDef(String resourceName, String stateModelDefName)
-  {
+  public void setResourceStateModelDef(String resourceName, String stateModelDefName) {
     _resourceStateModelMap.put(resourceName, stateModelDefName);
   }
 
-  public String getResourceStateModelDef(String resourceName)
-  {
+  public String getResourceStateModelDef(String resourceName) {
     return _resourceStateModelMap.get(resourceName);
   }
 
-  public void setBucketSize(String resource, int bucketSize)
-  {
+  public void setBucketSize(String resource, int bucketSize) {
     CurrentState curStateMeta = _curStateMetaMap.get(resource);
-    if (curStateMeta == null)
-    {
+    if (curStateMeta == null) {
       curStateMeta = new CurrentState(resource);
       _curStateMetaMap.put(resource, curStateMeta);
     }
     curStateMeta.setBucketSize(bucketSize);
   }
-  
-  public int getBucketSize(String resource)
-  {
+
+  public int getBucketSize(String resource) {
     int bucketSize = 0;
     CurrentState curStateMeta = _curStateMetaMap.get(resource);
-    if (curStateMeta != null)
-    {
-      bucketSize = curStateMeta.getBucketSize();  
+    if (curStateMeta != null) {
+      bucketSize = curStateMeta.getBucketSize();
     }
-    
+
     return bucketSize;
   }
-  
-  public void setCurrentState(String resourceName,
-                              Partition partition,
-                              String instanceName,
-                              String state)
-  {
-    if (!_currentStateMap.containsKey(resourceName))
-    {
+
+  public void setCurrentState(String resourceName, Partition partition, String instanceName,
+      String state) {
+    if (!_currentStateMap.containsKey(resourceName)) {
       _currentStateMap.put(resourceName, new HashMap<Partition, Map<String, String>>());
     }
-    if (!_currentStateMap.get(resourceName).containsKey(partition))
-    {
+    if (!_currentStateMap.get(resourceName).containsKey(partition)) {
       _currentStateMap.get(resourceName).put(partition, new HashMap<String, String>());
     }
     _currentStateMap.get(resourceName).get(partition).put(instanceName, state);
   }
 
-  public void setPendingState(String resourceName,
-                              Partition partition,
-                              String instanceName,
-                              String state)
-  {
-    if (!_pendingStateMap.containsKey(resourceName))
-    {
+  public void setPendingState(String resourceName, Partition partition, String instanceName,
+      String state) {
+    if (!_pendingStateMap.containsKey(resourceName)) {
       _pendingStateMap.put(resourceName, new HashMap<Partition, Map<String, String>>());
     }
-    if (!_pendingStateMap.get(resourceName).containsKey(partition))
-    {
+    if (!_pendingStateMap.get(resourceName).containsKey(partition)) {
       _pendingStateMap.get(resourceName).put(partition, new HashMap<String, String>());
     }
     _pendingStateMap.get(resourceName).get(partition).put(instanceName, state);
@@ -110,22 +91,16 @@ public class CurrentStateOutput
 
   /**
    * given (resource, partition, instance), returns currentState
-   * 
    * @param resourceName
    * @param partition
    * @param instanceName
    * @return
    */
-  public String getCurrentState(String resourceName,
-                                Partition partition,
-                                String instanceName)
-  {
+  public String getCurrentState(String resourceName, Partition partition, String instanceName) {
     Map<Partition, Map<String, String>> map = _currentStateMap.get(resourceName);
-    if (map != null)
-    {
+    if (map != null) {
       Map<String, String> instanceStateMap = map.get(partition);
-      if (instanceStateMap != null)
-      {
+      if (instanceStateMap != null) {
         return instanceStateMap.get(instanceName);
       }
     }
@@ -134,22 +109,16 @@ public class CurrentStateOutput
 
   /**
    * given (resource, partition, instance), returns toState
-   * 
    * @param resourceName
    * @param partition
    * @param instanceName
    * @return
    */
-  public String getPendingState(String resourceName,
-                                Partition partition,
-                                String instanceName)
-  {
+  public String getPendingState(String resourceName, Partition partition, String instanceName) {
     Map<Partition, Map<String, String>> map = _pendingStateMap.get(resourceName);
-    if (map != null)
-    {
+    if (map != null) {
       Map<String, String> instanceStateMap = map.get(partition);
-      if (instanceStateMap != null)
-      {
+      if (instanceStateMap != null) {
         return instanceStateMap.get(instanceName);
       }
     }
@@ -158,18 +127,14 @@ public class CurrentStateOutput
 
   /**
    * given (resource, partition), returns (instance->currentState) map
-   * 
    * @param resourceName
    * @param partition
    * @return
    */
-  public Map<String, String> getCurrentStateMap(String resourceName, Partition partition)
-  {
-    if (_currentStateMap.containsKey(resourceName))
-    {
+  public Map<String, String> getCurrentStateMap(String resourceName, Partition partition) {
+    if (_currentStateMap.containsKey(resourceName)) {
       Map<Partition, Map<String, String>> map = _currentStateMap.get(resourceName);
-      if (map.containsKey(partition))
-      {
+      if (map.containsKey(partition)) {
         return map.get(partition);
       }
     }
@@ -178,18 +143,14 @@ public class CurrentStateOutput
 
   /**
    * given (resource, partition), returns (instance->toState) map
-   * 
    * @param resourceName
    * @param partition
    * @return
    */
-  public Map<String, String> getPendingStateMap(String resourceName, Partition partition)
-  {
-    if (_pendingStateMap.containsKey(resourceName))
-    {
+  public Map<String, String> getPendingStateMap(String resourceName, Partition partition) {
+    if (_pendingStateMap.containsKey(resourceName)) {
       Map<Partition, Map<String, String>> map = _pendingStateMap.get(resourceName);
-      if (map.containsKey(partition))
-      {
+      if (map.containsKey(partition)) {
         return map.get(partition);
       }
     }
@@ -197,8 +158,7 @@ public class CurrentStateOutput
   }
 
   @Override
-  public String toString()
-  {
+  public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("current state= ").append(_currentStateMap);
     sb.append(", pending state= ").append(_pendingStateMap);

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/ExternalViewComputeStage.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/ExternalViewComputeStage.java b/helix-core/src/main/java/org/apache/helix/controller/stages/ExternalViewComputeStage.java
index 368db9d..35ef177 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/ExternalViewComputeStage.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/ExternalViewComputeStage.java
@@ -48,23 +48,19 @@ import org.apache.helix.model.StatusUpdate;
 import org.apache.helix.monitoring.mbeans.ClusterStatusMonitor;
 import org.apache.log4j.Logger;
 
-public class ExternalViewComputeStage extends AbstractBaseStage
-{
+public class ExternalViewComputeStage extends AbstractBaseStage {
   private static Logger log = Logger.getLogger(ExternalViewComputeStage.class);
 
   @Override
-  public void process(ClusterEvent event) throws Exception
-  {
+  public void process(ClusterEvent event) throws Exception {
     long startTime = System.currentTimeMillis();
     log.info("START ExternalViewComputeStage.process()");
 
     HelixManager manager = event.getAttribute("helixmanager");
-    Map<String, Resource> resourceMap =
-        event.getAttribute(AttributeName.RESOURCES.toString());
+    Map<String, Resource> resourceMap = event.getAttribute(AttributeName.RESOURCES.toString());
     ClusterDataCache cache = event.getAttribute("ClusterDataCache");
 
-    if (manager == null || resourceMap == null || cache == null)
-    {
+    if (manager == null || resourceMap == null || cache == null) {
       throw new StageException("Missing attributes in event:" + event
           + ". Requires ClusterManager|RESOURCES|DataCache");
     }
@@ -79,39 +75,30 @@ public class ExternalViewComputeStage extends AbstractBaseStage
     List<PropertyKey> keys = new ArrayList<PropertyKey>();
 
     Map<String, ExternalView> curExtViews =
-          dataAccessor.getChildValuesMap(keyBuilder.externalViews());
+        dataAccessor.getChildValuesMap(keyBuilder.externalViews());
 
-    for (String resourceName : resourceMap.keySet())
-    {
+    for (String resourceName : resourceMap.keySet()) {
       ExternalView view = new ExternalView(resourceName);
       // view.setBucketSize(currentStateOutput.getBucketSize(resourceName));
       // if resource ideal state has bucket size, set it
       // otherwise resource has been dropped, use bucket size from current state instead
       Resource resource = resourceMap.get(resourceName);
-      if (resource.getBucketSize() > 0)
-      {
+      if (resource.getBucketSize() > 0) {
         view.setBucketSize(resource.getBucketSize());
-      }
-      else
-      {
+      } else {
         view.setBucketSize(currentStateOutput.getBucketSize(resourceName));
       }
 
-      for (Partition partition : resource.getPartitions())
-      {
+      for (Partition partition : resource.getPartitions()) {
         Map<String, String> currentStateMap =
             currentStateOutput.getCurrentStateMap(resourceName, partition);
-        if (currentStateMap != null && currentStateMap.size() > 0)
-        {
+        if (currentStateMap != null && currentStateMap.size() > 0) {
           // Set<String> disabledInstances
           // = cache.getDisabledInstancesForResource(resource.toString());
-          for (String instance : currentStateMap.keySet())
-          {
+          for (String instance : currentStateMap.keySet()) {
             // if (!disabledInstances.contains(instance))
             // {
-            view.setState(partition.getPartitionName(),
-                          instance,
-                          currentStateMap.get(instance));
+            view.setState(partition.getPartitionName(), instance, currentStateMap.get(instance));
             // }
           }
         }
@@ -120,86 +107,83 @@ public class ExternalViewComputeStage extends AbstractBaseStage
       ClusterStatusMonitor clusterStatusMonitor =
           (ClusterStatusMonitor) event.getAttribute("clusterStatusMonitor");
       IdealState idealState = cache._idealStateMap.get(view.getResourceName());
-      if(idealState != null)
-      {
-        if (clusterStatusMonitor != null && !idealState.getStateModelDefRef().equalsIgnoreCase(DefaultSchedulerMessageHandlerFactory.SCHEDULER_TASK_QUEUE))
-        {
+      if (idealState != null) {
+        if (clusterStatusMonitor != null
+            && !idealState.getStateModelDefRef().equalsIgnoreCase(
+                DefaultSchedulerMessageHandlerFactory.SCHEDULER_TASK_QUEUE)) {
           clusterStatusMonitor.onExternalViewChange(view,
-                                                  cache._idealStateMap.get(view.getResourceName()));
+              cache._idealStateMap.get(view.getResourceName()));
         }
       }
 
       // compare the new external view with current one, set only on different
       ExternalView curExtView = curExtViews.get(resourceName);
-      if (curExtView == null || !curExtView.getRecord().equals(view.getRecord()))
-      {
+      if (curExtView == null || !curExtView.getRecord().equals(view.getRecord())) {
         keys.add(keyBuilder.externalView(resourceName));
         newExtViews.add(view);
 
-        // For SCHEDULER_TASK_RESOURCE resource group (helix task queue), we need to find out which task 
-        // partitions are finished (COMPLETED or ERROR), update the status update of the original scheduler 
+        // For SCHEDULER_TASK_RESOURCE resource group (helix task queue), we need to find out which
+        // task
+        // partitions are finished (COMPLETED or ERROR), update the status update of the original
+        // scheduler
         // message, and then remove the partitions from the ideal state
-        if(idealState != null && idealState.getStateModelDefRef().equalsIgnoreCase(DefaultSchedulerMessageHandlerFactory.SCHEDULER_TASK_QUEUE))
-        {
+        if (idealState != null
+            && idealState.getStateModelDefRef().equalsIgnoreCase(
+                DefaultSchedulerMessageHandlerFactory.SCHEDULER_TASK_QUEUE)) {
           updateScheduledTaskStatus(view, manager, idealState);
         }
       }
     }
-    // TODO: consider not setting the externalview of SCHEDULER_TASK_QUEUE at all. 
+    // TODO: consider not setting the externalview of SCHEDULER_TASK_QUEUE at all.
     // Are there any entity that will be interested in its change?
 
     // add/update external-views
-    if (newExtViews.size() > 0)
-    {
+    if (newExtViews.size() > 0) {
       dataAccessor.setChildren(keys, newExtViews);
     }
 
     // remove dead external-views
     for (String resourceName : curExtViews.keySet()) {
-        if (!resourceMap.keySet().contains(resourceName)) {
-            dataAccessor.removeProperty(keyBuilder.externalView(resourceName));
-        }
+      if (!resourceMap.keySet().contains(resourceName)) {
+        dataAccessor.removeProperty(keyBuilder.externalView(resourceName));
+      }
     }
 
     long endTime = System.currentTimeMillis();
-    log.info("END ExternalViewComputeStage.process(). took: " + (endTime - startTime)
-        + " ms");
+    log.info("END ExternalViewComputeStage.process(). took: " + (endTime - startTime) + " ms");
   }
-  
-  private void updateScheduledTaskStatus(ExternalView ev, HelixManager manager, IdealState taskQueueIdealState)
-  {
+
+  private void updateScheduledTaskStatus(ExternalView ev, HelixManager manager,
+      IdealState taskQueueIdealState) {
     HelixDataAccessor accessor = manager.getHelixDataAccessor();
     ZNRecord finishedTasks = new ZNRecord(ev.getResourceName());
-    
+
     // Place holder for finished partitions
     Map<String, String> emptyMap = new HashMap<String, String>();
     List<String> emptyList = new LinkedList<String>();
-    
+
     Map<String, Integer> controllerMsgIdCountMap = new HashMap<String, Integer>();
-    Map<String, Map<String, String>> controllerMsgUpdates = new HashMap<String, Map<String, String>>();
-    
+    Map<String, Map<String, String>> controllerMsgUpdates =
+        new HashMap<String, Map<String, String>>();
+
     Builder keyBuilder = accessor.keyBuilder();
-          
-    for(String taskPartitionName : ev.getPartitionSet())
-    {
-      for(String taskState : ev.getStateMap(taskPartitionName).values())
-      {
-        if(taskState.equalsIgnoreCase(HelixDefinedState.ERROR.toString()) || taskState.equalsIgnoreCase("COMPLETED"))
-        {
+
+    for (String taskPartitionName : ev.getPartitionSet()) {
+      for (String taskState : ev.getStateMap(taskPartitionName).values()) {
+        if (taskState.equalsIgnoreCase(HelixDefinedState.ERROR.toString())
+            || taskState.equalsIgnoreCase("COMPLETED")) {
           log.info(taskPartitionName + " finished as " + taskState);
           finishedTasks.getListFields().put(taskPartitionName, emptyList);
           finishedTasks.getMapFields().put(taskPartitionName, emptyMap);
-          
+
           // Update original scheduler message status update
-          if(taskQueueIdealState.getRecord().getMapField(taskPartitionName) != null)
-          {
-            String controllerMsgId 
-              = taskQueueIdealState.getRecord().getMapField(taskPartitionName).get(DefaultSchedulerMessageHandlerFactory.CONTROLLER_MSG_ID);
-            if(controllerMsgId != null)
-            {
+          if (taskQueueIdealState.getRecord().getMapField(taskPartitionName) != null) {
+            String controllerMsgId =
+                taskQueueIdealState.getRecord().getMapField(taskPartitionName)
+                    .get(DefaultSchedulerMessageHandlerFactory.CONTROLLER_MSG_ID);
+            if (controllerMsgId != null) {
               log.info(taskPartitionName + " finished with controllerMsg " + controllerMsgId);
-              if(!controllerMsgUpdates.containsKey(controllerMsgId))
-              {
+              if (!controllerMsgUpdates.containsKey(controllerMsgId)) {
                 controllerMsgUpdates.put(controllerMsgId, new HashMap<String, String>());
               }
               controllerMsgUpdates.get(controllerMsgId).put(taskPartitionName, taskState);
@@ -209,51 +193,49 @@ public class ExternalViewComputeStage extends AbstractBaseStage
       }
     }
     // fill the controllerMsgIdCountMap
-    for(String taskId : taskQueueIdealState.getPartitionSet())
-    {
-      String controllerMsgId 
-        = taskQueueIdealState.getRecord().getMapField(taskId).get(DefaultSchedulerMessageHandlerFactory.CONTROLLER_MSG_ID);
-      if(controllerMsgId != null)
-      {
-        if(!controllerMsgIdCountMap.containsKey(controllerMsgId))
-        {
+    for (String taskId : taskQueueIdealState.getPartitionSet()) {
+      String controllerMsgId =
+          taskQueueIdealState.getRecord().getMapField(taskId)
+              .get(DefaultSchedulerMessageHandlerFactory.CONTROLLER_MSG_ID);
+      if (controllerMsgId != null) {
+        if (!controllerMsgIdCountMap.containsKey(controllerMsgId)) {
           controllerMsgIdCountMap.put(controllerMsgId, 0);
         }
-        controllerMsgIdCountMap.put(controllerMsgId, (controllerMsgIdCountMap.get(controllerMsgId) + 1));
+        controllerMsgIdCountMap.put(controllerMsgId,
+            (controllerMsgIdCountMap.get(controllerMsgId) + 1));
       }
     }
-    
-    if(controllerMsgUpdates.size() > 0)
-    {
-      for(String controllerMsgId : controllerMsgUpdates.keySet())
-      {
-        PropertyKey controllerStatusUpdateKey 
-          = keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), controllerMsgId);
+
+    if (controllerMsgUpdates.size() > 0) {
+      for (String controllerMsgId : controllerMsgUpdates.keySet()) {
+        PropertyKey controllerStatusUpdateKey =
+            keyBuilder.controllerTaskStatus(MessageType.SCHEDULER_MSG.toString(), controllerMsgId);
         StatusUpdate controllerStatusUpdate = accessor.getProperty(controllerStatusUpdateKey);
-        for(String taskPartitionName : controllerMsgUpdates.get(controllerMsgId).keySet())
-        {
+        for (String taskPartitionName : controllerMsgUpdates.get(controllerMsgId).keySet()) {
           Map<String, String> result = new HashMap<String, String>();
           result.put("Result", controllerMsgUpdates.get(controllerMsgId).get(taskPartitionName));
-          controllerStatusUpdate.getRecord().setMapField("MessageResult "  + 
-             taskQueueIdealState.getRecord().getMapField(taskPartitionName).get(Message.Attributes.TGT_NAME.toString()) + " " + taskPartitionName + " " + 
-             taskQueueIdealState.getRecord().getMapField(taskPartitionName).get(Message.Attributes.MSG_ID.toString())
-             , result);
+          controllerStatusUpdate.getRecord().setMapField(
+              "MessageResult "
+                  + taskQueueIdealState.getRecord().getMapField(taskPartitionName)
+                      .get(Message.Attributes.TGT_NAME.toString())
+                  + " "
+                  + taskPartitionName
+                  + " "
+                  + taskQueueIdealState.getRecord().getMapField(taskPartitionName)
+                      .get(Message.Attributes.MSG_ID.toString()), result);
         }
         // All done for the scheduled tasks that came from controllerMsgId, add summary for it
-        if(controllerMsgUpdates.get(controllerMsgId).size() == controllerMsgIdCountMap.get(controllerMsgId).intValue())
-        {
+        if (controllerMsgUpdates.get(controllerMsgId).size() == controllerMsgIdCountMap.get(
+            controllerMsgId).intValue()) {
           int finishedTasksNum = 0;
           int completedTasksNum = 0;
-          for(String key : controllerStatusUpdate.getRecord().getMapFields().keySet())
-          {
-            if(key.startsWith("MessageResult "))
-            {
-              finishedTasksNum ++;
+          for (String key : controllerStatusUpdate.getRecord().getMapFields().keySet()) {
+            if (key.startsWith("MessageResult ")) {
+              finishedTasksNum++;
             }
-            if(controllerStatusUpdate.getRecord().getMapField(key).get("Result") != null)
-            {
-              if(controllerStatusUpdate.getRecord().getMapField(key).get("Result").equalsIgnoreCase("COMPLETED"))
-              {
+            if (controllerStatusUpdate.getRecord().getMapField(key).get("Result") != null) {
+              if (controllerStatusUpdate.getRecord().getMapField(key).get("Result")
+                  .equalsIgnoreCase("COMPLETED")) {
                 completedTasksNum++;
               }
             }
@@ -261,16 +243,15 @@ public class ExternalViewComputeStage extends AbstractBaseStage
           Map<String, String> summary = new TreeMap<String, String>();
           summary.put("TotalMessages:", "" + finishedTasksNum);
           summary.put("CompletedMessages", "" + completedTasksNum);
-          
+
           controllerStatusUpdate.getRecord().setMapField("Summary", summary);
         }
         // Update the statusUpdate of controllerMsgId
         accessor.updateProperty(controllerStatusUpdateKey, controllerStatusUpdate);
       }
     }
-    
-    if(finishedTasks.getListFields().size() > 0)
-    {
+
+    if (finishedTasks.getListFields().size() > 0) {
       ZNRecordDelta znDelta = new ZNRecordDelta(finishedTasks, MergeOperation.SUBTRACT);
       List<ZNRecordDelta> deltaList = new LinkedList<ZNRecordDelta>();
       deltaList.add(znDelta);
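The tail of updateScheduledTaskStatus packs the finished task partitions into a ZNRecord and wraps it in a SUBTRACT ZNRecordDelta so they can be stripped from the task-queue ideal state; how the delta list is applied falls outside this hunk. A minimal sketch of the delta construction only, with the MergeOperation import path assumed (it is not visible in this excerpt):

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;

import org.apache.helix.ZNRecord;
import org.apache.helix.ZNRecordDelta;
import org.apache.helix.ZNRecordDelta.MergeOperation; // assumed nested-enum location

public class SubtractDeltaSketch {
  public static void main(String[] args) {
    // The list/map fields name the partitions to remove from the target record.
    ZNRecord finishedTasks = new ZNRecord("myTaskQueue");
    finishedTasks.getListFields().put("myTaskQueue_3", new LinkedList<String>());
    finishedTasks.getMapFields().put("myTaskQueue_3", new HashMap<String, String>());

    // SUBTRACT marks the delta for removal rather than merge.
    ZNRecordDelta znDelta = new ZNRecordDelta(finishedTasks, MergeOperation.SUBTRACT);
    List<ZNRecordDelta> deltaList = new LinkedList<ZNRecordDelta>();
    deltaList.add(znDelta);
    System.out.println("built " + deltaList.size() + " delta(s) for " + finishedTasks.getId());
  }
}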

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/HealthDataCache.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/HealthDataCache.java b/helix-core/src/main/java/org/apache/helix/controller/stages/HealthDataCache.java
index 11addfc..3ab8336 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/HealthDataCache.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/HealthDataCache.java
@@ -33,9 +33,7 @@ import org.apache.helix.model.HealthStat;
 import org.apache.helix.model.LiveInstance;
 import org.apache.helix.model.PersistentStats;
 
-
-public class HealthDataCache
-{
+public class HealthDataCache {
   Map<String, LiveInstance> _liveInstanceMap;
 
   Map<String, Map<String, HealthStat>> _healthStatMap;
@@ -44,57 +42,47 @@ public class HealthDataCache
   Alerts _alerts;
   AlertStatus _alertStatus;
 
-  public HealthStat getGlobalStats()
-  {
+  public HealthStat getGlobalStats() {
     return _globalStats;
   }
 
-  public PersistentStats getPersistentStats()
-  {
+  public PersistentStats getPersistentStats() {
     return _persistentStats;
   }
 
-  public Alerts getAlerts()
-  {
+  public Alerts getAlerts() {
     return _alerts;
   }
 
-  public AlertStatus getAlertStatus()
-  {
+  public AlertStatus getAlertStatus() {
     return _alertStatus;
   }
 
-  public Map<String, HealthStat> getHealthStats(String instanceName)
-  {
+  public Map<String, HealthStat> getHealthStats(String instanceName) {
     Map<String, HealthStat> map = _healthStatMap.get(instanceName);
-    if (map != null)
-    {
+    if (map != null) {
       return map;
-    } else
-    {
+    } else {
       return Collections.emptyMap();
     }
   }
 
-  public Map<String, LiveInstance> getLiveInstances()
-  {
+  public Map<String, LiveInstance> getLiveInstances() {
     return _liveInstanceMap;
   }
 
-  public boolean refresh(HelixDataAccessor accessor)
-  {
+  public boolean refresh(HelixDataAccessor accessor) {
     Builder keyBuilder = accessor.keyBuilder();
     _liveInstanceMap = accessor.getChildValuesMap(keyBuilder.liveInstances());
 
     Map<String, Map<String, HealthStat>> hsMap = new HashMap<String, Map<String, HealthStat>>();
 
-    for (String instanceName : _liveInstanceMap.keySet())
-    {
+    for (String instanceName : _liveInstanceMap.keySet()) {
       // xxx clearly getting znodes for the instance here...so get the
       // timestamp!
 
-      Map<String, HealthStat> childValuesMap = accessor
-          .getChildValuesMap(keyBuilder.healthReports(instanceName));
+      Map<String, HealthStat> childValuesMap =
+          accessor.getChildValuesMap(keyBuilder.healthReports(instanceName));
       hsMap.put(instanceName, childValuesMap);
     }
     _healthStatMap = Collections.unmodifiableMap(hsMap);
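HealthDataCache above is a one-shot snapshot: refresh(accessor) reads the live instances and their per-instance health reports, and the getters then serve the cached views. A minimal sketch, assuming a started HelixManager supplied by the caller and helix-core on the classpath:

import java.util.Map;

import org.apache.helix.HelixDataAccessor;
import org.apache.helix.HelixManager;
import org.apache.helix.controller.stages.HealthDataCache;
import org.apache.helix.model.HealthStat;

public class HealthDataCacheSketch {
  // 'manager' is assumed to be an already-connected HelixManager.
  static void dumpHealth(HelixManager manager) {
    HealthDataCache cache = new HealthDataCache();
    HelixDataAccessor accessor = manager.getHelixDataAccessor();
    cache.refresh(accessor); // one read pass; getters below serve the snapshot

    for (String instance : cache.getLiveInstances().keySet()) {
      // Instances without reports come back as an empty map rather than null.
      Map<String, HealthStat> stats = cache.getHealthStats(instance);
      System.out.println(instance + " -> " + stats.keySet());
    }
  }
}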

http://git-wip-us.apache.org/repos/asf/incubator-helix/blob/f414aad4/helix-core/src/main/java/org/apache/helix/controller/stages/MessageGenerationOutput.java
----------------------------------------------------------------------
diff --git a/helix-core/src/main/java/org/apache/helix/controller/stages/MessageGenerationOutput.java b/helix-core/src/main/java/org/apache/helix/controller/stages/MessageGenerationOutput.java
index 23723a7..359a959 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/stages/MessageGenerationOutput.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/stages/MessageGenerationOutput.java
@@ -28,51 +28,38 @@ import java.util.Map;
 import org.apache.helix.model.Message;
 import org.apache.helix.model.Partition;
 
-
-public class MessageGenerationOutput
-{
+public class MessageGenerationOutput {
 
   private final Map<String, Map<Partition, List<Message>>> _messagesMap;
 
-  public MessageGenerationOutput()
-  {
+  public MessageGenerationOutput() {
     _messagesMap = new HashMap<String, Map<Partition, List<Message>>>();
 
   }
 
-  public void addMessage(String resourceName, Partition partition,
-      Message message)
-  {
-    if (!_messagesMap.containsKey(resourceName))
-    {
-      _messagesMap.put(resourceName,
-          new HashMap<Partition, List<Message>>());
+  public void addMessage(String resourceName, Partition partition, Message message) {
+    if (!_messagesMap.containsKey(resourceName)) {
+      _messagesMap.put(resourceName, new HashMap<Partition, List<Message>>());
     }
-    if (!_messagesMap.get(resourceName).containsKey(partition))
-    {
-      _messagesMap.get(resourceName).put(partition,
-          new ArrayList<Message>());
+    if (!_messagesMap.get(resourceName).containsKey(partition)) {
+      _messagesMap.get(resourceName).put(partition, new ArrayList<Message>());
 
     }
     _messagesMap.get(resourceName).get(partition).add(message);
 
   }
 
-  public List<Message> getMessages(String resourceName,
-      Partition resource)
-  {
+  public List<Message> getMessages(String resourceName, Partition resource) {
     Map<Partition, List<Message>> map = _messagesMap.get(resourceName);
-    if (map != null)
-    {
+    if (map != null) {
       return map.get(resource);
     }
     return Collections.emptyList();
 
   }
-  
+
   @Override
-  public String toString()
-  {
+  public String toString() {
     return _messagesMap.toString();
   }
 }
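MessageGenerationOutput mirrors the CurrentStateOutput bookkeeping: messages are bucketed per resource and partition, and getMessages falls back to an empty list for an unknown resource. A minimal sketch, assuming the Partition(String) and Message(MessageType, msgId) constructors from the wider codebase:

import java.util.UUID;

import org.apache.helix.controller.stages.MessageGenerationOutput;
import org.apache.helix.model.Message;
import org.apache.helix.model.Message.MessageType;
import org.apache.helix.model.Partition;

public class MessageGenerationOutputSketch {
  public static void main(String[] args) {
    MessageGenerationOutput output = new MessageGenerationOutput();
    Partition partition = new Partition("MyDB_0"); // assumed constructor

    // Message(MessageType, msgId) is assumed from the wider codebase.
    Message message = new Message(MessageType.STATE_TRANSITION, UUID.randomUUID().toString());
    output.addMessage("MyDB", partition, message);

    System.out.println(output.getMessages("MyDB", partition).size());       // 1
    System.out.println(output.getMessages("OtherDB", partition).isEmpty()); // true: empty-list fallback
  }
}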

