hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sj...@apache.org
Subject [50/50] [abbrv] hadoop git commit: Made a number of miscellaneous fixes for javac, javadoc, and checkstyle warnings.
Date Sun, 10 Jul 2016 15:51:41 GMT
Made a number of miscellaneous fixes for javac, javadoc, and checkstyle warnings.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6cf6ab7b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6cf6ab7b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6cf6ab7b

Branch: refs/heads/YARN-2928
Commit: 6cf6ab7b780de2b0c2c9ea730e1f366965a0d682
Parents: c5dbde0
Author: Sangjin Lee <sjlee@apache.org>
Authored: Sun Jul 10 08:38:19 2016 -0700
Committer: Sangjin Lee <sjlee@apache.org>
Committed: Sun Jul 10 08:46:05 2016 -0700

----------------------------------------------------------------------
 .../jobhistory/JobHistoryEventHandler.java      |   4 -
 .../v2/app/rm/RMContainerAllocator.java         |   2 +-
 .../jobhistory/TestJobHistoryEventHandler.java  |   3 +-
 .../mapreduce/jobhistory/HistoryEvent.java      |  12 +-
 .../mapred/TestMRTimelineEventHandling.java     |  24 ++-
 .../org/apache/hadoop/mapred/UtilsForTests.java |   4 +-
 .../apache/hadoop/mapreduce/EntityWriterV2.java |   3 +-
 .../mapreduce/JobHistoryFileReplayMapperV2.java |   4 +-
 .../mapreduce/SimpleEntityWriterConstants.java  |  14 +-
 .../hadoop/mapreduce/SimpleEntityWriterV1.java  |   6 +-
 .../mapreduce/TimelineEntityConverterV1.java    |  10 +-
 .../mapreduce/TimelineEntityConverterV2.java    |  11 +-
 .../mapreduce/TimelineServicePerformance.java   |   3 +-
 .../hadoop/mapreduce/v2/MiniMRYarnCluster.java  |   5 +-
 .../records/timelineservice/package-info.java   |   1 -
 .../hadoop/yarn/conf/YarnConfiguration.java     |   7 +-
 .../hadoop/yarn/util/TimelineServiceHelper.java |   2 +
 .../distributedshell/ApplicationMaster.java     |  27 +--
 .../distributedshell/TestDistributedShell.java  |  23 ++-
 .../TestDistributedShellWithNodeLabels.java     |   2 -
 .../hadoop/yarn/client/api/AMRMClient.java      |  10 +-
 .../yarn/client/api/async/AMRMClientAsync.java  |   4 +-
 .../hadoop/yarn/client/api/TimelineClient.java  |  34 ++--
 .../client/api/impl/TimelineClientImpl.java     |  42 ++--
 .../TestTimelineServiceRecords.java             |  15 +-
 .../api/impl/TestTimelineClientV2Impl.java      |  37 ++--
 .../yarn/util/TestTimelineServiceHelper.java    |  21 +-
 .../api/CollectorNodemanagerProtocol.java       |  12 +-
 .../api/CollectorNodemanagerProtocolPB.java     |   3 +-
 ...ollectorNodemanagerProtocolPBClientImpl.java |   4 +-
 ...llectorNodemanagerProtocolPBServiceImpl.java |   6 +-
 ...etTimelineCollectorContextRequestPBImpl.java |  25 ++-
 ...tTimelineCollectorContextResponsePBImpl.java |  24 ++-
 .../impl/pb/NodeHeartbeatRequestPBImpl.java     |   9 +-
 .../impl/pb/NodeHeartbeatResponsePBImpl.java    |   6 +-
 .../pb/ReportNewCollectorInfoRequestPBImpl.java |  16 +-
 .../ReportNewCollectorInfoResponsePBImpl.java   |  12 +-
 .../records/impl/pb/AppCollectorsMapPBImpl.java |  17 +-
 .../java/org/apache/hadoop/yarn/TestRPC.java    |  55 ++++--
 .../yarn/server/nodemanager/NodeManager.java    |   4 +-
 .../nodemanager/NodeStatusUpdaterImpl.java      |  20 +-
 .../containermanager/ContainerManagerImpl.java  |   8 +-
 .../ApplicationContainerFinishedEvent.java      |   3 +-
 .../container/ContainerImpl.java                |   2 -
 .../monitor/ContainersMonitorImpl.java          |   5 +-
 .../timelineservice/NMTimelinePublisher.java    |  11 +-
 .../nodemanager/TestNodeStatusUpdater.java      |   5 +-
 .../application/TestApplication.java            |   5 +-
 .../launcher/TestContainerLaunch.java           |  22 +--
 .../TestNMTimelinePublisher.java                |   4 +-
 .../yarn/server/nodemanager/webapp/MockApp.java |   8 +-
 .../server/resourcemanager/RMContextImpl.java   |   3 +-
 .../metrics/TimelineServiceV1Publisher.java     |   2 +-
 .../TestSystemMetricsPublisherForV2.java        |  29 +--
 .../TestTimelineServiceClientIntegration.java   |   9 +-
 ...stTimelineReaderWebServicesHBaseStorage.java |  51 ++---
 .../storage/TestHBaseTimelineStorage.java       | 192 +++++++++----------
 ...TestPhoenixOfflineAggregationWriterImpl.java |   4 +-
 .../storage/flow/TestFlowDataGenerator.java     |  39 ++--
 .../flow/TestHBaseStorageFlowActivity.java      |  13 +-
 .../storage/flow/TestHBaseStorageFlowRun.java   |  10 +-
 .../flow/TestHBaseStorageFlowRunCompaction.java |  33 ++--
 .../collector/TimelineCollectorWebService.java  |   3 +-
 .../storage/application/package-info.java       |   1 -
 .../storage/common/AppIdKeyConverter.java       |   3 +-
 .../storage/common/TimestampGenerator.java      |   3 +-
 .../storage/entity/package-info.java            |   1 -
 .../TestNMTimelineCollectorManager.java         |  24 +--
 ...TestPerNodeTimelineCollectorsAuxService.java |  16 +-
 .../reader/TestTimelineReaderUtils.java         |   6 +-
 .../reader/TestTimelineReaderWebServices.java   |  11 +-
 .../TestTimelineReaderWebServicesUtils.java     |   4 +-
 .../reader/TestTimelineUIDConverter.java        |   2 +-
 .../TestFileSystemTimelineReaderImpl.java       |  44 ++---
 .../TestFileSystemTimelineWriterImpl.java       |   7 +-
 .../storage/common/TestRowKeys.java             |  22 ++-
 .../storage/common/TestSeparator.java           |  28 +--
 .../src/site/markdown/TimelineServiceV2.md      |   1 -
 78 files changed, 634 insertions(+), 543 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index 9b59676..817cd14 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskStatus;
 import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.CounterGroup;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -76,11 +75,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.ArrayNode;
 import org.codehaus.jackson.node.JsonNodeFactory;
-import org.codehaus.jackson.node.ObjectNode;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.sun.jersey.api.client.ClientHandlerException;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
index ea2046b..217337e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
@@ -829,7 +829,7 @@ public class RMContainerAllocator extends RMContainerRequestor
     if (collectorAddr != null && !collectorAddr.isEmpty()
         && appContext.getTimelineClient() != null) {
       appContext.getTimelineClient().setTimelineServiceAddress(
-        response.getCollectorAddr());
+          response.getCollectorAddr());
     }
 
     for (ContainerStatus cont : finishedContainers) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
index a84e6d2..064f9ec 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
@@ -786,7 +786,8 @@ public class TestJobHistoryEventHandler {
     public TestParams(boolean isLastAMRetry) {
       this(AppContext.class, isLastAMRetry);
     }
-    public TestParams(Class<? extends AppContext> contextClass, boolean isLastAMRetry) {
+    public TestParams(Class<? extends AppContext> contextClass,
+        boolean isLastAMRetry) {
       this.isLastAMRetry = isLastAMRetry;
       mockAppContext = mockAppContext(contextClass, appId, this.isLastAMRetry);
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java
index 1d59ebe..1ba7195 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java
@@ -42,9 +42,17 @@ public interface HistoryEvent {
   /** Set the Avro datum wrapped by this. */
   void setDatum(Object datum);
 
-  /** Map HistoryEvent to TimelineEvent */
+  /**
+   * Map HistoryEvent to TimelineEvent.
+   *
+   * @return the timeline event
+   */
   TimelineEvent toTimelineEvent();
 
-  /** Counters or Metrics if any else return null. */
+  /**
+   * Counters or Metrics if any else return null.
+   *
+   * @return the set of timeline metrics
+   */
   Set<TimelineMetric> getTimelineMetrics();
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
index 6b9f27e..90748a9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
@@ -64,7 +64,7 @@ public class TestMRTimelineEventHandling {
 
   private static final String TIMELINE_AUX_SERVICE_NAME = "timeline_collector";
   private static final Log LOG =
-    LogFactory.getLog(TestMRTimelineEventHandling.class);
+      LogFactory.getLog(TestMRTimelineEventHandling.class);
 
   @Test
   public void testTimelineServiceStartInMiniCluster() throws Exception {
@@ -169,6 +169,7 @@ public class TestMRTimelineEventHandling {
     }
   }
 
+  @SuppressWarnings("deprecation")
   @Test
   public void testMRNewTimelineServiceEventHandling() throws Exception {
     LOG.info("testMRNewTimelineServiceEventHandling start.");
@@ -183,7 +184,7 @@ public class TestMRTimelineEventHandling {
     // enable aux-service based timeline collectors
     conf.set(YarnConfiguration.NM_AUX_SERVICES, TIMELINE_AUX_SERVICE_NAME);
     conf.set(YarnConfiguration.NM_AUX_SERVICES + "." + TIMELINE_AUX_SERVICE_NAME
-      + ".class", PerNodeTimelineCollectorsAuxService.class.getName());
+        + ".class", PerNodeTimelineCollectorsAuxService.class.getName());
 
     conf.setBoolean(YarnConfiguration.SYSTEM_METRICS_PUBLISHER_ENABLED, true);
 
@@ -245,7 +246,8 @@ public class TestMRTimelineEventHandling {
       }
       // Cleanup test file
       String testRoot =
-          FileSystemTimelineWriterImpl.DEFAULT_TIMELINE_SERVICE_STORAGE_DIR_ROOT;
+          FileSystemTimelineWriterImpl.
+              DEFAULT_TIMELINE_SERVICE_STORAGE_DIR_ROOT;
       File testRootFolder = new File(testRoot);
       if(testRootFolder.isDirectory()) {
         FileUtils.deleteDirectory(testRootFolder);
@@ -320,8 +322,10 @@ public class TestMRTimelineEventHandling {
         " does not exist.",
         taskFolder.isDirectory());
 
-    String taskEventFileName = appId.toString().replaceAll("application", "task")
-        + "_m_000000" + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
+    String taskEventFileName =
+        appId.toString().replaceAll("application", "task") +
+        "_m_000000" +
+        FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
 
     String taskEventFilePath = outputDirTask + taskEventFileName;
     File taskEventFile = new File(taskEventFilePath);
@@ -372,10 +376,12 @@ public class TestMRTimelineEventHandling {
       reader = new BufferedReader(new FileReader(entityFile));
       while ((strLine = reader.readLine()) != null) {
         if (strLine.trim().length() > 0) {
-          org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
-              FileSystemTimelineReaderImpl.getTimelineRecordFromJSON(
-                  strLine.trim(),
-                  org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.class);
+          org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+              entity =
+                  FileSystemTimelineReaderImpl.getTimelineRecordFromJSON(
+                      strLine.trim(),
+                      org.apache.hadoop.yarn.api.records.timelineservice.
+                          TimelineEntity.class);
           if (eventId == null) {
             // Job metrics are published without any events for
             // ApplicationEntity. There is also possibility that some other

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
index 2fb6828..935c175 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
@@ -621,8 +621,8 @@ public class UtilsForTests {
     return job;
   }
 
-  public static void waitForAppFinished(RunningJob job, MiniMRYarnCluster cluster)
- throws IOException {
+  public static void waitForAppFinished(RunningJob job,
+      MiniMRYarnCluster cluster) throws IOException {
     ApplicationId appId = ApplicationId.newInstance(
         Long.parseLong(job.getID().getJtIdentifier()), job.getID().getId());
     ConcurrentMap<ApplicationId, RMApp> rmApps =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java
index f5d95c3..74d7b94 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java
@@ -33,7 +33,8 @@ import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollector
  * to the timeline service.
  */
 abstract class EntityWriterV2
-    extends org.apache.hadoop.mapreduce.Mapper<IntWritable,IntWritable,Writable,Writable> {
+    extends org.apache.hadoop.mapreduce.Mapper
+        <IntWritable, IntWritable, Writable, Writable> {
   @Override
   public void map(IntWritable key, IntWritable val, Context context)
       throws IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java
index 6a9a878..2ec4833 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java
@@ -89,8 +89,8 @@ class JobHistoryFileReplayMapperV2 extends EntityWriterV2 {
             parser.parseHistoryFile(job.getJobHistoryFilePath());
         Configuration jobConf =
             parser.parseConfiguration(job.getJobConfFilePath());
-        LOG.info("parsed the job history file and the configuration file for job"
-            + jobIdStr);
+        LOG.info("parsed the job history file and the configuration file " +
+            "for job " + jobIdStr);
 
         // set the context
         // flow id: job name, flow run id: timestamp, user id

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java
index b89d0e8..d96ad76 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java
@@ -32,12 +32,12 @@ interface SimpleEntityWriterConstants {
 
   /**
    *  To ensure that the compression really gets exercised, generate a
-   *  random alphanumeric fixed length payload
+   *  random alphanumeric fixed length payload.
    */
-  char[] ALPHA_NUMS = new char[] { 'a', 'b', 'c', 'd', 'e', 'f',
-    'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r',
-    's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D',
-    'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',
-    'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2',
-    '3', '4', '5', '6', '7', '8', '9', '0', ' ' };
+  char[] ALPHA_NUMS = new char[] {'a', 'b', 'c', 'd', 'e', 'f',
+      'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r',
+      's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D',
+      'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',
+      'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2',
+      '3', '4', '5', '6', '7', '8', '9', '0', ' '};
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
index b10ae04..16d14a1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
@@ -39,11 +39,13 @@ import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl;
    * configuration.
    */
 class SimpleEntityWriterV1
-    extends org.apache.hadoop.mapreduce.Mapper<IntWritable,IntWritable,Writable,Writable>
+    extends org.apache.hadoop.mapreduce.Mapper
+        <IntWritable, IntWritable, Writable, Writable>
     implements SimpleEntityWriterConstants {
   private static final Log LOG = LogFactory.getLog(SimpleEntityWriterV1.class);
 
-  public void map(IntWritable key, IntWritable val, Context context) throws IOException {
+  public void map(IntWritable key, IntWritable val, Context context)
+      throws IOException {
     TimelineClient tlc = new TimelineClientImpl();
     Configuration conf = context.getConfiguration();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java
index 4d8b74b..dcc3ce0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java
@@ -90,9 +90,10 @@ class TimelineEntityConverterV1 {
     return job;
   }
 
-  private Set<TimelineEntity> createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
+  private Set<TimelineEntity>
+      createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
     Set<TimelineEntity> entities = new HashSet<>();
-    Map<TaskID,TaskInfo> taskInfoMap = jobInfo.getAllTasks();
+    Map<TaskID, TaskInfo> taskInfoMap = jobInfo.getAllTasks();
     LOG.info("job " + jobInfo.getJobId()+ " has " + taskInfoMap.size() +
         " tasks");
     for (TaskInfo taskInfo: taskInfoMap.values()) {
@@ -124,7 +125,7 @@ class TimelineEntityConverterV1 {
 
   private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
     Set<TimelineEntity> taskAttempts = new HashSet<TimelineEntity>();
-    Map<TaskAttemptID,TaskAttemptInfo> taskAttemptInfoMap =
+    Map<TaskAttemptID, TaskAttemptInfo> taskAttemptInfoMap =
         taskInfo.getAllTaskAttempts();
     LOG.info("task " + taskInfo.getTaskId() + " has " +
         taskAttemptInfoMap.size() + " task attempts");
@@ -135,7 +136,8 @@ class TimelineEntityConverterV1 {
     return taskAttempts;
   }
 
-  private TimelineEntity createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) {
+  private TimelineEntity
+      createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) {
     TimelineEntity taskAttempt = new TimelineEntity();
     taskAttempt.setEntityType(TASK_ATTEMPT);
     taskAttempt.setEntityId(taskAttemptInfo.getAttemptId().toString());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java
index 79633d2..45812fe 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java
@@ -27,11 +27,6 @@ import java.util.Set;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.CounterGroup;
-import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
@@ -109,7 +104,7 @@ class TimelineEntityConverterV2 {
   }
 
   private void addConfiguration(TimelineEntity job, Configuration conf) {
-    for (Map.Entry<String,String> e: conf) {
+    for (Map.Entry<String, String> e: conf) {
       job.addConfig(e.getKey(), e.getValue());
     }
   }
@@ -130,7 +125,7 @@ class TimelineEntityConverterV2 {
   private List<TimelineEntity> createTaskAndTaskAttemptEntities(
       JobInfo jobInfo) {
     List<TimelineEntity> entities = new ArrayList<>();
-    Map<TaskID,TaskInfo> taskInfoMap = jobInfo.getAllTasks();
+    Map<TaskID, TaskInfo> taskInfoMap = jobInfo.getAllTasks();
     LOG.info("job " + jobInfo.getJobId()+ " has " + taskInfoMap.size() +
         " tasks");
     for (TaskInfo taskInfo: taskInfoMap.values()) {
@@ -167,7 +162,7 @@ class TimelineEntityConverterV2 {
 
   private Set<TimelineEntity> createTaskAttemptEntities(TaskInfo taskInfo) {
     Set<TimelineEntity> taskAttempts = new HashSet<TimelineEntity>();
-    Map<TaskAttemptID,TaskAttemptInfo> taskAttemptInfoMap =
+    Map<TaskAttemptID, TaskAttemptInfo> taskAttemptInfoMap =
         taskInfo.getAllTaskAttempts();
     LOG.info("task " + taskInfo.getTaskId() + " has " +
         taskAttemptInfoMap.size() + " task attempts");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java
index 1a14137..7fa0444 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java
@@ -137,7 +137,8 @@ public class TimelineServicePerformance extends Configured implements Tool {
     default:
       // use the current timestamp as the "run id" of the test: this will
       // be used as simulating the cluster timestamp for apps
-      conf.setLong(SimpleEntityWriterConstants.TIMELINE_SERVICE_PERFORMANCE_RUN_ID,
+      conf.setLong(
+          SimpleEntityWriterConstants.TIMELINE_SERVICE_PERFORMANCE_RUN_ID,
           System.currentTimeMillis());
       switch (timeline_service_version) {
       case TIMELINE_SERVICE_VERSION_2:

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
index edb825d..2d3d6ed 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
@@ -181,10 +181,11 @@ public class MiniMRYarnCluster extends MiniYARNCluster {
     }
     if (enableTimelineAuxService) {
       conf.setStrings(YarnConfiguration.NM_AUX_SERVICES,
-          new String[] { ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID, TIMELINE_AUX_SERVICE_NAME });
+          new String[] {ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID,
+              TIMELINE_AUX_SERVICE_NAME});
     } else {
       conf.setStrings(YarnConfiguration.NM_AUX_SERVICES,
-          new String[] { ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID });
+          new String[] {ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID});
     }
     conf.setClass(String.format(YarnConfiguration.NM_AUX_SERVICE_FMT,
         ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID), ShuffleHandler.class,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
index c43bd62..43805c8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
@@ -24,4 +24,3 @@
 package org.apache.hadoop.yarn.api.records.timelineservice;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 3787ff6..3bb73f5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -500,7 +500,8 @@ public class YarnConfiguration extends Configuration {
 
   /**
    *  The setting that controls whether yarn system metrics is published on the
-   *  timeline server or not by RM and NM. This configuration setting is for ATS V2
+   *  timeline server or not by RM and NM. This configuration setting is for
+   *  ATS v2.
    */
   public static final String SYSTEM_METRICS_PUBLISHER_ENABLED = YARN_PREFIX
       + "system-metrics-publisher.enabled";
@@ -840,7 +841,7 @@ public class YarnConfiguration extends Configuration {
   
   /** Number of threads container manager uses.*/
   public static final String NM_COLLECTOR_SERVICE_THREAD_COUNT =
-    NM_PREFIX + "collector-service.thread-count";
+      NM_PREFIX + "collector-service.thread-count";
   public static final int DEFAULT_NM_COLLECTOR_SERVICE_THREAD_COUNT = 5;
 
   /** Number of threads used in cleanup.*/
@@ -872,7 +873,7 @@ public class YarnConfiguration extends Configuration {
   
   /** Address where the collector service IPC is.*/
   public static final String NM_COLLECTOR_SERVICE_ADDRESS =
-    NM_PREFIX + "collector-service.address";
+      NM_PREFIX + "collector-service.address";
   public static final int DEFAULT_NM_COLLECTOR_SERVICE_PORT = 8048;
   public static final String DEFAULT_NM_COLLECTOR_SERVICE_ADDRESS =
       "0.0.0.0:" + DEFAULT_NM_LOCALIZER_PORT;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java
index ff6ebbd..e0268a6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java
@@ -36,6 +36,8 @@ public final class TimelineServiceHelper {
   /**
    * Cast map to HashMap for generic type.
    * @param originalMap the map need to be casted
+   * @param <E> key type
+   * @param <V> value type
    * @return casted HashMap object
    */
   public static <E, V> HashMap<E, V> mapCastToHashMap(

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
index 907d09e..b9949e1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
@@ -102,7 +102,6 @@ import org.apache.hadoop.yarn.client.api.async.impl.NMClientAsyncImpl;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 import org.apache.log4j.LogManager;
 
@@ -747,7 +746,7 @@ public class ApplicationMaster {
             DSEvent.DS_APP_ATTEMPT_END);
       } else {
         publishApplicationAttemptEvent(timelineClient, appAttemptID.toString(),
-          DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi);
+            DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi);
       }
     }
 
@@ -858,7 +857,7 @@ public class ApplicationMaster {
             publishContainerEndEventOnTimelineServiceV2(containerStatus);
           } else {
             publishContainerEndEvent(
-              timelineClient, containerStatus, domainId, appSubmitterUgi);
+                timelineClient, containerStatus, domainId, appSubmitterUgi);
           }
         }
       }
@@ -988,8 +987,8 @@ public class ApplicationMaster {
               container);
         } else {
           applicationMaster.publishContainerStartEvent(
-            applicationMaster.timelineClient, container,
-            applicationMaster.domainId, applicationMaster.appSubmitterUgi);
+              applicationMaster.timelineClient, container,
+              applicationMaster.domainId, applicationMaster.appSubmitterUgi);
         }
       }
     }
@@ -1348,8 +1347,10 @@ public class ApplicationMaster {
 
   private void publishContainerStartEventOnTimelineServiceV2(
       Container container) {
-    final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
-        new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity();
+    final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+        entity =
+            new org.apache.hadoop.yarn.api.records.timelineservice.
+            TimelineEntity();
     entity.setId(container.getId().toString());
     entity.setType(DSEntity.DS_CONTAINER.toString());
     long ts = System.currentTimeMillis();
@@ -1381,8 +1382,10 @@ public class ApplicationMaster {
 
   private void publishContainerEndEventOnTimelineServiceV2(
       final ContainerStatus container) {
-    final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
-        new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity();
+    final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+        entity =
+            new org.apache.hadoop.yarn.api.records.timelineservice.
+            TimelineEntity();
     entity.setId(container.getContainerId().toString());
     entity.setType(DSEntity.DS_CONTAINER.toString());
     //entity.setDomainId(domainId);
@@ -1412,8 +1415,10 @@ public class ApplicationMaster {
 
   private void publishApplicationAttemptEventOnTimelineServiceV2(
       DSEvent appEvent) {
-    final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
-        new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity();
+    final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+        entity =
+            new org.apache.hadoop.yarn.api.records.timelineservice.
+            TimelineEntity();
     entity.setId(appAttemptID.toString());
     entity.setType(DSEntity.DS_APP_ATTEMPT.toString());
     long ts = System.currentTimeMillis();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
index a01d21b..b9b8c7f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
@@ -153,7 +153,8 @@ public class TestDistributedShell {
         ProcfsBasedProcessTree.class.getName());
     conf.setBoolean(YarnConfiguration.NM_PMEM_CHECK_ENABLED, true);
     conf.setBoolean(YarnConfiguration.NM_VMEM_CHECK_ENABLED, true);
-    conf.setBoolean(YarnConfiguration.YARN_MINICLUSTER_CONTROL_RESOURCE_MONITORING,
+    conf.setBoolean(
+        YarnConfiguration.YARN_MINICLUSTER_CONTROL_RESOURCE_MONITORING,
         true);
     conf.setBoolean(YarnConfiguration.RM_SYSTEM_METRICS_PUBLISHER_ENABLED,
           true);
@@ -180,11 +181,13 @@ public class TestDistributedShell {
       // disable v1 timeline server since we no longer have a server here
       // enable aux-service based timeline aggregators
       conf.set(YarnConfiguration.NM_AUX_SERVICES, TIMELINE_AUX_SERVICE_NAME);
-      conf.set(YarnConfiguration.NM_AUX_SERVICES + "." + TIMELINE_AUX_SERVICE_NAME
-        + ".class", PerNodeTimelineCollectorsAuxService.class.getName());
+      conf.set(YarnConfiguration.NM_AUX_SERVICES + "." +
+          TIMELINE_AUX_SERVICE_NAME + ".class",
+          PerNodeTimelineCollectorsAuxService.class.getName());
       conf.setClass(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
           FileSystemTimelineWriterImpl.class,
-          org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter.class);
+          org.apache.hadoop.yarn.server.timelineservice.storage.
+              TimelineWriter.class);
     } else {
       Assert.fail("Wrong timeline version number: " + timelineVersion);
     }
@@ -395,7 +398,8 @@ public class TestDistributedShell {
       }
 
       if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED
-          && appReport.getFinalApplicationStatus() != FinalApplicationStatus.UNDEFINED) {
+          && appReport.getFinalApplicationStatus() !=
+              FinalApplicationStatus.UNDEFINED) {
         break;
       }
     }
@@ -431,7 +435,7 @@ public class TestDistributedShell {
   }
 
   private void checkTimelineV1(boolean haveDomain) throws Exception {
-        TimelineDomain domain = null;
+    TimelineDomain domain = null;
     if (haveDomain) {
       domain = yarnCluster.getApplicationHistoryServer()
           .getTimelineStore().getDomain("TEST_DOMAIN");
@@ -545,7 +549,7 @@ public class TestDistributedShell {
         if (numOfContainerFinishedOccurences > 0) {
           break;
         } else {
-          Thread.sleep(500l);
+          Thread.sleep(500L);
         }
       }
       Assert.assertEquals(
@@ -577,7 +581,7 @@ public class TestDistributedShell {
         if (numOfStringOccurences > 0) {
           break;
         } else {
-          Thread.sleep(500l);
+          Thread.sleep(500L);
         }
       }
       Assert.assertEquals(
@@ -631,8 +635,9 @@ public class TestDistributedShell {
     try {
       reader = new BufferedReader(new FileReader(entityFile));
       while ((strLine = reader.readLine()) != null) {
-        if (strLine.trim().contains(searchString))
+        if (strLine.trim().contains(searchString)) {
           actualCount++;
+        }
       }
     } finally {
       reader.close();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java
index c55f202..c651d32 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java
@@ -30,9 +30,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
 import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.TestName;
 
 import com.google.common.collect.ImmutableMap;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java
index 7f64dd5..7acaf11 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java
@@ -463,15 +463,15 @@ public abstract class AMRMClient<T extends AMRMClient.ContainerRequest> extends
 
   /**
    * Register TimelineClient to AMRMClient.
-   * @param timelineClient
+   * @param client the timeline client to register
    */
-  public void registerTimelineClient(TimelineClient timelineClient) {
-    this.timelineClient = timelineClient;
+  public void registerTimelineClient(TimelineClient client) {
+    this.timelineClient = client;
   }
 
   /**
    * Get registered timeline client.
-   * @return
+   * @return the registered timeline client
    */
   public TimelineClient getRegisteredTimeineClient() {
     return this.timelineClient;
@@ -481,7 +481,7 @@ public abstract class AMRMClient<T extends AMRMClient.ContainerRequest> extends
    * Wait for <code>check</code> to return true for each 1000 ms.
    * See also {@link #waitFor(com.google.common.base.Supplier, int)}
    * and {@link #waitFor(com.google.common.base.Supplier, int, int)}
-   * @param check
+   * @param check the condition for which it should wait
    */
   public void waitFor(Supplier<Boolean> check) throws InterruptedException {
     waitFor(check, 1000);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java
index 7cdda1b..28d20c8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java
@@ -304,7 +304,7 @@ extends AbstractService {
 
   /**
    * Get registered timeline client.
-   * @return
+   * @return the registered timeline client
    */
   public TimelineClient getRegisteredTimeineClient() {
     return client.getRegisteredTimeineClient();
@@ -325,7 +325,7 @@ extends AbstractService {
    * Wait for <code>check</code> to return true for each 1000 ms.
    * See also {@link #waitFor(com.google.common.base.Supplier, int)}
    * and {@link #waitFor(com.google.common.base.Supplier, int, int)}
-   * @param check
+   * @param check the condition for which it should wait
    */
   public void waitFor(Supplier<Boolean> check) throws InterruptedException {
     waitFor(check, 1000);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
index e043c2f..cc76718 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
@@ -30,8 +30,6 @@ import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
@@ -55,10 +53,12 @@ public abstract class TimelineClient extends AbstractService implements
    * construct and initialize a timeline client if the following operations are
    * supposed to be conducted by that user.
    */
-  protected ApplicationId contextAppId;
+  private ApplicationId contextAppId;
 
   /**
    * Creates an instance of the timeline v.1.x client.
+   *
+   * @return the created timeline client instance
    */
   @Public
   public static TimelineClient createTimelineClient() {
@@ -68,6 +68,10 @@ public abstract class TimelineClient extends AbstractService implements
 
   /**
    * Creates an instance of the timeline v.2 client.
+   *
+   * @param appId the application id with which the timeline client is
+   * associated
+   * @return the created timeline client instance
    */
   @Public
   public static TimelineClient createTimelineClient(ApplicationId appId) {
@@ -91,8 +95,8 @@ public abstract class TimelineClient extends AbstractService implements
    * @param entities
    *          the collection of {@link TimelineEntity}
    * @return the error information if the sent entities are not correctly stored
-   * @throws IOException
-   * @throws YarnException
+   * @throws IOException if there are I/O errors
+   * @throws YarnException if entities are incomplete/invalid
    */
   @Public
   public abstract TimelinePutResponse putEntities(
@@ -112,8 +116,8 @@ public abstract class TimelineClient extends AbstractService implements
    * @param entities
    *          the collection of {@link TimelineEntity}
    * @return the error information if the sent entities are not correctly stored
-   * @throws IOException
-   * @throws YarnException
+   * @throws IOException if there are I/O errors
+   * @throws YarnException if entities are incomplete/invalid
    */
   @Public
   public abstract TimelinePutResponse putEntities(
@@ -212,15 +216,15 @@ public abstract class TimelineClient extends AbstractService implements
    * for a non-v.2 timeline client instance, a YarnException is thrown.
    * </p>
    *
-   * @param entities
-   *          the collection of {@link org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity}
+   * @param entities the collection of {@link
+   * org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity}
    * @throws IOException
    * @throws YarnException
    */
   @Public
   public abstract void putEntities(
-      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities)
-      throws IOException, YarnException;
+      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity...
+          entities) throws IOException, YarnException;
 
   /**
    * <p>
@@ -230,15 +234,15 @@ public abstract class TimelineClient extends AbstractService implements
    * non-v.2 timeline client instance, a YarnException is thrown.
    * </p>
    *
-   * @param entities
-   *          the collection of {@link org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity}
+   * @param entities the collection of {@link
+   * org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity}
    * @throws IOException
    * @throws YarnException
    */
   @Public
   public abstract void putEntitiesAsync(
-      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities)
-      throws IOException, YarnException;
+      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity...
+          entities) throws IOException, YarnException;
 
   /**
    * <p>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
index 478efc4..4a5a443 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
@@ -391,8 +391,8 @@ public class TimelineClientImpl extends TimelineClient {
 
   @Override
   public void putEntities(
-      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities)
-          throws IOException, YarnException {
+      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity...
+          entities) throws IOException, YarnException {
     if (!timelineServiceV2) {
       throw new YarnException("v.2 method is invoked on a v.1.x client");
     }
@@ -401,8 +401,8 @@ public class TimelineClientImpl extends TimelineClient {
 
   @Override
   public void putEntitiesAsync(
-      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities)
-      throws IOException, YarnException {
+      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity...
+          entities) throws IOException, YarnException {
     if (!timelineServiceV2) {
       throw new YarnException("v.2 method is invoked on a v.1.x client");
     }
@@ -494,7 +494,8 @@ public class TimelineClientImpl extends TimelineClient {
       throw new IOException(re);
     }
     if (resp == null ||
-        resp.getClientResponseStatus() != ClientResponse.Status.OK) {
+        resp.getStatusInfo().getStatusCode() !=
+            ClientResponse.Status.OK.getStatusCode()) {
       String msg = "Response from the timeline server is " +
           ((resp == null) ? "null":
           "not successful," + " HTTP error code: " + resp.getStatus()
@@ -530,7 +531,8 @@ public class TimelineClientImpl extends TimelineClient {
             // TODO we should add retry logic here if timelineServiceAddress is
             // not available immediately.
             return (Token) authUrl.getDelegationToken(
-                constructResURI(getConfig(), getTimelineServiceAddress(), false).toURL(),
+                constructResURI(getConfig(),
+                    getTimelineServiceAddress(), false).toURL(),
                 token, renewer, doAsUser);
           }
         };
@@ -911,17 +913,21 @@ public class TimelineClientImpl extends TimelineClient {
   }
 
   private final class EntitiesHolder extends FutureTask<Void> {
-    private final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities entities;
+    private final
+        org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities
+            entities;
     private final boolean isSync;
 
     EntitiesHolder(
-        final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities entities,
+        final
+            org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities
+                entities,
         final boolean isSync) {
       super(new Callable<Void>() {
         // publishEntities()
         public Void call() throws Exception {
           MultivaluedMap<String, String> params = new MultivaluedMapImpl();
-          params.add("appid", contextAppId.toString());
+          params.add("appid", getContextAppId().toString());
           params.add("async", Boolean.toString(!isSync));
           putObjects("entities", params, entities);
           return null;
@@ -935,7 +941,8 @@ public class TimelineClientImpl extends TimelineClient {
       return isSync;
     }
 
-    public org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities getEntities() {
+    public org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities
+        getEntities() {
       return entities;
     }
   }
@@ -947,7 +954,7 @@ public class TimelineClientImpl extends TimelineClient {
   private class TimelineEntityDispatcher {
     /**
      * Time period for which the timelineclient will wait for draining after
-     * stop
+     * stop.
      */
     private static final long DRAIN_TIME_PERIOD = 2000L;
 
@@ -1063,17 +1070,20 @@ public class TimelineClientImpl extends TimelineClient {
     }
 
     public void dispatchEntities(boolean sync,
-        org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity[] entitiesTobePublished)
-            throws YarnException {
+        org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity[]
+            entitiesTobePublished) throws YarnException {
       if (executor.isShutdown()) {
         throw new YarnException("Timeline client is in the process of stopping,"
             + " not accepting any more TimelineEntities");
       }
 
       // wrap all TimelineEntity into TimelineEntities object
-      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities entities =
-          new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities();
-      for (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity : entitiesTobePublished) {
+      org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities
+          entities =
+              new org.apache.hadoop.yarn.api.records.timelineservice.
+                  TimelineEntities();
+      for (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+               entity : entitiesTobePublished) {
         entities.addEntity(entity);
       }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java
index 592bfa3..221969b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java
@@ -100,7 +100,8 @@ public class TestTimelineServiceRecords {
     }
     entity.addMetric(metric2);
 
-    TimelineMetric metric3 = new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
+    TimelineMetric metric3 =
+        new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
     metric3.setId("test metric id 1");
     metric3.addValue(4L, (short) 4);
     Assert.assertEquals("metric3 should equal to metric2! ", metric3, metric2);
@@ -212,18 +213,22 @@ public class TestTimelineServiceRecords {
         ApplicationAttemptId.newInstance(
             ApplicationId.newInstance(0, 1), 1), 1).toString());
 
-    cluster.addChild(TimelineEntityType.YARN_FLOW_RUN.toString(), flow1.getId());
+    cluster.addChild(TimelineEntityType.YARN_FLOW_RUN.toString(),
+        flow1.getId());
     flow1
         .setParent(TimelineEntityType.YARN_CLUSTER.toString(), cluster.getId());
     flow1.addChild(TimelineEntityType.YARN_FLOW_RUN.toString(), flow2.getId());
     flow2.setParent(TimelineEntityType.YARN_FLOW_RUN.toString(), flow1.getId());
-    flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(), app1.getId());
-    flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(), app2.getId());
+    flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(),
+        app1.getId());
+    flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(),
+        app2.getId());
     app1.setParent(TimelineEntityType.YARN_FLOW_RUN.toString(), flow2.getId());
     app1.addChild(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
         appAttempt.getId());
     appAttempt
-        .setParent(TimelineEntityType.YARN_APPLICATION.toString(), app1.getId());
+        .setParent(TimelineEntityType.YARN_APPLICATION.toString(),
+            app1.getId());
     app2.setParent(TimelineEntityType.YARN_FLOW_RUN.toString(), flow2.getId());
     appAttempt.addChild(TimelineEntityType.YARN_CONTAINER.toString(),
         container.getId());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java
index 71dafdc..5813340 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java
@@ -43,7 +43,7 @@ public class TestTimelineClientV2Impl {
   private static final Log LOG =
       LogFactory.getLog(TestTimelineClientV2Impl.class);
   private TestV2TimelineClient client;
-  private static long TIME_TO_SLEEP = 150;
+  private static final long TIME_TO_SLEEP = 150L;
   private static final String EXCEPTION_MSG = "Exception in the content";
 
   @Before
@@ -62,12 +62,12 @@ public class TestTimelineClientV2Impl {
   public TestName currTestName = new TestName();
   private YarnConfiguration conf;
 
-  private TestV2TimelineClient createTimelineClient(YarnConfiguration conf) {
+  private TestV2TimelineClient createTimelineClient(YarnConfiguration config) {
     ApplicationId id = ApplicationId.newInstance(0, 0);
-    TestV2TimelineClient client = new TestV2TimelineClient(id);
-    client.init(conf);
-    client.start();
-    return client;
+    TestV2TimelineClient tc = new TestV2TimelineClient(id);
+    tc.init(config);
+    tc.start();
+    return tc;
   }
 
   private class TestV2TimelineClientForExceptionHandling
@@ -76,12 +76,16 @@ public class TestTimelineClientV2Impl {
       super(id);
     }
 
-    protected boolean throwYarnException;
+    private boolean throwYarnException;
 
     public void setThrowYarnException(boolean throwYarnException) {
       this.throwYarnException = throwYarnException;
     }
 
+    public boolean isThrowYarnException() {
+      return throwYarnException;
+    }
+
     @Override
     protected void putObjects(URI base, String path,
         MultivaluedMap<String, String> params, Object obj)
@@ -123,7 +127,7 @@ public class TestTimelineClientV2Impl {
     protected void putObjects(String path,
         MultivaluedMap<String, String> params, Object obj)
             throws IOException, YarnException {
-      if (throwYarnException) {
+      if (isThrowYarnException()) {
         throw new YarnException("ActualException");
       }
       publishedEntities.add((TimelineEntities) obj);
@@ -139,17 +143,17 @@ public class TestTimelineClientV2Impl {
 
   @Test
   public void testExceptionMultipleRetry() {
-    TestV2TimelineClientForExceptionHandling client =
+    TestV2TimelineClientForExceptionHandling c =
         new TestV2TimelineClientForExceptionHandling(
             ApplicationId.newInstance(0, 0));
     int maxRetries = 2;
     conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES,
         maxRetries);
-    client.init(conf);
-    client.start();
-    client.setTimelineServiceAddress("localhost:12345");
+    c.init(conf);
+    c.start();
+    c.setTimelineServiceAddress("localhost:12345");
     try {
-      client.putEntities(new TimelineEntity());
+      c.putEntities(new TimelineEntity());
     } catch (IOException e) {
       Assert.fail("YARN exception is expected");
     } catch (YarnException e) {
@@ -161,9 +165,9 @@ public class TestTimelineClientV2Impl {
               "TimelineClient has reached to max retry times : " + maxRetries));
     }
 
-    client.setThrowYarnException(true);
+    c.setThrowYarnException(true);
     try {
-      client.putEntities(new TimelineEntity());
+      c.putEntities(new TimelineEntity());
     } catch (IOException e) {
       Assert.fail("YARN exception is expected");
     } catch (YarnException e) {
@@ -173,7 +177,7 @@ public class TestTimelineClientV2Impl {
       Assert.assertTrue("YARN exception is expected",
           cause.getMessage().contains(EXCEPTION_MSG));
     }
-    client.stop();
+    c.stop();
   }
 
   @Test
@@ -348,7 +352,6 @@ public class TestTimelineClientV2Impl {
     for (int i = 0; i < client.getNumOfTimelineEntitiesPublished(); i++) {
       TimelineEntities publishedEntities = client.getPublishedEntities(i);
       StringBuilder entitiesPerPublish = new StringBuilder();
-      ;
       for (TimelineEntity entity : publishedEntities.getEntities()) {
         entitiesPerPublish.append(entity.getId());
         entitiesPerPublish.append(",");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java
index 34b9497..d3d815b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java
@@ -37,18 +37,21 @@ public class TestTimelineServiceHelper {
 
     // Test that an empty hashmap is cast to an empty hashmap
     Map<String, String> emptyHashMap = new HashMap<String, String>();
-    Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(emptyHashMap).size(), 0);
+    Assert.assertEquals(
+        TimelineServiceHelper.mapCastToHashMap(emptyHashMap).size(), 0);
 
     // Test that an empty non-hashmap is cast to an empty hashmap
     Map<String, String> emptyTreeMap = new TreeMap<String, String>();
-    Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(emptyTreeMap).size(), 0);
+    Assert.assertEquals(
+        TimelineServiceHelper.mapCastToHashMap(emptyTreeMap).size(), 0);
 
     // Test non-empty hashmap be casted to hashmap correctly
     Map<String, String> firstHashMap = new HashMap<String, String>();
     String key = "KEY";
     String value = "VALUE";
     firstHashMap.put(key, value);
-    Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(firstHashMap), firstHashMap);
+    Assert.assertEquals(
+        TimelineServiceHelper.mapCastToHashMap(firstHashMap), firstHashMap);
 
     // Test non-empty non-hashmap is casted correctly.
     Map<String, String> firstTreeMap = new TreeMap<String, String>();
@@ -59,17 +62,21 @@ public class TestTimelineServiceHelper {
     Assert.assertEquals(alternateHashMap.get(key), value);
 
     // Test complicated hashmap be casted correctly
-    Map<String, Set<String>> complicatedHashMap = new HashMap<String, Set<String>>();
+    Map<String, Set<String>> complicatedHashMap =
+        new HashMap<String, Set<String>>();
     Set<String> hashSet = new HashSet<String>();
     hashSet.add(value);
     complicatedHashMap.put(key, hashSet);
-    Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(complicatedHashMap),
+    Assert.assertEquals(
+        TimelineServiceHelper.mapCastToHashMap(complicatedHashMap),
         complicatedHashMap);
 
     // Test complicated non-hashmap get casted correctly
-    Map<String, Set<String>> complicatedTreeMap = new TreeMap<String, Set<String>>();
+    Map<String, Set<String>> complicatedTreeMap =
+        new TreeMap<String, Set<String>>();
     complicatedTreeMap.put(key, hashSet);
-    Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(complicatedTreeMap).get(key),
+    Assert.assertEquals(
+        TimelineServiceHelper.mapCastToHashMap(complicatedTreeMap).get(key),
         hashSet);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java
index d23c04a..64eea63 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java
@@ -48,9 +48,9 @@ public interface CollectorNodemanagerProtocol {
    *
    * @param request the request of registering a new collector or a list of
    *                collectors
-   * @return
-   * @throws YarnException
-   * @throws IOException
+   * @return the response for registering the new collector
+   * @throws YarnException if the request is invalid
+   * @throws IOException if there are I/O errors
    */
   ReportNewCollectorInfoResponse reportNewCollectorInfo(
       ReportNewCollectorInfoRequest request)
@@ -63,9 +63,9 @@ public interface CollectorNodemanagerProtocol {
    * </p>
    * @param request the request of getting the aggregator context information of
    *                the given application
-   * @return
-   * @throws YarnException
-   * @throws IOException
+   * @return the response containing the timeline collector context
+   * @throws YarnException if the request is invalid
+   * @throws IOException if there are I/O errors
    */
   GetTimelineCollectorContextResponse getTimelineCollectorContext(
       GetTimelineCollectorContextRequest request)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java
index 655e989..24f7c3d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java
@@ -25,7 +25,8 @@ import org.apache.hadoop.yarn.proto.CollectorNodemanagerProtocol.CollectorNodema
 @Private
 @Unstable
 @ProtocolInfo(
-    protocolName = "org.apache.hadoop.yarn.server.api.CollectorNodemanagerProtocolPB",
+    protocolName =
+        "org.apache.hadoop.yarn.server.api.CollectorNodemanagerProtocolPB",
     protocolVersion = 1)
 public interface CollectorNodemanagerProtocolPB extends
     CollectorNodemanagerProtocolService.BlockingInterface {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java
index b9e17f2..bc50ac5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java
@@ -53,7 +53,7 @@ public class CollectorNodemanagerProtocolPBClientImpl implements
       + "rpc.nm-command-timeout";
 
   /**
-   * Maximum of 1 minute timeout for a Node to react to the command
+   * Maximum of 1 minute timeout for a Node to react to the command.
    */
   static final int DEFAULT_COMMAND_TIMEOUT = 60000;
 
@@ -63,7 +63,7 @@ public class CollectorNodemanagerProtocolPBClientImpl implements
   public CollectorNodemanagerProtocolPBClientImpl(long clientVersion,
       InetSocketAddress addr, Configuration conf) throws IOException {
     RPC.setProtocolEngine(conf, CollectorNodemanagerProtocolPB.class,
-      ProtobufRpcEngine.class);
+        ProtobufRpcEngine.class);
     UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
 
     int expireIntvl = conf.getInt(NM_COMMAND_TIMEOUT, DEFAULT_COMMAND_TIMEOUT);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java
index 21fb270..7b93a68 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java
@@ -41,7 +41,8 @@ public class CollectorNodemanagerProtocolPBServiceImpl implements
 
   private CollectorNodemanagerProtocol real;
 
-  public CollectorNodemanagerProtocolPBServiceImpl(CollectorNodemanagerProtocol impl) {
+  public CollectorNodemanagerProtocolPBServiceImpl(
+      CollectorNodemanagerProtocol impl) {
     this.real = impl;
   }
 
@@ -52,7 +53,8 @@ public class CollectorNodemanagerProtocolPBServiceImpl implements
     ReportNewCollectorInfoRequestPBImpl request =
         new ReportNewCollectorInfoRequestPBImpl(proto);
     try {
-      ReportNewCollectorInfoResponse response = real.reportNewCollectorInfo(request);
+      ReportNewCollectorInfoResponse response =
+          real.reportNewCollectorInfo(request);
       return ((ReportNewCollectorInfoResponsePBImpl)response).getProto();
     } catch (YarnException e) {
       throw new ServiceException(e);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java
index b53b55b..7014388 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java
@@ -29,10 +29,10 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorCon
 public class GetTimelineCollectorContextRequestPBImpl extends
     GetTimelineCollectorContextRequest {
 
-  GetTimelineCollectorContextRequestProto
+  private GetTimelineCollectorContextRequestProto
       proto = GetTimelineCollectorContextRequestProto.getDefaultInstance();
-  GetTimelineCollectorContextRequestProto.Builder builder = null;
-  boolean viaProto = false;
+  private GetTimelineCollectorContextRequestProto.Builder builder = null;
+  private boolean viaProto = false;
 
   private ApplicationId appId = null;
 
@@ -60,8 +60,9 @@ public class GetTimelineCollectorContextRequestPBImpl extends
 
   @Override
   public boolean equals(Object other) {
-    if (other == null)
+    if (other == null) {
       return false;
+    }
     if (other.getClass().isAssignableFrom(this.getClass())) {
       return this.getProto().equals(this.getClass().cast(other).getProto());
     }
@@ -80,8 +81,9 @@ public class GetTimelineCollectorContextRequestPBImpl extends
   }
 
   private void mergeLocalToProto() {
-    if (viaProto)
+    if (viaProto) {
       maybeInitBuilder();
+    }
     mergeLocalToBuilder();
     proto = builder.build();
     viaProto = true;
@@ -100,7 +102,8 @@ public class GetTimelineCollectorContextRequestPBImpl extends
       return this.appId;
     }
 
-    GetTimelineCollectorContextRequestProtoOrBuilder p = viaProto ? proto : builder;
+    GetTimelineCollectorContextRequestProtoOrBuilder p =
+        viaProto ? proto : builder;
     if (!p.hasAppId()) {
       return null;
     }
@@ -110,14 +113,16 @@ public class GetTimelineCollectorContextRequestPBImpl extends
   }
 
   @Override
-  public void setApplicationId(ApplicationId appId) {
+  public void setApplicationId(ApplicationId id) {
     maybeInitBuilder();
-    if (appId == null)
+    if (id == null) {
       builder.clearAppId();
-    this.appId = appId;
+    }
+    this.appId = id;
   }
 
-  private ApplicationIdPBImpl convertFromProtoFormat(YarnProtos.ApplicationIdProto p) {
+  private ApplicationIdPBImpl convertFromProtoFormat(
+      YarnProtos.ApplicationIdProto p) {
     return new ApplicationIdPBImpl(p);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java
index 34713cb..151b036 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java
@@ -26,10 +26,10 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorCon
 public class GetTimelineCollectorContextResponsePBImpl extends
     GetTimelineCollectorContextResponse {
 
-  GetTimelineCollectorContextResponseProto proto =
+  private GetTimelineCollectorContextResponseProto proto =
       GetTimelineCollectorContextResponseProto.getDefaultInstance();
-  GetTimelineCollectorContextResponseProto.Builder builder = null;
-  boolean viaProto = false;
+  private GetTimelineCollectorContextResponseProto.Builder builder = null;
+  private boolean viaProto = false;
 
   public GetTimelineCollectorContextResponsePBImpl() {
     builder = GetTimelineCollectorContextResponseProto.newBuilder();
@@ -55,8 +55,9 @@ public class GetTimelineCollectorContextResponsePBImpl extends
 
   @Override
   public boolean equals(Object other) {
-    if (other == null)
+    if (other == null) {
       return false;
+    }
     if (other.getClass().isAssignableFrom(this.getClass())) {
       return this.getProto().equals(this.getClass().cast(other).getProto());
     }
@@ -69,8 +70,9 @@ public class GetTimelineCollectorContextResponsePBImpl extends
   }
 
   private void mergeLocalToProto() {
-    if (viaProto)
+    if (viaProto) {
       maybeInitBuilder();
+    }
     proto = builder.build();
     viaProto = true;
   }
@@ -84,7 +86,8 @@ public class GetTimelineCollectorContextResponsePBImpl extends
 
   @Override
   public String getUserId() {
-    GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder;
+    GetTimelineCollectorContextResponseProtoOrBuilder p =
+        viaProto ? proto : builder;
     if (!p.hasUserId()) {
       return null;
     }
@@ -103,7 +106,8 @@ public class GetTimelineCollectorContextResponsePBImpl extends
 
   @Override
   public String getFlowName() {
-    GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder;
+    GetTimelineCollectorContextResponseProtoOrBuilder p =
+        viaProto ? proto : builder;
     if (!p.hasFlowName()) {
       return null;
     }
@@ -122,7 +126,8 @@ public class GetTimelineCollectorContextResponsePBImpl extends
 
   @Override
   public String getFlowVersion() {
-    GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder;
+    GetTimelineCollectorContextResponseProtoOrBuilder p =
+        viaProto ? proto : builder;
     if (!p.hasFlowVersion()) {
       return null;
     }
@@ -141,7 +146,8 @@ public class GetTimelineCollectorContextResponsePBImpl extends
 
   @Override
   public long getFlowRunId() {
-    GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder;
+    GetTimelineCollectorContextResponseProtoOrBuilder p =
+        viaProto ? proto : builder;
     return p.getFlowRunId();
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cf6ab7b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
index 912a7a3..d0c1198 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
@@ -58,7 +58,7 @@ public class NodeHeartbeatRequestPBImpl extends NodeHeartbeatRequest {
   private Set<NodeLabel> labels = null;
   private List<LogAggregationReport> logAggregationReportsForApps = null;
 
-  Map<ApplicationId, String> registeredCollectors = null;
+  private Map<ApplicationId, String> registeredCollectors = null;
 
   public NodeHeartbeatRequestPBImpl() {
     builder = NodeHeartbeatRequestProto.newBuilder();
@@ -161,10 +161,11 @@ public class NodeHeartbeatRequestPBImpl extends NodeHeartbeatRequest {
   private void addRegisteredCollectorsToProto() {
     maybeInitBuilder();
     builder.clearRegisteredCollectors();
-    for (Map.Entry<ApplicationId, String> entry : registeredCollectors.entrySet()) {
+    for (Map.Entry<ApplicationId, String> entry :
+        registeredCollectors.entrySet()) {
       builder.addRegisteredCollectors(AppCollectorsMapProto.newBuilder()
-        .setAppId(convertToProtoFormat(entry.getKey()))
-        .setAppCollectorAddr(entry.getValue()));
+          .setAppId(convertToProtoFormat(entry.getKey()))
+          .setAppCollectorAddr(entry.getValue()));
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message