hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From rohithsharm...@apache.org
Subject hadoop git commit: MAPREDUCE-5803. Counters page displays all tasks regardless of task type (Map or Reduce). Contributed by Kai Sasaki.
Date Tue, 28 Jun 2016 04:16:16 GMT
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 22eb53eb7 -> 0a1a2ce07


MAPREDUCE-5803. Counters page displays all tasks regardless of task type (Map or Reduce).
Contributed by Kai Sasaki.

(cherry picked from commit 4fd37eed9054cd292a30bb1e71bb72efee458419)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0a1a2ce0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0a1a2ce0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0a1a2ce0

Branch: refs/heads/branch-2
Commit: 0a1a2ce0702b772c674493e736ca6aabdde34a6f
Parents: 22eb53e
Author: Rohith Sharma K S <rohithsharmaks@apache.org>
Authored: Tue Jun 28 09:42:42 2016 +0530
Committer: Rohith Sharma K S <rohithsharmaks@apache.org>
Committed: Tue Jun 28 09:45:15 2016 +0530

----------------------------------------------------------------------
 .../v2/app/webapp/SingleCounterBlock.java       |  14 ++-
 .../mapreduce/v2/app/webapp/TestBlocks.java     | 119 +++++++++++++++++++
 2 files changed, 132 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a1a2ce0/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java
index 974b3ff..c4311e9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/SingleCounterBlock.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
@@ -51,6 +52,7 @@ public class SingleCounterBlock extends HtmlBlock {
   protected TreeMap<String, Long> values = new TreeMap<String, Long>(); 
   protected Job job;
   protected Task task;
+  private TaskType counterType;
   
   @Inject SingleCounterBlock(AppContext appCtx, ViewContext ctx) {
     super(ctx);
@@ -101,6 +103,13 @@ public class SingleCounterBlock extends HtmlBlock {
     JobId jobID = null;
     TaskId taskID = null;
     String tid = $(TASK_ID);
+    if ($(TITLE).contains("MAPS")) {
+      counterType = TaskType.MAP;
+    } else if ($(TITLE).contains("REDUCES")) {
+      counterType = TaskType.REDUCE;
+    } else {
+      counterType = null;
+    }
     if (!tid.isEmpty()) {
       taskID = MRApps.toTaskID(tid);
       jobID = taskID.getJobId();
@@ -152,7 +161,10 @@ public class SingleCounterBlock extends HtmlBlock {
           value = c.getValue();
         }
       }
-      values.put(MRApps.toString(entry.getKey()), value);
+      if (counterType == null ||
+              counterType == entry.getValue().getType()) {
+        values.put(MRApps.toString(entry.getKey()), value);
+      }
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0a1a2ce0/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestBlocks.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestBlocks.java
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestBlocks.java
index 3876fe8..0e250f4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestBlocks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestBlocks.java
@@ -23,6 +23,7 @@ import java.io.PrintWriter;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.hadoop.yarn.webapp.View;
 import org.junit.Test;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -206,6 +207,68 @@ public class TestBlocks {
         +"attempt_0_0001_r_000000_0</a>"));
   }
 
+  @Test
+  public void testSingleCounterBlock() {
+    AppContext appCtx = mock(AppContext.class);
+    View.ViewContext ctx = mock(View.ViewContext.class);
+    JobId jobId = new JobIdPBImpl();
+    jobId.setId(0);
+    jobId.setAppId(ApplicationIdPBImpl.newInstance(0, 1));
+
+    TaskId mapTaskId = new TaskIdPBImpl();
+    mapTaskId.setId(0);
+    mapTaskId.setTaskType(TaskType.MAP);
+    mapTaskId.setJobId(jobId);
+    Task mapTask = mock(Task.class);
+    when(mapTask.getID()).thenReturn(mapTaskId);
+    TaskReport mapReport = mock(TaskReport.class);
+    when(mapTask.getReport()).thenReturn(mapReport);
+    when(mapTask.getType()).thenReturn(TaskType.MAP);
+
+    TaskId reduceTaskId = new TaskIdPBImpl();
+    reduceTaskId.setId(0);
+    reduceTaskId.setTaskType(TaskType.REDUCE);
+    reduceTaskId.setJobId(jobId);
+    Task reduceTask = mock(Task.class);
+    when(reduceTask.getID()).thenReturn(reduceTaskId);
+    TaskReport reduceReport = mock(TaskReport.class);
+    when(reduceTask.getReport()).thenReturn(reduceReport);
+    when(reduceTask.getType()).thenReturn(TaskType.REDUCE);
+
+    Map<TaskId, Task> tasks =
+            new HashMap<TaskId, Task>();
+    tasks.put(mapTaskId, mapTask);
+    tasks.put(reduceTaskId, reduceTask);
+
+    Job job = mock(Job.class);
+    when(job.getTasks()).thenReturn(tasks);
+    when(appCtx.getJob(any(JobId.class))).thenReturn(job);
+
+    // SingleCounter for map task
+    SingleCounterBlockForMapTest blockForMapTest
+            = spy(new SingleCounterBlockForMapTest(appCtx, ctx));
+    PrintWriter pWriterForMapTest = new PrintWriter(data);
+    Block htmlForMapTest = new BlockForTest(new HtmlBlockForTest(),
+            pWriterForMapTest, 0, false);
+    blockForMapTest.render(htmlForMapTest);
+    pWriterForMapTest.flush();
+    assertTrue(data.toString().contains("task_0_0001_m_000000"));
+    assertFalse(data.toString().contains("task_0_0001_r_000000"));
+
+    data.reset();
+    // SingleCounter for reduce task
+    SingleCounterBlockForReduceTest blockForReduceTest
+            = spy(new SingleCounterBlockForReduceTest(appCtx, ctx));
+    PrintWriter pWriterForReduceTest = new PrintWriter(data);
+    Block htmlForReduceTest = new BlockForTest(new HtmlBlockForTest(),
+            pWriterForReduceTest, 0, false);
+    blockForReduceTest.render(htmlForReduceTest);
+    pWriterForReduceTest.flush();
+    System.out.println(data.toString());
+    assertFalse(data.toString().contains("task_0_0001_m_000000"));
+    assertTrue(data.toString().contains("task_0_0001_r_000000"));
+  }
+
   private class ConfBlockForTest extends ConfBlock {
     private final Map<String, String> params = new HashMap<String, String>();
 
@@ -258,4 +321,60 @@ public class TestBlocks {
       return result;
     }
   }
+
+  private class SingleCounterBlockForMapTest extends SingleCounterBlock {
+
+    public SingleCounterBlockForMapTest(AppContext appCtx, ViewContext ctx) {
+      super(appCtx, ctx);
+    }
+
+    public String $(String key, String defaultValue) {
+      if (key.equals(TITLE)) {
+        return "org.apache.hadoop.mapreduce.JobCounter DATA_LOCAL_MAPS for " +
+                "job_12345_0001";
+      } else if (key.equals(AMParams.JOB_ID)) {
+        return "job_12345_0001";
+      } else if (key.equals(AMParams.TASK_ID)) {
+        return "";
+      }
+      return "";
+    }
+
+    @Override
+    public String url(String... parts) {
+      String result = "url://";
+      for (String string : parts) {
+        result += string + ":";
+      }
+      return result;
+    }
+  }
+
+  private class SingleCounterBlockForReduceTest extends SingleCounterBlock {
+
+    public SingleCounterBlockForReduceTest(AppContext appCtx, ViewContext ctx) {
+      super(appCtx, ctx);
+    }
+
+    public String $(String key, String defaultValue) {
+      if (key.equals(TITLE)) {
+        return "org.apache.hadoop.mapreduce.JobCounter DATA_LOCAL_REDUCES " +
+            "for job_12345_0001";
+      } else if (key.equals(AMParams.JOB_ID)) {
+        return "job_12345_0001";
+      } else if (key.equals(AMParams.TASK_ID)) {
+        return "";
+      }
+      return "";
+    }
+
+    @Override
+    public String url(String... parts) {
+      String result = "url://";
+      for (String string : parts) {
+        result += string + ":";
+      }
+      return result;
+    }
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message