hadoop-mapreduce-commits mailing list archives

From: d...@apache.org
Subject: svn commit: r816720 - in /hadoop/mapreduce/trunk: ./ src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/ src/java/org/apache/hadoop/mapred/ src/java/org/apache/hadoop/mapreduce/jobhistory/ src/java/org/apache/hadoop/mapreduce/task/redu...
Date: Fri, 18 Sep 2009 17:38:17 GMT
Author: ddas
Date: Fri Sep 18 17:38:16 2009
New Revision: 816720

URL: http://svn.apache.org/viewvc?rev=816720&view=rev
Log:
MAPREDUCE-277. Makes job history counters available on the job history viewers. Contributed
by Jothi Padmanabhan.
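
For reviewers, the net effect is that a parsed job history now carries three
counter sets (map, reduce, total) instead of one. A minimal consumer-side
sketch, assuming a JobHistoryParser.JobInfo has already been obtained from a
history file (the parsing entry point is untouched by this patch); the class
and method names CounterDumpSketch/dumpCounters are hypothetical, and only the
accessors added in this change are relied upon:

    import java.util.Iterator;

    import org.apache.hadoop.mapreduce.Counter;
    import org.apache.hadoop.mapreduce.CounterGroup;
    import org.apache.hadoop.mapreduce.Counters;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;

    public class CounterDumpSketch {
      // Print every counter with its map, reduce and total values, using the
      // JobInfo accessors added by this patch.
      static void dumpCounters(JobHistoryParser.JobInfo job) {
        Counters total = job.getTotalCounters();
        Counters map = job.getMapCounters();
        Counters reduce = job.getReduceCounters();
        if (total == null) {  // killed jobs might not have counters
          return;
        }
        for (String groupName : total.getGroupNames()) {
          CounterGroup totalGroup = total.getGroup(groupName);
          CounterGroup mapGroup = map.getGroup(groupName);
          CounterGroup reduceGroup = reduce.getGroup(groupName);
          Iterator<Counter> ctrItr = totalGroup.iterator();
          while (ctrItr.hasNext()) {
            Counter counter = ctrItr.next();
            String name = counter.getName();
            System.out.println(totalGroup.getDisplayName() + "\t"
                + counter.getDisplayName()
                + "\tmap=" + mapGroup.findCounter(name).getValue()
                + "\treduce=" + reduceGroup.findCounter(name).getValue()
                + "\ttotal=" + counter.getValue());
          }
        }
      }
    }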

Added:
    hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventWriter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java
    hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Fri Sep 18 17:38:16 2009
@@ -387,6 +387,9 @@
     MAPREDUCE-905. Add Eclipse launch tasks for MapReduce. (Philip Zeyliger
     via tomwhite)
 
+    MAPREDUCE-277. Makes job history counters available on the job history
+    viewers. (Jothi Padmanabhan via ddas)
+
   BUG FIXES
 
     MAPREDUCE-878. Rename fair scheduler design doc to 

Modified: hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java Fri Sep 18 17:38:16 2009
@@ -296,7 +296,7 @@
         String.valueOf(jobInfo.getFinishedReduces())); 
     job.put(JobKeys.STATUS, jobInfo.getJobStatus().toString()); 
     job.put(JobKeys.JOB_PRIORITY, jobInfo.getPriority()); 
-    parseAndAddJobCounters(job, jobInfo.getCounters().toString());
+    parseAndAddJobCounters(job, jobInfo.getTotalCounters().toString());
   }
   
   

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Fri Sep 18 17:38:16 2009
@@ -2676,6 +2676,8 @@
           this.finishTime,
           this.finishedMapTasks,this.finishedReduceTasks, failedMapTasks, 
           failedReduceTasks, 
+          new org.apache.hadoop.mapreduce.Counters(getMapCounters()),
+          new org.apache.hadoop.mapreduce.Counters(getReduceCounters()),
           new org.apache.hadoop.mapreduce.Counters(getCounters()));
       
       jobHistory.logEvent(jfe, this.status.getJobID());

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventWriter.java?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventWriter.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventWriter.java Fri Sep 18 17:38:16 2009
@@ -85,7 +85,12 @@
   
   static void writeCounters(Counters counters, JsonGenerator gen)
   throws IOException {
-    gen.writeFieldName("COUNTERS");
+    writeCounters("COUNTERS", counters, gen);
+  }
+  
+  static void writeCounters(String name, Counters counters, JsonGenerator gen)
+  throws IOException {
+    gen.writeFieldName(name);
     gen.writeStartArray(); // Start of all groups
     Iterator<CounterGroup> groupItr = counters.iterator();
     while (groupItr.hasNext()) {

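The new writeCounters(String, Counters, JsonGenerator) overload is what lets a
single history event carry several counter sets under distinct JSON field
names, while the one-argument form keeps writing under "COUNTERS" for the
existing events. A rough sketch of the call pattern, assuming code living in
the same org.apache.hadoop.mapreduce.jobhistory package (the method is
package-private) and an already-open Jackson JsonGenerator; the class name
CounterFieldsSketch is hypothetical:

    package org.apache.hadoop.mapreduce.jobhistory;

    import java.io.IOException;

    import org.apache.hadoop.mapreduce.Counters;
    import org.codehaus.jackson.JsonGenerator;

    class CounterFieldsSketch {
      // Write three independent counter sets into the current JSON object,
      // mirroring what JobFinishedEvent.writeFields now does further below.
      static void writeCounterFields(JsonGenerator gen, Counters mapCounters,
          Counters reduceCounters, Counters totalCounters) throws IOException {
        EventWriter.writeCounters("MAP_COUNTERS", mapCounters, gen);
        EventWriter.writeCounters("REDUCE_COUNTERS", reduceCounters, gen);
        EventWriter.writeCounters("TOTAL_COUNTERS", totalCounters, gen);
      }
    }
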
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java Fri Sep 18 17:38:16 2009
@@ -18,10 +18,13 @@
 package org.apache.hadoop.mapreduce.jobhistory;
 
 import java.io.IOException;
+import java.text.DecimalFormat;
+import java.text.Format;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
@@ -33,6 +36,8 @@
 import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.TaskLogServlet;
 import org.apache.hadoop.mapred.TaskStatus;
+import org.apache.hadoop.mapreduce.CounterGroup;
+import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.TaskType;
@@ -139,9 +144,54 @@
                         job.getLaunchTime()));
     jobDetails.append("\nStatus: ").append(((job.getJobStatus() == null) ? 
                       "Incomplete" :job.getJobStatus()));
+    printCounters(jobDetails, job.getTotalCounters(), job.getMapCounters(),
+        job.getReduceCounters());
+    jobDetails.append("\n");
     jobDetails.append("\n=====================================");
     System.out.println(jobDetails.toString());
   }
+
+  private void printCounters(StringBuffer buff, Counters totalCounters,
+      Counters mapCounters, Counters reduceCounters) {
+    // Killed jobs might not have counters
+    if (totalCounters == null) {
+      return;
+    }
+    buff.append("\nCounters: \n\n");
+    buff.append(String.format("|%1$-30s|%2$-30s|%3$-10s|%4$-10s|%5$-10s|", 
+        "Group Name",
+        "Counter name",
+        "Map Value",
+        "Reduce Value",
+        "Total Value"));
+    buff.append("\n------------------------------------------"+
+        "---------------------------------------------");
+    for (String groupName : totalCounters.getGroupNames()) {
+         CounterGroup totalGroup = totalCounters.getGroup(groupName);
+         CounterGroup mapGroup = mapCounters.getGroup(groupName);
+         CounterGroup reduceGroup = reduceCounters.getGroup(groupName);
+      
+         Format decimal = new DecimalFormat();
+         Iterator<org.apache.hadoop.mapreduce.Counter> ctrItr =
+           totalGroup.iterator();
+         while(ctrItr.hasNext()) {
+           org.apache.hadoop.mapreduce.Counter counter = ctrItr.next();
+           String name = counter.getName();
+           String mapValue = 
+             decimal.format(mapGroup.findCounter(name).getValue());
+           String reduceValue = 
+             decimal.format(reduceGroup.findCounter(name).getValue());
+           String totalValue = 
+             decimal.format(counter.getValue());
+
+           buff.append(
+               String.format("\n|%1$-30s|%2$-30s|%3$-10s|%4$-10s|%5$-10s", 
+                   totalGroup.getDisplayName(),
+                   counter.getDisplayName(),
+                   mapValue, reduceValue, totalValue));
+      }
+    }
+  }
   
   private void printAllTaskAttempts(TaskType taskType) {
     Map<TaskID, TaskInfo> tasks = job.getAllTasks();

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java Fri Sep 18 17:38:16 2009
@@ -25,7 +25,6 @@
 import org.codehaus.jackson.JsonGenerator;
 import org.codehaus.jackson.JsonParser;
 import org.codehaus.jackson.JsonToken;
-
 /**
  * Event to record successful completion of job
  *
@@ -39,7 +38,9 @@
   private int finishedReduces;
   private int failedMaps;
   private int failedReduces;
-  private Counters counters;
+  private Counters totalCounters;
+  private Counters mapCounters;
+  private Counters reduceCounters;
 
   enum EventFields { EVENT_CATEGORY,
     JOB_ID,
@@ -48,7 +49,9 @@
     FINISHED_REDUCES,
     FAILED_MAPS,
     FAILED_REDUCES,
-    COUNTERS }
+    MAP_COUNTERS,
+    REDUCE_COUNTERS,
+    TOTAL_COUNTERS }
 
   JobFinishedEvent() {
   }
@@ -61,19 +64,24 @@
    * @param finishedReduces The number of finished reduces
    * @param failedMaps The number of failed maps
    * @param failedReduces The number of failed reduces
-   * @param counters Counters for the job
+   * @param mapCounters Map Counters for the job
+   * @param reduceCounters Reduce Counters for the job
+   * @param totalCounters Total Counters for the job
    */
   public JobFinishedEvent(JobID id, long finishTime,
       int finishedMaps, int finishedReduces,
       int failedMaps, int failedReduces,
-      Counters counters) {
+      Counters mapCounters, Counters reduceCounters,
+      Counters totalCounters) {
     this.jobid = id;
     this.finishTime = finishTime;
     this.finishedMaps = finishedMaps;
     this.finishedReduces = finishedReduces;
     this.failedMaps = failedMaps;
     this.failedReduces = failedReduces;
-    this.counters = counters;
+    this.mapCounters = mapCounters;
+    this.reduceCounters = reduceCounters;
+    this.totalCounters = totalCounters;
     this.category = EventCategory.JOB;
   }
 
@@ -92,7 +100,11 @@
   /** Get the number of failed reducers for the job */
   public int getFailedReduces() { return failedReduces; }
   /** Get the counters for the job */
-  public Counters getCounters() { return counters; }
+  public Counters getTotalCounters() { return totalCounters; }
+  /** Get the Map counters for the job */
+  public Counters getMapCounters() { return mapCounters; }
+  /** Get the reduce counters for the job */
+  public Counters getReduceCounters() { return reduceCounters; }
   /** Get the event type */
   public EventType getEventType() { 
     return EventType.JOB_FINISHED;
@@ -128,8 +140,14 @@
       case FAILED_REDUCES:
         failedReduces = jp.getIntValue();
         break;
-      case COUNTERS:
-        counters = EventReader.readCounters(jp);
+      case MAP_COUNTERS:
+        mapCounters = EventReader.readCounters(jp);
+        break;
+      case REDUCE_COUNTERS:
+        reduceCounters = EventReader.readCounters(jp);
+        break;
+      case TOTAL_COUNTERS:
+        totalCounters = EventReader.readCounters(jp);
         break;
       default: 
         throw new IOException("Unrecognized field '"+fieldname+"'!");
@@ -149,7 +167,12 @@
     gen.writeNumberField(EventFields.FAILED_MAPS.toString(), failedMaps);
     gen.writeNumberField(EventFields.FAILED_REDUCES.toString(),
         failedReduces);
-    EventWriter.writeCounters(counters, gen);
+    EventWriter.writeCounters(EventFields.MAP_COUNTERS.toString(),
+        mapCounters, gen);
+    EventWriter.writeCounters(EventFields.REDUCE_COUNTERS.toString(),
+        reduceCounters, gen);
+    EventWriter.writeCounters(EventFields.TOTAL_COUNTERS.toString(),
+        totalCounters, gen);
     gen.writeEndObject();
   }
 }

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java Fri Sep 18 17:38:16 2009
@@ -219,6 +219,9 @@
     attemptInfo.error = event.getError();
     attemptInfo.status = event.getTaskStatus();
     attemptInfo.hostname = event.getHostname();
+    attemptInfo.shuffleFinishTime = event.getFinishTime();
+    attemptInfo.sortFinishTime = event.getFinishTime();
+    attemptInfo.mapFinishTime = event.getFinishTime();
   }
 
   private void handleTaskAttemptStartedEvent(TaskAttemptStartedEvent event) {
@@ -277,7 +280,9 @@
     info.finishedReduces = event.getFinishedReduces();
     info.failedMaps = event.getFailedMaps();
     info.failedReduces = event.getFailedReduces();
-    info.counters = event.getCounters();
+    info.totalCounters = event.getTotalCounters();
+    info.mapCounters = event.getMapCounters();
+    info.reduceCounters = event.getReduceCounters();
     info.jobStatus = JobStatus.getJobRunState(JobStatus.SUCCEEDED);
   }
 
@@ -322,7 +327,9 @@
     int finishedMaps;
     int finishedReduces;
     String jobStatus;
-    Counters counters;
+    Counters totalCounters;
+    Counters mapCounters;
+    Counters reduceCounters;
     JobPriority priority;
     
     Map<TaskID, TaskInfo> tasksMap;
@@ -348,7 +355,9 @@
       System.out.println("PRIORITY: " + priority);
       System.out.println("TOTAL_MAPS: " + totalMaps);
       System.out.println("TOTAL_REDUCES: " + totalReduces);
-      System.out.println("COUNTERS: " + counters.toString());
+      System.out.println("MAP_COUNTERS:" + mapCounters.toString());
+      System.out.println("REDUCE_COUNTERS:" + reduceCounters.toString());
+      System.out.println("TOTAL_COUNTERS: " + totalCounters.toString());
       
       for (TaskInfo ti: tasksMap.values()) {
         ti.printAll();
@@ -384,7 +393,11 @@
     /** Get the job status */
     public String getJobStatus() { return jobStatus; }
     /** Get the counters for the job */
-    public Counters getCounters() { return counters; }
+    public Counters getTotalCounters() { return totalCounters; }
+    /** Get the map counters for the job */
+    public Counters getMapCounters() { return mapCounters; }
+    /** Get the reduce counters for the job */
+    public Counters getReduceCounters() { return reduceCounters; }
     /** Get the map of all tasks in this job */
     public Map<TaskID, TaskInfo> getAllTasks() { return tasksMap; }
     /** Get the priority of this job */
@@ -417,7 +430,9 @@
       System.out.println("START_TIME: " + startTime);
       System.out.println("FINISH_TIME:" + finishTime);
       System.out.println("TASK_TYPE:" + taskType);
-      System.out.println("COUNTERS:" + counters.toString());
+      if (counters != null) {
+        System.out.println("COUNTERS:" + counters.toString());
+      }
       
       for (TaskAttemptInfo tinfo: attemptsMap.values()) {
         tinfo.printAll();
@@ -491,7 +506,9 @@
       System.out.println("TASK_TYPE:" + taskType);
       System.out.println("TRACKER_NAME:" + trackerName);
       System.out.println("HTTP_PORT:" + httpPort);
-      System.out.println("COUNTERS:" + counters.toString());
+      if (counters != null) {
+        System.out.println("COUNTERS:" + counters.toString());
+      }
     }
 
     /** Get the attempt Id */

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java Fri Sep 18 17:38:16 2009
@@ -31,8 +31,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.CodecPool;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.Decompressor;
@@ -63,6 +61,8 @@
   private final Progressable reporter;
   private static enum ShuffleErrors{IO_ERROR, WRONG_LENGTH, BAD_ID, WRONG_MAP,
                                     CONNECTION, WRONG_REDUCE}
+  
+  private final static String SHUFFLE_ERR_GRP_NAME = "Shuffle Errors";
   private final Counters.Counter connectionErrs;
   private final Counters.Counter ioErrs;
   private final Counters.Counter wrongLengthErrs;
@@ -95,12 +95,18 @@
     this.exceptionReporter = exceptionReporter;
     this.id = ++nextId;
     this.reduce = reduceId.getTaskID().getId();
-    ioErrs = reporter.getCounter(ShuffleErrors.IO_ERROR);
-    wrongLengthErrs = reporter.getCounter(ShuffleErrors.WRONG_LENGTH);
-    badIdErrs = reporter.getCounter(ShuffleErrors.BAD_ID);
-    wrongMapErrs = reporter.getCounter(ShuffleErrors.WRONG_MAP);
-    connectionErrs = reporter.getCounter(ShuffleErrors.CONNECTION);
-    wrongReduceErrs = reporter.getCounter(ShuffleErrors.WRONG_REDUCE);
+    ioErrs = reporter.getCounter(SHUFFLE_ERR_GRP_NAME,
+        ShuffleErrors.IO_ERROR.toString());
+    wrongLengthErrs = reporter.getCounter(SHUFFLE_ERR_GRP_NAME,
+        ShuffleErrors.WRONG_LENGTH.toString());
+    badIdErrs = reporter.getCounter(SHUFFLE_ERR_GRP_NAME,
+        ShuffleErrors.BAD_ID.toString());
+    wrongMapErrs = reporter.getCounter(SHUFFLE_ERR_GRP_NAME,
+        ShuffleErrors.WRONG_MAP.toString());
+    connectionErrs = reporter.getCounter(SHUFFLE_ERR_GRP_NAME,
+        ShuffleErrors.CONNECTION.toString());
+    wrongReduceErrs = reporter.getCounter(SHUFFLE_ERR_GRP_NAME,
+        ShuffleErrors.WRONG_REDUCE.toString());
     
     if (job.getCompressMapOutput()) {
       Class<? extends CompressionCodec> codecClass =

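For context on the Fetcher change above: a counter obtained through the enum
overload of Reporter.getCounter is grouped under the enum's class name, while
the (group, name) overload lets the code pick a readable group, here
"Shuffle Errors", so that is the group name the history pages will show. A
minimal sketch of the two lookup forms, assuming an
org.apache.hadoop.mapred.Reporter is in hand (both overloads exist on that
interface); the class ShuffleCounterSketch is hypothetical:

    import org.apache.hadoop.mapred.Counters;
    import org.apache.hadoop.mapred.Reporter;

    class ShuffleCounterSketch {
      private static final String SHUFFLE_ERR_GRP_NAME = "Shuffle Errors";

      enum ShuffleErrors { IO_ERROR, WRONG_LENGTH }

      static void countAnIoError(Reporter reporter) {
        // Enum form: the counter lands in a group named after the enum class
        // (something like "ShuffleCounterSketch$ShuffleErrors").
        Counters.Counter byEnum = reporter.getCounter(ShuffleErrors.IO_ERROR);

        // String form (what Fetcher switched to): the group name is explicit,
        // so the viewers show "Shuffle Errors" instead of the class name.
        Counters.Counter byName = reporter.getCounter(SHUFFLE_ERR_GRP_NAME,
            ShuffleErrors.IO_ERROR.toString());

        byEnum.increment(1);
        byName.increment(1);
      }
    }
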
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java Fri Sep 18 17:38:16 2009
@@ -355,7 +355,7 @@
 
     // Validate job counters
     Counters c = new Counters(jip.getCounters());
-    Counters jiCounters = jobInfo.getCounters();
+    Counters jiCounters = jobInfo.getTotalCounters();
     assertTrue("Counters of job obtained from history file did not " +
                "match the expected value",
                c.equals(jiCounters));

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java Fri Sep 18 17:38:16 2009
@@ -75,7 +75,8 @@
     jh.logEvent(jse, jobId);
 
     JobFinishedEvent jfe =
-      new JobFinishedEvent(jobId, 12346, 1, 1, 0, 0, new Counters());
+      new JobFinishedEvent(jobId, 12346, 1, 1, 0, 0, new Counters(),
+          new Counters(), new Counters());
     jh.logEvent(jfe, jobId);
     jh.closeWriter(jobId);
 

Modified: hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp Fri Sep 18 17:38:16 2009
@@ -25,9 +25,12 @@
   import="org.apache.hadoop.fs.*"
   import="org.apache.hadoop.mapreduce.TaskAttemptID"
   import="org.apache.hadoop.mapreduce.TaskID"
+  import="org.apache.hadoop.mapreduce.Counter"
+  import="org.apache.hadoop.mapreduce.Counters"
+  import="org.apache.hadoop.mapreduce.CounterGroup"
   import="org.apache.hadoop.mapred.*"
   import="org.apache.hadoop.util.*"
-  import="java.text.SimpleDateFormat"
+  import="java.text.*"
   import="org.apache.hadoop.mapreduce.jobhistory.*"
 %>
 <%!private static final long serialVersionUID = 1L;
@@ -45,7 +48,14 @@
     FileSystem fs = (FileSystem) application.getAttribute("fileSys");
     JobHistoryParser.JobInfo job = JSPUtil.getJobInfo(request, fs);
 %>
-<html><body>
+
+<html>
+<head>
+<title>Hadoop Job <%=jobid%> on History Viewer</title>
+<link rel="stylesheet" type="text/css" href="/static/hadoop.css">
+</head>
+<body>
+
 <h2>Hadoop Job <%=jobid %> on <a href="jobhistory.jsp">History Viewer</a></h2>
 
 <b>User: </b> <%=job.getUsername() %><br/> 
@@ -120,6 +130,66 @@
 </tr>
 </table>
 
+<br>
+<br>
+
+<table border=2 cellpadding="5" cellspacing="2">
+  <tr>
+  <th><br/></th>
+  <th>Counter</th>
+  <th>Map</th>
+  <th>Reduce</th>
+  <th>Total</th>
+</tr>
+
+<%  
+
+ Counters totalCounters = job.getTotalCounters();
+ Counters mapCounters = job.getMapCounters();
+ Counters reduceCounters = job.getReduceCounters();
+
+ if (totalCounters != null) {
+   for (String groupName : totalCounters.getGroupNames()) {
+     CounterGroup totalGroup = totalCounters.getGroup(groupName);
+     CounterGroup mapGroup = mapCounters.getGroup(groupName);
+     CounterGroup reduceGroup = reduceCounters.getGroup(groupName);
+  
+     Format decimal = new DecimalFormat();
+  
+     boolean isFirst = true;
+     Iterator<Counter> ctrItr = totalGroup.iterator();
+     while(ctrItr.hasNext()) {
+       Counter counter = ctrItr.next();
+       String name = counter.getName();
+       String mapValue = 
+        decimal.format(mapGroup.findCounter(name).getValue());
+       String reduceValue = 
+        decimal.format(reduceGroup.findCounter(name).getValue());
+       String totalValue = 
+        decimal.format(counter.getValue());
+%>
+       <tr>
+<%
+       if (isFirst) {
+         isFirst = false;
+%>
+         <td rowspan="<%=totalGroup.size()%>"><%=totalGroup.getDisplayName()%></td>
+<%
+       }
+%>
+       <td><%=counter.getDisplayName()%></td>
+       <td align="right"><%=mapValue%></td>
+       <td align="right"><%=reduceValue%></td>
+       <td align="right"><%=totalValue%></td>
+     </tr>
+<%
+      }
+    }
+  }
+%>
+</table>
+<br>
+
 <br/>
  <%
     HistoryViewer.FilteredJob filter = new HistoryViewer.FilteredJob(job,TaskStatus.State.FAILED.toString());

@@ -160,6 +230,7 @@
  %>
 </table>
 <br/>
+
  <%
     filter = new HistoryViewer.FilteredJob(job, TaskStatus.State.KILLED.toString());
     badNodes = filter.getFilteredMap(); 

Modified: hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp?rev=816720&r1=816719&r2=816720&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp (original)
+++ hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp Fri Sep 18 17:38:16 2009
@@ -27,6 +27,9 @@
   import="org.apache.hadoop.util.*"
   import="java.text.SimpleDateFormat"
   import="org.apache.hadoop.mapreduce.TaskType"
+  import="org.apache.hadoop.mapreduce.Counters"
+  import="org.apache.hadoop.mapreduce.TaskID"
+  import="org.apache.hadoop.mapreduce.TaskAttemptID"
   import="org.apache.hadoop.mapreduce.jobhistory.*"
 %>
 
@@ -56,10 +59,12 @@
 <%
   }
 %>
-<td>Finish Time</td><td>Host</td><td>Error</td><td>Task Logs</td></tr>
+<td>Finish Time</td><td>Host</td><td>Error</td><td>Task Logs</td>
+<td>Counters</td></tr>
+
 <%
   for (JobHistoryParser.TaskAttemptInfo attempt : task.getAllTaskAttempts().values()) {
-    printTaskAttempt(attempt, type, out);
+    printTaskAttempt(attempt, type, out, logFile);
   }
 %>
 </table>
@@ -81,7 +86,7 @@
 %>
 <%!
   private void printTaskAttempt(JobHistoryParser.TaskAttemptInfo taskAttempt,
-                                TaskType type, JspWriter out) 
+                                TaskType type, JspWriter out, String logFile) 
   throws IOException {
     out.print("<tr>"); 
     out.print("<td>" + taskAttempt.getAttemptId() + "</td>");
@@ -116,6 +121,20 @@
         out.print("n/a");
     }
     out.print("</td>");
+    Counters counters = taskAttempt.getCounters();
+    if (counters != null) {
+      TaskAttemptID attemptId = taskAttempt.getAttemptId();
+      TaskID taskId = attemptId.getTaskID();
+      org.apache.hadoop.mapreduce.JobID jobId = taskId.getJobID();
+      out.print("<td>" 
+       + "<a href=\"/taskstatshistory.jsp?jobid=" + jobId
+           + "&taskid=" + taskId
+           + "&attemptid=" + attemptId
+           + "&logFile=" + logFile + "\">"
+           + counters.countCounters() + "</a></td>");
+    } else {
+      out.print("<td></td>");
+    }
     out.print("</tr>"); 
   }
 %>

Added: hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp?rev=816720&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp (added)
+++ hadoop/mapreduce/trunk/src/webapps/job/taskstatshistory.jsp Fri Sep 18 17:38:16 2009
@@ -0,0 +1,111 @@
+<%
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file 
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+%>
+<%@ page
+  contentType="text/html; charset=UTF-8"
+  import="javax.servlet.http.*"
+  import="java.io.*"
+  import="java.util.*"
+  import="org.apache.hadoop.mapred.*"
+  import="org.apache.hadoop.fs.*"
+  import="org.apache.hadoop.util.*"
+  import="java.text.*"
+  import="org.apache.hadoop.mapreduce.jobhistory.*" 
+  import="org.apache.hadoop.mapreduce.TaskID" 
+  import="org.apache.hadoop.mapreduce.TaskAttemptID" 
+  import="org.apache.hadoop.mapreduce.Counter" 
+  import="org.apache.hadoop.mapreduce.Counters" 
+  import="org.apache.hadoop.mapreduce.CounterGroup" 
+%>
+<%! private static SimpleDateFormat dateFormat = new SimpleDateFormat("d/MM HH:mm:ss");
+    private static final long serialVersionUID = 1L;
+%>
+
+<%
+  String jobid = request.getParameter("jobid");
+  String attemptid = request.getParameter("attemptid");
+  String taskid = request.getParameter("taskid");
+  String logFile = request.getParameter("logFile");
+
+  Format decimal = new DecimalFormat();
+
+  FileSystem fs = (FileSystem) application.getAttribute("fileSys");
+  JobHistoryParser.JobInfo job = JSPUtil.getJobInfo(request, fs);
+
+  Map<TaskID, JobHistoryParser.TaskInfo> tasks = job.getAllTasks();
+  JobHistoryParser.TaskInfo task = tasks.get(TaskID.forName(taskid));
+
+  Map<TaskAttemptID, JobHistoryParser.TaskAttemptInfo> attempts = task.getAllTaskAttempts();
+  JobHistoryParser.TaskAttemptInfo attempt = attempts.get(TaskAttemptID.forName(attemptid));
+
+  Counters counters = attempt.getCounters();
+%>
+
+<html>
+  <head>
+    <title>Counters for <%=attemptid%></title>
+  </head>
+<body>
+<h1>Counters for <%=attemptid%></h1>
+
+<hr>
+
+<%
+  if (counters == null) {
+%>
+    <h3>No counter information found for this attempt</h3>
+<%
+  } else {    
+%>
+    <table>
+<%
+      for (String groupName : counters.getGroupNames()) {
+        CounterGroup group = counters.getGroup(groupName);
+        String displayGroupName = group.getDisplayName();
+%>
+        <tr>
+          <td colspan="3"><br/><b><%=displayGroupName%></b></td>
+        </tr>
+<%
+        Iterator<Counter> ctrItr = group.iterator();
+        while(ctrItr.hasNext()) {
+          Counter counter = ctrItr.next();
+          String displayCounterName = counter.getDisplayName();
+          long value = counter.getValue();
+%>
+          <tr>
+            <td width="50"></td>
+            <td><%=displayCounterName%></td>
+            <td align="right"><%=decimal.format(value)%></td>
+          </tr>
+<%
+        }
+      }
+%>
+    </table>
+<%
+  }
+%>
+
+<hr>
+<a href="jobdetailshistory.jsp?jobid=<%=jobid%>&logFile=<%=logFile%>">Go back to the job</a><br>
+<a href="jobtracker.jsp">Go back to JobTracker</a><br>
+<%
+out.println(ServletUtil.htmlFooter());
+%>


