hadoop-mapreduce-commits mailing list archives

From: sha...@apache.org
Subject: svn commit: r816052 [1/5] - in /hadoop/mapreduce/trunk: ./ src/contrib/capacity-scheduler/ src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ src/contrib/fairscheduler/ src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/ src/con...
Date: Thu, 17 Sep 2009 05:04:27 GMT
Author: sharad
Date: Thu Sep 17 05:04:21 2009
New Revision: 816052

URL: http://svn.apache.org/viewvc?rev=816052&view=rev
Log:
MAPREDUCE-157. Refactor job history APIs and change the history format to JSON. Contributed by Jothi Padmanabhan.
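
[Editor's note] At a glance, the refactor replaces the old static JobHistory.JobInfo.log*() calls with typed event classes (JobSubmittedEvent, TaskStartedEvent, and so on) that a JobHistory instance serializes as JSON via Jackson, which is why the contrib ivy.xml files below pick up jackson-core-asl and jackson-mapper-asl. A condensed sketch of the new write path follows; it is not the committed implementation (JobInProgress.logSubmissionToJobHistory in the diffs below is the real thing), and the collaborators are injected here rather than wired up through a running JobTracker:

    import java.io.IOException;

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.JobID;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
    import org.apache.hadoop.mapreduce.jobhistory.JobSubmittedEvent;

    // Sketch only: assumes a JobHistory instance is supplied by the caller.
    class HistoryWriteSketch {
      private final JobHistory jobHistory;

      HistoryWriteSketch(JobHistory jobHistory) {
        this.jobHistory = jobHistory;
      }

      void logSubmission(JobID jobId, JobConf conf, String jobFile,
          long startTime) throws IOException {
        // Open the per-job event stream (replaces JobHistory.JobInfo.logSubmitted).
        jobHistory.setupEventWriter(jobId, conf);
        // Each history record is now a typed event object rather than a
        // key/value line in the old custom text format. The committed code
        // additionally substitutes "" when getUser()/getJobName() are null.
        JobSubmittedEvent jse = new JobSubmittedEvent(jobId, conf.getJobName(),
            conf.getUser(), startTime, jobFile);
        jobHistory.logEvent(jse, jobId);
      }
    }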

Added:
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventType.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventWriter.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/HistoryViewer.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobFinishedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistory.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobInfoChangeEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobPriorityChangeEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobStatusChangedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobUnsuccessfulCompletionEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFailedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskStartedEvent.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskUpdatedEvent.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/Pre21JobHistoryConstants.java
Removed:
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/DefaultJobHistoryParser.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/HistoryViewer.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryParsing.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistoryVersion.java
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/ivy.xml
    hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/CapacityTestUtils.java
    hadoop/mapreduce/trunk/src/contrib/fairscheduler/ivy.xml
    hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java
    hadoop/mapreduce/trunk/src/contrib/gridmix/ivy.xml
    hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobFactory.java
    hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/DebugJobFactory.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/ivy.xml
    hadoop/mapreduce/trunk/src/contrib/streaming/ivy.xml
    hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java
    hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/CounterGroup.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Counters.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/FakeObjectUtilities.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobRetire.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestParallelInitialization.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSeveral.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/tools/TestCopyFiles.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/JobStory.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/LoggedJob.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/LoggedTask.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
    hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ZombieJob.java
    hadoop/mapreduce/trunk/src/webapps/job/analysejobhistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobconf.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobconf_history.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobdetailshistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobhistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/jobtaskshistory.jsp
    hadoop/mapreduce/trunk/src/webapps/job/taskdetailshistory.jsp

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Thu Sep 17 05:04:21 2009
@@ -36,6 +36,9 @@
     HADOOP-6230. Moved process tree and memory calculator related classes from
     Common to Map/Reduce. (Vinod Kumar Vavilapalli via yhemanth)
 
+    MAPREDUCE-157. Refactor job history APIs and change the history format to 
+    JSON. (Jothi Padmanabhan via sharad)
+
   NEW FEATURES
 
     MAPREDUCE-706. Support for FIFO pools in the fair scheduler.

Modified: hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/ivy.xml?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/ivy.xml (original)
+++ hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/ivy.xml Thu Sep 17 05:04:21 2009
@@ -68,5 +68,13 @@
       name="avro"
       rev="1.0.0"
       conf="common->default"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-mapper-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-core-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
   </dependencies>
 </ivy-module>

Modified: hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/CapacityTestUtils.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/CapacityTestUtils.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/CapacityTestUtils.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/CapacityTestUtils.java Thu Sep 17 05:04:21 2009
@@ -17,17 +17,30 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.mapreduce.TaskType;
-import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
-import org.apache.hadoop.conf.Configuration;
-import static org.junit.Assert.*;
-import java.util.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.TreeMap;
 
-import org.apache.hadoop.security.SecurityUtil.AccessControlList;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobHistory;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
+import org.apache.hadoop.security.SecurityUtil.AccessControlList;
 
 
 public class CapacityTestUtils {
@@ -216,6 +229,7 @@
       }
       mapTaskCtr = 0;
       redTaskCtr = 0;
+      this.jobHistory = new FakeJobHistory();
     }
 
     @Override

Modified: hadoop/mapreduce/trunk/src/contrib/fairscheduler/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/fairscheduler/ivy.xml?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/fairscheduler/ivy.xml (original)
+++ hadoop/mapreduce/trunk/src/contrib/fairscheduler/ivy.xml Thu Sep 17 05:04:21 2009
@@ -38,5 +38,13 @@
       name="junit"
       rev="${junit.version}"
       conf="common->default"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-mapper-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-core-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
   </dependencies>
 </ivy-module>

Modified: hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java Thu Sep 17 05:04:21 2009
@@ -38,10 +38,11 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.FairScheduler.JobInfo;
+import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobHistory;
+import org.apache.hadoop.mapred.UtilsForTests.FakeClock;
 import org.apache.hadoop.mapreduce.TaskType;
-import org.apache.hadoop.net.Node;
 import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
-import org.apache.hadoop.mapred.UtilsForTests.FakeClock;
+import org.apache.hadoop.net.Node;
 
 public class TestFairScheduler extends TestCase {
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
@@ -74,6 +75,7 @@
       this.runningMapCache = new IdentityHashMap<Node, Set<TaskInProgress>>();
       this.nonRunningReduces = new LinkedList<TaskInProgress>();   
       this.runningReduces = new LinkedHashSet<TaskInProgress>();
+      this.jobHistory = new FakeJobHistory();
       initTasks();
     }
     

Modified: hadoop/mapreduce/trunk/src/contrib/gridmix/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/gridmix/ivy.xml?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/gridmix/ivy.xml (original)
+++ hadoop/mapreduce/trunk/src/contrib/gridmix/ivy.xml Thu Sep 17 05:04:21 2009
@@ -89,5 +89,13 @@
       name="avro"
       rev="1.0.0"
       conf="common->default"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-mapper-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-core-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
   </dependencies>
 </ivy-module>

Modified: hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobFactory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobFactory.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobFactory.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/JobFactory.java Thu Sep 17 05:04:21 2009
@@ -26,9 +26,9 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.mapred.JobHistory;
 import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.tools.rumen.JobStoryProducer;
+import org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants;
 import org.apache.hadoop.tools.rumen.ZombieJobProducer;
 
 import org.apache.commons.logging.Log;
@@ -105,7 +105,7 @@
       do {
         job = jobProducer.getNextJob();
       } while (job != null
-          && (job.getOutcome() != JobHistory.Values.SUCCESS ||
+          && (job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS ||
               job.getSubmissionTime() < 0));
       return job;
     }

Modified: hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/DebugJobFactory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/DebugJobFactory.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/DebugJobFactory.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/gridmix/src/test/org/apache/hadoop/mapred/gridmix/DebugJobFactory.java Thu Sep 17 05:04:21 2009
@@ -28,7 +28,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.mapred.JobHistory;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.TaskType;
@@ -36,6 +35,8 @@
 import org.apache.hadoop.tools.rumen.JobStoryProducer;
 import org.apache.hadoop.tools.rumen.TaskAttemptInfo;
 import org.apache.hadoop.tools.rumen.TaskInfo;
+import org.apache.hadoop.tools.rumen.Pre21JobHistoryConstants.Values;;
+
 
 /**
  * Component generating random job traces for testing on a single node.
@@ -213,8 +214,8 @@
     }
 
     @Override
-    public JobHistory.Values getOutcome() {
-      return JobHistory.Values.SUCCESS;
+    public Values getOutcome() {
+      return Values.SUCCESS;
     }
 
     @Override

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/ivy.xml?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/ivy.xml (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/ivy.xml Thu Sep 17 05:04:21 2009
@@ -80,5 +80,13 @@
       name="jetty-util"
       rev="${jetty-util.version}"
       conf="common->master"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-mapper-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-core-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
     </dependencies>
 </ivy-module>

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/ivy.xml?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/ivy.xml (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/ivy.xml Thu Sep 17 05:04:21 2009
@@ -68,5 +68,13 @@
       name="avro"
       rev="1.0.0"
       conf="common->default"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-mapper-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
+    <dependency org="org.codehaus.jackson"
+      name="jackson-core-asl"
+      rev="${jackson.version}"
+      conf="common->default"/>
     </dependencies>
 </ivy-module>

Modified: hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/PostExPerformanceDiagnoser.java Thu Sep 17 05:04:21 2009
@@ -19,20 +19,21 @@
 package org.apache.hadoop.vaidya.postexdiagnosis;
 
 
-import java.net.URL;
-import java.io.InputStream;
 import java.io.FileInputStream;
+import java.io.InputStream;
+import java.net.URL;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobHistory.JobInfo;
-import org.apache.hadoop.mapred.DefaultJobHistoryParser;
-import org.apache.hadoop.vaidya.util.XMLUtils;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 import org.apache.hadoop.vaidya.DiagnosticTest;
 import org.apache.hadoop.vaidya.JobDiagnoser;
 import org.apache.hadoop.vaidya.statistics.job.JobStatistics;
-import org.w3c.dom.NodeList;
+import org.apache.hadoop.vaidya.util.XMLUtils;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
 
 
 /**
@@ -108,15 +109,14 @@
      * Read the job information necessary for post performance analysis
      */
     JobConf jobConf = new JobConf();
-    JobInfo jobInfo = new JobInfo("");
-    readJobInformation(jobConf, jobInfo);
+    JobInfo jobInfo = readJobInformation(jobConf);
     this._jobExecutionStatistics = new JobStatistics(jobConf, jobInfo);
   }
 
   /**
    * read and populate job statistics information.
    */
-  private void readJobInformation(JobConf jobConf, JobInfo jobInfo) throws Exception {
+  private JobInfo readJobInformation(JobConf jobConf) throws Exception {
   
     /*
      * Convert the input strings to URL
@@ -132,13 +132,21 @@
     /* 
      * Read JobHistoryFile and build job counters to evaluate diagnostic rules
      */
+    JobHistoryParser parser;
+    JobInfo jobInfo;
     if (jobHistoryFileUrl.getProtocol().equals("hdfs")) {
-      DefaultJobHistoryParser.parseJobTasks (jobHistoryFileUrl.getPath(), jobInfo, FileSystem.get(jobConf));
+      parser = new JobHistoryParser(FileSystem.get(jobConf),
+                                    jobHistoryFileUrl.getPath());
+      jobInfo = parser.parse();
     } else if (jobHistoryFileUrl.getProtocol().equals("file")) {
-      DefaultJobHistoryParser.parseJobTasks (jobHistoryFileUrl.getPath(), jobInfo, FileSystem.getLocal(jobConf));
+      parser = new JobHistoryParser(FileSystem.getLocal(jobConf),
+                                    jobHistoryFileUrl.getPath());
+      jobInfo = parser.parse();
     } else {
-      throw new Exception("Malformed URL. Protocol: "+jobHistoryFileUrl.getProtocol());
+      throw new Exception("Malformed URL. Protocol: "+ 
+          jobHistoryFileUrl.getProtocol());
     }
+    return jobInfo;
   }
   
   /*

Modified: hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/statistics/job/JobStatistics.java Thu Sep 17 05:04:21 2009
@@ -17,26 +17,23 @@
  */
 package org.apache.hadoop.vaidya.statistics.job;
 
-import java.util.ArrayList;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobHistory;
-import org.apache.hadoop.mapred.JobHistory.JobInfo;
-import org.apache.hadoop.mapred.JobHistory.Keys;
-import org.apache.hadoop.mapred.Counters;
-import org.apache.hadoop.mapred.Counters.Counter;
 import java.text.ParseException;
-
-//import org.apache.hadoop.vaidya.statistics.job.JobStatisticsInterface.JobKeys;
-
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.Hashtable;
 import java.util.Map;
 import java.util.regex.Pattern;
-import java.util.regex.Matcher;
 
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Collections;
+import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapred.TaskStatus;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
 
 /**
  *
@@ -65,7 +62,7 @@
   /*
    * Aggregated Job level counters 
    */
-  private JobHistory.JobInfo _jobInfo;
+  private JobHistoryParser.JobInfo _jobInfo;
   
   /*
    * Job stats 
@@ -152,8 +149,9 @@
     this._jobConf = jobConf;
     this._jobInfo = jobInfo;
     this._job = new Hashtable<Enum, String>();
-    populate_Job(this._job, this._jobInfo.getValues());  
-    populate_MapReduceTaskLists(this._mapTaskList, this._reduceTaskList, this._jobInfo.getAllTasks());
+    populate_Job(this._job, jobInfo);  
+    populate_MapReduceTaskLists(this._mapTaskList, this._reduceTaskList, 
+        jobInfo.getAllTasks());
 
     // Add the Job Type: MAP_REDUCE, MAP_ONLY
     if (getLongValue(JobKeys.TOTAL_REDUCES) == 0) {
@@ -167,120 +165,100 @@
    * 
    */
   private void populate_MapReduceTaskLists (ArrayList<MapTaskStatistics> mapTaskList, 
-                              ArrayList<ReduceTaskStatistics> reduceTaskList, 
-                              java.util.Map<String, JobHistory.Task> taskMap) throws ParseException {
-    /*
-     * 
-     */
+                     ArrayList<ReduceTaskStatistics> reduceTaskList, 
+                     Map<TaskID, TaskInfo> taskMap)
+  throws ParseException {
     int num_tasks = taskMap.entrySet().size();
-    java.util.Iterator<Map.Entry<String, JobHistory.Task>> ti = taskMap.entrySet().iterator();
-    for (int i = 0; i < num_tasks; i++)
-    {
-      Map.Entry<String, JobHistory.Task> entry = (Map.Entry<String, JobHistory.Task>) ti.next();
-      JobHistory.Task task = entry.getValue();
-      if (task.get(Keys.TASK_TYPE).equals("MAP")) {
-      MapTaskStatistics mapT = new MapTaskStatistics();
-      java.util.Map<JobHistory.Keys, String> mapTask = task.getValues();
-      java.util.Map<JobHistory.Keys, String> successTaskAttemptMap  =  getLastSuccessfulTaskAttempt(task);
-      // NOTE: Following would lead to less number of actual tasks collected in the tasklist array
-      if (successTaskAttemptMap != null) {
-        mapTask.putAll(successTaskAttemptMap);
-      } else {
-        System.err.println("Task:<"+task.get(Keys.TASKID)+"> is not successful - SKIPPING");
-      }
-      int size = mapTask.size();
-      java.util.Iterator<Map.Entry<JobHistory.Keys, String>> kv = mapTask.entrySet().iterator();
-      for (int j = 0; j < size; j++)
-      {
-        Map.Entry<JobHistory.Keys, String> mtc = kv.next();
-        JobHistory.Keys key = mtc.getKey();
-        String value = mtc.getValue();
-        //System.out.println("JobHistory.MapKeys."+key+": "+value);
-        switch (key) {
-        case TASKID: mapT.setValue(MapTaskKeys.TASK_ID, value); break;
-        case TASK_ATTEMPT_ID: mapT.setValue(MapTaskKeys.ATTEMPT_ID, value); break;
-        case HOSTNAME: mapT.setValue(MapTaskKeys.HOSTNAME, value); break;
-        case TASK_TYPE: mapT.setValue(MapTaskKeys.TASK_TYPE, value); break;
-        case TASK_STATUS: mapT.setValue(MapTaskKeys.STATUS, value); break;
-        case START_TIME: mapT.setValue(MapTaskKeys.START_TIME, value); break;
-        case FINISH_TIME: mapT.setValue(MapTaskKeys.FINISH_TIME, value); break;
-        case SPLITS: mapT.setValue(MapTaskKeys.SPLITS, value); break;
-        case TRACKER_NAME: mapT.setValue(MapTaskKeys.TRACKER_NAME, value); break;
-        case STATE_STRING: mapT.setValue(MapTaskKeys.STATE_STRING, value); break;
-        case HTTP_PORT: mapT.setValue(MapTaskKeys.HTTP_PORT, value); break;
-        case ERROR: mapT.setValue(MapTaskKeys.ERROR, value); break;
-        case COUNTERS:
-          value.concat(",");
-          parseAndAddMapTaskCounters(mapT, value);
-          mapTaskList.add(mapT);
-          break;
-        default: System.err.println("JobHistory.MapKeys."+key+" : NOT INCLUDED IN PERFORMANCE ADVISOR MAP COUNTERS");
-          break;
-        }
-      }
-      
-      // Add number of task attempts
-      mapT.setValue(MapTaskKeys.NUM_ATTEMPTS, (new Integer(task.getTaskAttempts().size())).toString());
+// DO we need these lists?
+//    List<TaskAttemptInfo> successfulMapAttemptList = 
+//      new ArrayList<TaskAttemptInfo>();
+//    List<TaskAttemptInfo> successfulReduceAttemptList = 
+//      new ArrayList<TaskAttemptInfo>();
+    for (JobHistoryParser.TaskInfo taskInfo: taskMap.values()) {
+      if (taskInfo.getTaskType().equals(TaskType.MAP)) {
+        MapTaskStatistics mapT = new MapTaskStatistics();
+        TaskAttemptInfo successfulAttempt  =  
+          getLastSuccessfulTaskAttempt(taskInfo);
+        mapT.setValue(MapTaskKeys.TASK_ID, 
+            successfulAttempt.getAttemptId().getTaskID().toString()); 
+        mapT.setValue(MapTaskKeys.ATTEMPT_ID, 
+            successfulAttempt.getAttemptId().toString()); 
+        mapT.setValue(MapTaskKeys.HOSTNAME, 
+            successfulAttempt.getTrackerName()); 
+        mapT.setValue(MapTaskKeys.TASK_TYPE, 
+            successfulAttempt.getTaskType().toString()); 
+        mapT.setValue(MapTaskKeys.STATUS, 
+            successfulAttempt.getTaskStatus().toString()); 
+        mapT.setValue(MapTaskKeys.START_TIME, successfulAttempt.getStartTime()); 
+        mapT.setValue(MapTaskKeys.FINISH_TIME, successfulAttempt.getFinishTime()); 
+        mapT.setValue(MapTaskKeys.SPLITS, taskInfo.getSplitLocations()); 
+        mapT.setValue(MapTaskKeys.TRACKER_NAME, successfulAttempt.getTrackerName()); 
+        mapT.setValue(MapTaskKeys.STATE_STRING, successfulAttempt.getState()); 
+        mapT.setValue(MapTaskKeys.HTTP_PORT, successfulAttempt.getHttpPort()); 
+        mapT.setValue(MapTaskKeys.ERROR, successfulAttempt.getError()); 
+        parseAndAddMapTaskCounters(mapT, 
+            successfulAttempt.getCounters().toString());
+        mapTaskList.add(mapT);
 
-      // Add EXECUTION_TIME = FINISH_TIME - START_TIME
-      long etime = mapT.getLongValue(MapTaskKeys.FINISH_TIME) - mapT.getLongValue(MapTaskKeys.START_TIME);
-      mapT.setValue(MapTaskKeys.EXECUTION_TIME, (new Long(etime)).toString());
-      
-      }else if (task.get(Keys.TASK_TYPE).equals("REDUCE")) {
+        // Add number of task attempts
+        mapT.setValue(MapTaskKeys.NUM_ATTEMPTS, 
+            (new Integer(taskInfo.getAllTaskAttempts().size())).toString());
+
+        // Add EXECUTION_TIME = FINISH_TIME - START_TIME
+        long etime = mapT.getLongValue(MapTaskKeys.FINISH_TIME) - 
+          mapT.getLongValue(MapTaskKeys.START_TIME);
+        mapT.setValue(MapTaskKeys.EXECUTION_TIME, (new Long(etime)).toString());
+
+      }else if (taskInfo.getTaskType().equals(TaskType.REDUCE)) {
 
         ReduceTaskStatistics reduceT = new ReduceTaskStatistics();
-        java.util.Map<JobHistory.Keys, String> reduceTask = task.getValues();
-        java.util.Map<JobHistory.Keys, String> successTaskAttemptMap  =  getLastSuccessfulTaskAttempt(task);
-        // NOTE: Following would lead to less number of actual tasks collected in the tasklist array
-        if (successTaskAttemptMap != null) {
-          reduceTask.putAll(successTaskAttemptMap);
-        } else {
-          System.err.println("Task:<"+task.get(Keys.TASKID)+"> is not successful - SKIPPING");
-        }
-        int size = reduceTask.size();
-        java.util.Iterator<Map.Entry<JobHistory.Keys, String>> kv = reduceTask.entrySet().iterator();
-        for (int j = 0; j < size; j++)
-        {
-          Map.Entry<JobHistory.Keys, String> rtc = kv.next();
-          JobHistory.Keys key = rtc.getKey();
-          String value = rtc.getValue();
-          //System.out.println("JobHistory.ReduceKeys."+key+": "+value);
-          switch (key) {
-          case TASKID: reduceT.setValue(ReduceTaskKeys.TASK_ID, value); break;
-          case TASK_ATTEMPT_ID: reduceT.setValue(ReduceTaskKeys.ATTEMPT_ID, value); break;
-          case HOSTNAME: reduceT.setValue(ReduceTaskKeys.HOSTNAME, value); break;
-          case TASK_TYPE: reduceT.setValue(ReduceTaskKeys.TASK_TYPE, value); break;
-          case TASK_STATUS: reduceT.setValue(ReduceTaskKeys.STATUS, value); break;
-          case START_TIME: reduceT.setValue(ReduceTaskKeys.START_TIME, value); break;
-          case FINISH_TIME: reduceT.setValue(ReduceTaskKeys.FINISH_TIME, value); break;
-          case SHUFFLE_FINISHED: reduceT.setValue(ReduceTaskKeys.SHUFFLE_FINISH_TIME, value); break;
-          case SORT_FINISHED: reduceT.setValue(ReduceTaskKeys.SORT_FINISH_TIME, value); break;
-          case SPLITS: reduceT.setValue(ReduceTaskKeys.SPLITS, value); break;
-          case TRACKER_NAME: reduceT.setValue(ReduceTaskKeys.TRACKER_NAME, value); break;
-          case STATE_STRING: reduceT.setValue(ReduceTaskKeys.STATE_STRING, value); break;
-          case HTTP_PORT: reduceT.setValue(ReduceTaskKeys.HTTP_PORT, value); break;
-          case COUNTERS:
-            value.concat(",");
-            parseAndAddReduceTaskCounters(reduceT, value);
-            reduceTaskList.add(reduceT);
-            break;
-          default: System.err.println("JobHistory.ReduceKeys."+key+" : NOT INCLUDED IN PERFORMANCE ADVISOR REDUCE COUNTERS");
-            break;
-          }
-        }
+        TaskAttemptInfo successfulAttempt  = 
+          getLastSuccessfulTaskAttempt(taskInfo);
+        reduceT.setValue(ReduceTaskKeys.TASK_ID,
+            successfulAttempt.getAttemptId().getTaskID().toString()); 
+        reduceT.setValue(ReduceTaskKeys.ATTEMPT_ID,
+            successfulAttempt.getAttemptId().toString()); 
+        reduceT.setValue(ReduceTaskKeys.HOSTNAME,
+            successfulAttempt.getTrackerName()); 
+        reduceT.setValue(ReduceTaskKeys.TASK_TYPE, 
+            successfulAttempt.getTaskType().toString()); 
+        reduceT.setValue(ReduceTaskKeys.STATUS, 
+            successfulAttempt.getTaskStatus().toString()); 
+        reduceT.setValue(ReduceTaskKeys.START_TIME,
+            successfulAttempt.getStartTime()); 
+        reduceT.setValue(ReduceTaskKeys.FINISH_TIME,
+            successfulAttempt.getFinishTime()); 
+        reduceT.setValue(ReduceTaskKeys.SHUFFLE_FINISH_TIME,
+            successfulAttempt.getShuffleFinishTime()); 
+        reduceT.setValue(ReduceTaskKeys.SORT_FINISH_TIME,
+            successfulAttempt.getSortFinishTime()); 
+        reduceT.setValue(ReduceTaskKeys.SPLITS, ""); 
+        reduceT.setValue(ReduceTaskKeys.TRACKER_NAME,
+            successfulAttempt.getTrackerName()); 
+        reduceT.setValue(ReduceTaskKeys.STATE_STRING,
+            successfulAttempt.getState()); 
+        reduceT.setValue(ReduceTaskKeys.HTTP_PORT,
+            successfulAttempt.getHttpPort()); 
+        parseAndAddReduceTaskCounters(reduceT,
+            successfulAttempt.getCounters().toString());
+
+        reduceTaskList.add(reduceT);
 
         // Add number of task attempts
-        reduceT.setValue(ReduceTaskKeys.NUM_ATTEMPTS, (new Integer(task.getTaskAttempts().size())).toString());
+        reduceT.setValue(ReduceTaskKeys.NUM_ATTEMPTS, 
+            (new Integer(taskInfo.getAllTaskAttempts().size())).toString());
 
         // Add EXECUTION_TIME = FINISH_TIME - START_TIME
-        long etime1 = reduceT.getLongValue(ReduceTaskKeys.FINISH_TIME) - reduceT.getLongValue(ReduceTaskKeys.START_TIME);
-        reduceT.setValue(ReduceTaskKeys.EXECUTION_TIME, (new Long(etime1)).toString());
+        long etime1 = reduceT.getLongValue(ReduceTaskKeys.FINISH_TIME) - 
+        reduceT.getLongValue(ReduceTaskKeys.START_TIME);
+        reduceT.setValue(ReduceTaskKeys.EXECUTION_TIME,
+            (new Long(etime1)).toString());
 
-      } else if (task.get(Keys.TASK_TYPE).equals("CLEANUP") ||
-                 task.get(Keys.TASK_TYPE).equals("SETUP")) {
+      } else if (taskInfo.getTaskType().equals(TaskType.JOB_CLEANUP) ||
+                 taskInfo.getTaskType().equals(TaskType.JOB_SETUP)) {
         //System.out.println("INFO: IGNORING TASK TYPE : "+task.get(Keys.TASK_TYPE));
       } else {
-        System.err.println("UNKNOWN TASK TYPE : "+task.get(Keys.TASK_TYPE));
+        System.err.println("UNKNOWN TASK TYPE : "+taskInfo.getTaskType());
       }
     }
   }
@@ -288,62 +266,40 @@
   /*
    * Get last successful task attempt to be added in the stats
    */
-  private java.util.Map<JobHistory.Keys, String> getLastSuccessfulTaskAttempt(JobHistory.Task task) {
+  private TaskAttemptInfo getLastSuccessfulTaskAttempt(TaskInfo task) {
     
-    Map<String, JobHistory.TaskAttempt> taskAttempts = task.getTaskAttempts();
-    int size = taskAttempts.size();
-    java.util.Iterator<Map.Entry<String, JobHistory.TaskAttempt>> kv = taskAttempts.entrySet().iterator();
-    for (int i=0; i<size; i++) {
-      // CHECK_IT: Only one SUCCESSFUL TASK ATTEMPT
-      Map.Entry<String, JobHistory.TaskAttempt> tae = kv.next();
-      JobHistory.TaskAttempt attempt = tae.getValue();
-      if (attempt.getValues().get(JobHistory.Keys.TASK_STATUS).equals("SUCCESS")) {
-        return attempt.getValues();
+    for (TaskAttemptInfo ai: task.getAllTaskAttempts().values()) {
+      if (ai.getTaskStatus().equals(TaskStatus.State.SUCCEEDED.toString())) {
+        return ai;
       }
     }
-    
     return null;
   }
   
   /*
    * Popuate the job stats 
    */
-  private void populate_Job (Hashtable<Enum, String> job, java.util.Map<JobHistory.Keys, String> jobC) throws ParseException {
-    int size = jobC.size(); 
-    java.util.Iterator<Map.Entry<JobHistory.Keys, String>> kv = jobC.entrySet().iterator();
-    for (int i = 0; i < size; i++)
-    {
-      Map.Entry<JobHistory.Keys, String> entry = (Map.Entry<JobHistory.Keys, String>) kv.next();
-      JobHistory.Keys key = entry.getKey();
-      String value = entry.getValue();
-      //System.out.println("JobHistory.JobKeys."+key+": "+value);
-      switch (key) {
-      case JOBTRACKERID: job.put(JobKeys.JOBTRACKERID, value); break;
-      case FINISH_TIME: job.put(JobKeys.FINISH_TIME, value); break;
-      case JOBID: job.put(JobKeys.JOBID, value); break;
-      case JOBNAME: job.put(JobKeys.JOBNAME, value); break;
-      case USER: job.put(JobKeys.USER, value); break;
-      case JOBCONF: job.put(JobKeys.JOBCONF, value); break;
-      case SUBMIT_TIME: job.put(JobKeys.SUBMIT_TIME, value); break;
-      case LAUNCH_TIME: job.put(JobKeys.LAUNCH_TIME, value); break;
-      case TOTAL_MAPS: job.put(JobKeys.TOTAL_MAPS, value); break;
-      case TOTAL_REDUCES: job.put(JobKeys.TOTAL_REDUCES, value); break;
-      case FAILED_MAPS: job.put(JobKeys.FAILED_MAPS, value); break;
-      case FAILED_REDUCES: job.put(JobKeys.FAILED_REDUCES, value); break;
-      case FINISHED_MAPS: job.put(JobKeys.FINISHED_MAPS, value); break;
-      case FINISHED_REDUCES: job.put(JobKeys.FINISHED_REDUCES, value); break;
-      case JOB_STATUS: job.put(JobKeys.STATUS, value); break;
-      case JOB_PRIORITY: job.put(JobKeys.JOB_PRIORITY, value); break;
-      case COUNTERS:
-        value.concat(",");
-        parseAndAddJobCounters(job, value);
-        break;
-      default:   System.err.println("JobHistory.Keys."+key+" : NOT INCLUDED IN PERFORMANCE ADVISOR COUNTERS");
-               break;
-      }
-    }
+  private void populate_Job (Hashtable<Enum, String> job, JobInfo jobInfo) throws ParseException {
+    job.put(JobKeys.FINISH_TIME, String.valueOf(jobInfo.getFinishTime()));
+    job.put(JobKeys.JOBID, jobInfo.getJobId().toString()); 
+    job.put(JobKeys.JOBNAME, jobInfo.getJobname()); 
+    job.put(JobKeys.USER, jobInfo.getUsername()); 
+    job.put(JobKeys.JOBCONF, jobInfo.getJobConfPath()); 
+    job.put(JobKeys.SUBMIT_TIME, String.valueOf(jobInfo.getSubmitTime())); 
+    job.put(JobKeys.LAUNCH_TIME, String.valueOf(jobInfo.getLaunchTime())); 
+    job.put(JobKeys.TOTAL_MAPS, String.valueOf(jobInfo.getTotalMaps())); 
+    job.put(JobKeys.TOTAL_REDUCES, String.valueOf(jobInfo.getTotalReduces())); 
+    job.put(JobKeys.FAILED_MAPS, String.valueOf(jobInfo.getFailedMaps())); 
+    job.put(JobKeys.FAILED_REDUCES, String.valueOf(jobInfo.getFailedReduces())); 
+    job.put(JobKeys.FINISHED_MAPS, String.valueOf(jobInfo.getFinishedMaps())); 
+    job.put(JobKeys.FINISHED_REDUCES, 
+        String.valueOf(jobInfo.getFinishedReduces())); 
+    job.put(JobKeys.STATUS, jobInfo.getJobStatus().toString()); 
+    job.put(JobKeys.JOB_PRIORITY, jobInfo.getPriority()); 
+    parseAndAddJobCounters(job, jobInfo.getCounters().toString());
   }
   
+  
   /*
    * Parse and add the job counters
    */

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml Thu Sep 17 05:04:21 2009
@@ -238,7 +238,7 @@
 					<code>Usage: hadoop job [</code><a href="commands_manual.html#Generic+Options">GENERIC_OPTIONS</a><code>] 
 					[-submit &lt;job-file&gt;] | [-status &lt;job-id&gt;] | 
 					[-counter &lt;job-id&gt; &lt;group-name&gt; &lt;counter-name&gt;] | [-kill &lt;job-id&gt;] | 
-					[-events &lt;job-id&gt; &lt;from-event-#&gt; &lt;#-of-events&gt;] | [-history [all] &lt;jobOutputDir&gt;] |
+					[-events &lt;job-id&gt; &lt;from-event-#&gt; &lt;#-of-events&gt;] | [-history [all] &lt;historyFile&gt;] |
 					[-list [all]] | [-kill-task &lt;task-id&gt;] | [-fail-task &lt;task-id&gt;] | 
           [-set-priority &lt;job-id&gt; &lt;priority&gt;]</code>
 				</p>
@@ -266,8 +266,8 @@
 			            <td>Prints the events' details received by jobtracker for the given range.</td>
 			           </tr>
 			           <tr>
-			          	<td><code>-history [all] &lt;jobOutputDir&gt;</code></td>
-			            <td>-history &lt;jobOutputDir&gt; prints job details, failed and killed tip details. More details 
+			          	<td><code>-history [all] &lt;historyFile&gt;</code></td>
+			            <td>-history &lt;historyFile&gt; prints job details, failed and killed tip details. More details 
 			            about the job such as successful tasks and task attempts made for each task can be viewed by 
 			            specifying the [all] option. </td>
 			           </tr>

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml Thu Sep 17 05:04:21 2009
@@ -1462,23 +1462,16 @@
             monitoring it's status.
           </li>
         </ol>
-        <p> Job history files are also logged to user specified directory
-        <code>hadoop.job.history.user.location</code> 
-        which defaults to job output directory. The files are stored in
-        "_logs/history/" in the specified directory. Hence, by default they
-        will be in mapred.output.dir/_logs/history. User can stop
-        logging by giving the value <code>none</code> for 
-        <code>hadoop.job.history.user.location</code></p>
 
-        <p> User can view the history logs summary in specified directory 
+        <p> User can view the history log summary for a given history file
         using the following command <br/>
-        <code>$ bin/hadoop job -history output-dir</code><br/> 
+        <code>$ bin/hadoop job -history history-file</code><br/> 
         This command will print job details, failed and killed tip
         details. <br/>
         More details about the job such as successful tasks and 
         task attempts made for each task can be viewed using the  
         following command <br/>
-       <code>$ bin/hadoop job -history all output-dir</code><br/></p> 
+       <code>$ bin/hadoop job -history all history-file</code><br/></p> 
             
         <p> User can use 
         <a href="ext:api/org/apache/hadoop/mapred/outputlogfilter">OutputLogFilter</a>

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JSPUtil.java Thu Sep 17 05:04:21 2009
@@ -32,7 +32,8 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.mapred.JobHistory.JobInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 import org.apache.hadoop.util.ServletUtil;
 import org.apache.hadoop.util.StringUtils;
 
@@ -268,10 +269,10 @@
     synchronized(jobHistoryCache) {
       JobInfo jobInfo = jobHistoryCache.remove(jobid);
       if (jobInfo == null) {
-        jobInfo = new JobHistory.JobInfo(jobid);
+        JobHistoryParser parser = new JobHistoryParser(fs, logFile);
+        jobInfo = parser.parse();
         LOG.info("Loading Job History file "+jobid + ".   Cache size is " +
             jobHistoryCache.size());
-        DefaultJobHistoryParser.parseJobTasks( logFile, jobInfo, fs) ; 
       }
       jobHistoryCache.put(jobid, jobInfo);
       if (jobHistoryCache.size() > CACHE_SIZE) {
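
[Editor's note] The JSPUtil hunk above shows the new read path: DefaultJobHistoryParser.parseJobTasks(logFile, jobInfo, fs) is gone, and callers instead construct a JobHistoryParser over a FileSystem and a history file path and call parse(). A minimal standalone sketch of that pattern, with a purely hypothetical local history file path:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

    public class HistoryReadSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        // Path to an existing JSON history file; illustrative only.
        String historyFile = "/tmp/job_200909170504_0001_history";
        JobHistoryParser parser = new JobHistoryParser(fs, historyFile);
        // parse() replays the JSON event stream into a JobInfo summary.
        JobInfo jobInfo = parser.parse();
        System.out.println("job=" + jobInfo.getJobId()
            + " status=" + jobInfo.getJobStatus()
            + " maps=" + jobInfo.getTotalMaps());
      }
    }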

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Thu Sep 17 05:04:21 2009
@@ -67,6 +67,7 @@
 import org.apache.hadoop.mapred.ClusterStatus.BlackListInfo;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.Counters.Group;
+import org.apache.hadoop.mapreduce.jobhistory.HistoryViewer;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -1539,7 +1540,7 @@
     } else if ("-events".equals(cmd)) {
       System.err.println(prefix + "[" + cmd + " <job-id> <from-event-#> <#-of-events>]");
     } else if ("-history".equals(cmd)) {
-      System.err.println(prefix + "[" + cmd + " <jobOutputDir>]");
+      System.err.println(prefix + "[" + cmd + " <jobHistoryFile>]");
     } else if ("-list".equals(cmd)) {
       System.err.println(prefix + "[" + cmd + " [all]]");
     } else if ("-kill-task".equals(cmd) || "-fail-task".equals(cmd)) {
@@ -1567,7 +1568,7 @@
                                       "Valid values for priorities are: " +
                                       jobPriorityValues + "\n");
       System.err.printf("\t[-events <job-id> <from-event-#> <#-of-events>]\n");
-      System.err.printf("\t[-history <jobOutputDir>]\n");
+      System.err.printf("\t[-history <jobHistoryFile>]\n");
       System.err.printf("\t[-list [all]]\n");
       System.err.printf("\t[-list-active-trackers]\n");
       System.err.printf("\t[-list-blacklisted-trackers]\n");
@@ -1590,7 +1591,7 @@
     String submitJobFile = null;
     String jobid = null;
     String taskid = null;
-    String outputDir = null;
+    String historyFile = null;
     String counterGroupName = null;
     String counterName = null;
     String newPriority = null;
@@ -1673,9 +1674,9 @@
       viewHistory = true;
       if (argv.length == 3 && "all".equals(argv[1])) {
          viewAllHistory = true;
-         outputDir = argv[2];
+         historyFile = argv[2];
       } else {
-         outputDir = argv[1];
+         historyFile = argv[1];
       }
     } else if ("-list".equals(cmd)) {
       if (argv.length != 1 && !(argv.length == 2 && "all".equals(argv[1]))) {
@@ -1793,7 +1794,7 @@
           exitCode = 0;
         } 
       } else if (viewHistory) {
-        viewHistory(outputDir, viewAllHistory);
+        viewHistory(historyFile, viewAllHistory);
         exitCode = 0;
       } else if (listEvents) {
         listEvents(JobID.forName(jobid), fromEvent, nEvents);
@@ -1835,9 +1836,9 @@
     return exitCode;
   }
 
-  private void viewHistory(String outputDir, boolean all) 
+  private void viewHistory(String historyFile, boolean all) 
     throws IOException {
-    HistoryViewer historyViewer = new HistoryViewer(outputDir,
+    HistoryViewer historyViewer = new HistoryViewer(historyFile,
                                         getConf(), all);
     historyViewer.print();
   }

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Thu Sep 17 05:04:21 2009
@@ -18,13 +18,15 @@
 package org.apache.hadoop.mapred;
 
 import java.io.DataInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.HashSet;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.IdentityHashMap;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
@@ -41,10 +43,25 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobHistory.Values;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.jobhistory.JobFinishedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
+import org.apache.hadoop.mapreduce.jobhistory.JobInfoChangeEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobInitedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobPriorityChangeEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobStatusChangedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobSubmittedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobUnsuccessfulCompletionEvent;
+import org.apache.hadoop.mapreduce.jobhistory.MapAttemptFinishedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.ReduceAttemptFinishedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.TaskAttemptStartedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.TaskAttemptUnsuccessfulCompletionEvent;
+import org.apache.hadoop.mapreduce.jobhistory.TaskFailedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.TaskFinishedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.TaskStartedEvent;
+import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
@@ -52,7 +69,6 @@
 import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.net.Node;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
 
 /*************************************************************
  * JobInProgress maintains all the info for keeping
@@ -119,6 +135,8 @@
 
   JobPriority priority = JobPriority.NORMAL;
   protected JobTracker jobtracker;
+  
+  JobHistory jobHistory;
 
   // NetworkTopology Node to the set of TIPs
   Map<Node, List<TaskInProgress>> nonRunningMapCache;
@@ -314,6 +332,9 @@
         "mapred.speculative.execution.slowNodeThreshold",1.0f);
     this.jobSetupCleanupNeeded = conf.getBoolean(
         "mapred.committer.job.setup.cleanup.needed", true);
+    if (tracker != null) { // Some mock tests have null tracker
+      this.jobHistory = tracker.getJobHistory();
+    }
   }
   
   /**
@@ -327,7 +348,7 @@
     String url = "http://" + jobtracker.getJobTrackerMachine() + ":" 
         + jobtracker.getInfoPort() + "/jobdetails.jsp?jobid=" + jobid;
     this.jobtracker = jobtracker;
-    
+    this.jobHistory = jobtracker.getJobHistory();
     this.startTime = System.currentTimeMillis();
     
     this.localFs = jobtracker.getLocalFileSystem();
@@ -540,7 +561,7 @@
 
     LOG.info("Initializing " + jobId);
 
-    logToJobHistory();
+    logSubmissionToJobHistory();
     
     // log the job priority
     setPriority(this.priority);
@@ -588,8 +609,13 @@
     }
     
     tasksInited.set(true);
-    JobHistory.JobInfo.logInited(profile.getJobID(), this.launchTime, 
-                                 numMapTasks, numReduceTasks);
+    JobInitedEvent jie = new JobInitedEvent(
+        profile.getJobID(),  this.launchTime,
+        numMapTasks, numReduceTasks,
+        JobStatus.getJobRunState(JobStatus.PREP));
+    
+    jobHistory.logEvent(jie, jobId);
+   
   }
 
   // Returns true if the job is empty (0 maps, 0 reduces and no setup-cleanup)
@@ -612,10 +638,18 @@
     setupComplete();
   }
 
-  void logToJobHistory() throws IOException {
+  void logSubmissionToJobHistory() throws IOException {
     // log job info
-    JobHistory.JobInfo.logSubmitted(getJobID(), conf, jobFile.toString(), 
-        this.startTime);
+    String username = conf.getUser();
+    if (username == null) { username = ""; }
+    String jobname = conf.getJobName();
+    if (jobname == null) { jobname = ""; }
+    setUpLocalizedJobConf(conf, jobId);
+    jobHistory.setupEventWriter(jobId, conf);
+    JobSubmittedEvent jse = new JobSubmittedEvent(jobId, jobname, username,
+        this.startTime, jobFile.toString());
+    jobHistory.logEvent(jse, jobId);
+    
   }
 
   JobClient.RawSplit[] createSplits() throws IOException {
@@ -708,7 +742,10 @@
     status.setSetupProgress(1.0f);
     if (this.status.getRunState() == JobStatus.PREP) {
       this.status.setRunState(JobStatus.RUNNING);
-      JobHistory.JobInfo.logStarted(profile.getJobID());
+      JobStatusChangedEvent jse = 
+        new JobStatusChangedEvent(profile.getJobID(),
+         JobStatus.getJobRunState(JobStatus.RUNNING));
+      jobHistory.logEvent(jse, profile.getJobID());
     }
   }
 
@@ -756,6 +793,7 @@
     return numReduceTasks - runningReduceTasks - failedReduceTIPs - 
     finishedReduceTasks + speculativeReduceTasks;
   }
+ 
   public synchronized int getNumSlotsPerTask(TaskType taskType) {
     if (taskType == TaskType.MAP) {
       return numSlotsPerMap;
@@ -776,7 +814,11 @@
       this.priority = priority;
       status.setJobPriority(priority);
       // log and change to the job's priority
-      JobHistory.JobInfo.logJobPriority(jobId, priority);
+      JobPriorityChangeEvent prEvent = 
+        new JobPriorityChangeEvent(jobId, priority);
+       
+      jobHistory.logEvent(prEvent, jobId);
+      
     }
   }
 
@@ -785,7 +827,11 @@
     // log and change to the job's start/launch time
     this.startTime = startTime;
     this.launchTime = launchTime;
-    JobHistory.JobInfo.logJobInfo(jobId, startTime, launchTime);
+    JobInfoChangeEvent event = 
+      new JobInfoChangeEvent(jobId, startTime, launchTime);
+     
+    jobHistory.logEvent(event, jobId);
+    
   }
 
   /**
@@ -1465,18 +1511,18 @@
     final JobTrackerInstrumentation metrics = jobtracker.getInstrumentation();
 
     // keeping the earlier ordering intact
-    String name;
+    TaskType name;
     String splits = "";
     Enum counter = null;
     if (tip.isJobSetupTask()) {
       launchedSetup = true;
-      name = Values.SETUP.name();
+      name = TaskType.JOB_SETUP;
     } else if (tip.isJobCleanupTask()) {
       launchedCleanup = true;
-      name = Values.CLEANUP.name();
+      name = TaskType.JOB_CLEANUP;
     } else if (tip.isMapTask()) {
       ++runningMapTasks;
-      name = Values.MAP.name();
+      name = TaskType.MAP;
       counter = JobCounter.TOTAL_LAUNCHED_MAPS;
       splits = tip.getSplitNodes();
       if (tip.isSpeculating()) {
@@ -1487,7 +1533,7 @@
       metrics.launchMap(id);
     } else {
       ++runningReduceTasks;
-      name = Values.REDUCE.name();
+      name = TaskType.REDUCE;
       counter = JobCounter.TOTAL_LAUNCHED_REDUCES;
       if (tip.isSpeculating()) {
         speculativeReduceTasks++;
@@ -1499,8 +1545,12 @@
     // Note that the logs are for the scheduled tasks only. Tasks that join on
     // restart already have their logs in place.
     if (tip.isFirstAttempt(id)) {
-      JobHistory.Task.logStarted(tip.getTIPId(), name,
-                                 tip.getExecStartTime(), splits);
+      TaskStartedEvent tse = new TaskStartedEvent(tip.getTIPId(), 
+          tip.getExecStartTime(),
+          name, splits);
+      
+      jobHistory.logEvent(tse, tip.getJob().jobId);
+      
     }
     if (!tip.isJobSetupTask() && !tip.isJobCleanupTask()) {
       jobCounters.incrCounter(counter, 1);
@@ -1540,7 +1590,7 @@
     }
   }
     
-  static String convertTrackerNameToHostName(String trackerName) {
+  public static String convertTrackerNameToHostName(String trackerName) {
     // Ugly!
     // Convert the trackerName to its host name
     int indexOfColon = trackerName.indexOf(":");
@@ -2446,35 +2496,45 @@
     TaskTrackerStatus ttStatus = 
       this.jobtracker.getTaskTrackerStatus(status.getTaskTracker());
     String trackerHostname = jobtracker.getNode(ttStatus.getHost()).toString();
-    String taskType = getTaskType(tip);
+    TaskType taskType = getTaskType(tip);
+
+    TaskAttemptStartedEvent tse = new TaskAttemptStartedEvent(
+        status.getTaskID(), taskType, status.getStartTime(), 
+        status.getTaskTracker(),  ttStatus.getHttpPort());
+    
+    jobHistory.logEvent(tse, status.getTaskID().getJobID());
+    
+
     if (status.getIsMap()){
-      JobHistory.MapAttempt.logStarted(status.getTaskID(), status.getStartTime(), 
-                                       status.getTaskTracker(), 
-                                       ttStatus.getHttpPort(), 
-                                       taskType); 
-      JobHistory.MapAttempt.logFinished(status.getTaskID(),
-                                        status.getMapFinishTime(),
-                                        status.getFinishTime(), 
-                                        trackerHostname, taskType,
-                                        status.getStateString(), 
-                                        status.getCounters()); 
+      MapAttemptFinishedEvent mfe = new MapAttemptFinishedEvent(
+          status.getTaskID(), taskType, TaskStatus.State.SUCCEEDED.toString(),
+          status.getMapFinishTime(),
+          status.getFinishTime(),  trackerHostname,
+          status.getStateString(), 
+          new org.apache.hadoop.mapreduce.Counters(status.getCounters()));
+      
+      jobHistory.logEvent(mfe,  status.getTaskID().getJobID());
+      
     }else{
-      JobHistory.ReduceAttempt.logStarted( status.getTaskID(), status.getStartTime(), 
-                                          status.getTaskTracker(),
-                                          ttStatus.getHttpPort(), 
-                                          taskType); 
-      JobHistory.ReduceAttempt.logFinished(status.getTaskID(), status.getShuffleFinishTime(),
-                                           status.getSortFinishTime(), status.getFinishTime(), 
-                                           trackerHostname, 
-                                           taskType,
-                                           status.getStateString(), 
-                                           status.getCounters()); 
-    }
-    JobHistory.Task.logFinished(tip.getTIPId(), 
-                                taskType,
-                                tip.getExecFinishTime(),
-                                status.getCounters()); 
-        
+      ReduceAttemptFinishedEvent rfe = new ReduceAttemptFinishedEvent(
+          status.getTaskID(), taskType, TaskStatus.State.SUCCEEDED.toString(), 
+          status.getShuffleFinishTime(),
+          status.getSortFinishTime(), status.getFinishTime(),
+          trackerHostname, status.getStateString(),
+          new org.apache.hadoop.mapreduce.Counters(status.getCounters()));
+      
+      jobHistory.logEvent(rfe,  status.getTaskID().getJobID());
+      
+    }
+
+    TaskFinishedEvent tfe = new TaskFinishedEvent(tip.getTIPId(),
+        tip.getExecFinishTime(), taskType, 
+        TaskStatus.State.SUCCEEDED.toString(),
+        new org.apache.hadoop.mapreduce.Counters(status.getCounters()));
+    
+    jobHistory.logEvent(tfe, tip.getJob().getJobID());
+    
+   
     if (tip.isJobSetupTask()) {
       // setup task has finished. kill the extra setup tip
       killSetupTip(!tip.isMapTask());
@@ -2610,10 +2670,16 @@
       JobSummary.logJobSummary(this, jobtracker.getClusterStatus(false));
 
       // Log job-history
-      JobHistory.JobInfo.logFinished(this.status.getJobID(), finishTime, 
-                                     this.finishedMapTasks, 
-                                     this.finishedReduceTasks, failedMapTasks, 
-                                     failedReduceTasks, getCounters());
+      JobFinishedEvent jfe = 
+        new JobFinishedEvent(this.status.getJobID(),
+          this.finishTime,
+          this.finishedMapTasks, this.finishedReduceTasks, failedMapTasks, 
+          failedReduceTasks, 
+          new org.apache.hadoop.mapreduce.Counters(getCounters()));
+      
+      jobHistory.logEvent(jfe, this.status.getJobID());
+      jobHistory.closeWriter(this.status.getJobID());
+
       // Note that finalize() will close the job history handles, which
       // garbage collection might otherwise try to finalize
       garbageCollect();
@@ -2634,27 +2700,24 @@
 
       if (jobTerminationState == JobStatus.FAILED) {
         this.status.setRunState(JobStatus.FAILED);
-        
-        // Log the job summary
-        JobSummary.logJobSummary(this, jobtracker.getClusterStatus(false));
-
-        // Log to job-history
-        JobHistory.JobInfo.logFailed(this.status.getJobID(), finishTime, 
-                                     this.finishedMapTasks, 
-                                     this.finishedReduceTasks);
       } else {
         this.status.setRunState(JobStatus.KILLED);
+      }
+      // Log the job summary
+      JobSummary.logJobSummary(this, jobtracker.getClusterStatus(false));
 
-        // Log the job summary
-        JobSummary.logJobSummary(this, jobtracker.getClusterStatus(false));
+      JobUnsuccessfulCompletionEvent failedEvent = 
+        new JobUnsuccessfulCompletionEvent(this.status.getJobID(),
+            finishTime,
+            this.finishedMapTasks, 
+            this.finishedReduceTasks,
+            JobStatus.getJobRunState(jobTerminationState));
+      
+      jobHistory.logEvent(failedEvent, this.status.getJobID());
+      jobHistory.closeWriter(this.status.getJobID());
 
-        // Log to job-history
-        JobHistory.JobInfo.logKilled(this.status.getJobID(), finishTime, 
-                                     this.finishedMapTasks, 
-                                     this.finishedReduceTasks);
-      }
       garbageCollect();
-      
+
       jobtracker.getInstrumentation().terminateJob(
           this.conf, this.status.getJobID());
     }
@@ -2882,28 +2945,18 @@
     List<String> taskDiagnosticInfo = tip.getDiagnosticInfo(taskid);
     String diagInfo = taskDiagnosticInfo == null ? "" :
       StringUtils.arrayToString(taskDiagnosticInfo.toArray(new String[0]));
-    String taskType = getTaskType(tip);
-    if (taskStatus.getIsMap()) {
-      JobHistory.MapAttempt.logStarted(taskid, startTime, 
-        taskTrackerName, taskTrackerPort, taskType);
-      if (taskStatus.getRunState() == TaskStatus.State.FAILED) {
-        JobHistory.MapAttempt.logFailed(taskid, finishTime,
-          taskTrackerHostName, diagInfo, taskType);
-      } else {
-        JobHistory.MapAttempt.logKilled(taskid, finishTime,
-          taskTrackerHostName, diagInfo, taskType);
-      }
-    } else {
-      JobHistory.ReduceAttempt.logStarted(taskid, startTime, 
-        taskTrackerName, taskTrackerPort, taskType);
-      if (taskStatus.getRunState() == TaskStatus.State.FAILED) {
-        JobHistory.ReduceAttempt.logFailed(taskid, finishTime,
-          taskTrackerHostName, diagInfo, taskType);
-      } else {
-        JobHistory.ReduceAttempt.logKilled(taskid, finishTime,
-          taskTrackerHostName, diagInfo, taskType);
-      }
-    }
+    TaskType taskType = getTaskType(tip);
+    TaskAttemptStartedEvent tse = new TaskAttemptStartedEvent(
+        taskid, taskType, startTime, taskTrackerName, taskTrackerPort);
+    
+    jobHistory.logEvent(tse, taskid.getJobID());
+   
+    TaskAttemptUnsuccessfulCompletionEvent tue = 
+      new TaskAttemptUnsuccessfulCompletionEvent(taskid, 
+          taskType, taskStatus.getRunState().toString(),
+          finishTime, 
+          taskTrackerHostName, diagInfo);
+    jobHistory.logEvent(tue, taskid.getJobID());
         
     // After this, try to assign tasks with the one after this, so that
     // the failed task goes to the end of the list.
@@ -2944,10 +2997,13 @@
       
       if (killJob) {
         LOG.info("Aborting job " + profile.getJobID());
-        JobHistory.Task.logFailed(tip.getTIPId(), 
-                                  taskType,  
-                                  finishTime, 
-                                  diagInfo);
+        TaskFailedEvent tfe = 
+          new TaskFailedEvent(tip.getTIPId(), finishTime, taskType, diagInfo,
+              TaskStatus.State.FAILED.toString(),
+              null);
+        
+        jobHistory.logEvent(tfe, tip.getJob().getJobID());
+        
         if (tip.isJobCleanupTask()) {
           // kill the other tip
           if (tip.isMapTask()) {
@@ -3031,9 +3087,14 @@
     updateTaskStatus(tip, status);
     boolean isComplete = tip.isComplete();
     if (wasComplete && !isComplete) { // mark a successful tip as failed
-      String taskType = getTaskType(tip);
-      JobHistory.Task.logFailed(tip.getTIPId(), taskType, 
-                                tip.getExecFinishTime(), reason, taskid);
+      TaskType taskType = getTaskType(tip);
+      TaskFailedEvent tfe = 
+        new TaskFailedEvent(tip.getTIPId(), tip.getExecFinishTime(), taskType,
+            reason, TaskStatus.State.FAILED.toString(),
+            taskid);
+      
+      jobHistory.logEvent(tfe, tip.getJob().getJobID());
+      
     }
   }
        
@@ -3221,15 +3282,15 @@
   /**
    * Get the task type for logging it to {@link JobHistory}.
    */
-  private String getTaskType(TaskInProgress tip) {
+  private TaskType getTaskType(TaskInProgress tip) {
     if (tip.isJobCleanupTask()) {
-      return Values.CLEANUP.name();
+      return TaskType.JOB_CLEANUP;
     } else if (tip.isJobSetupTask()) {
-      return Values.SETUP.name();
+      return TaskType.JOB_SETUP;
     } else if (tip.isMapTask()) {
-      return Values.MAP.name();
+      return TaskType.MAP;
     } else {
-      return Values.REDUCE.name();
+      return TaskType.REDUCE;
     }
   }
   
@@ -3326,4 +3387,47 @@
       );
     }
   }
+  
+  /**
+   * Creates the localized copy of the job conf
+   * @param jobConf the job configuration to localize
+   * @param id the id of the job
+   */
+  void setUpLocalizedJobConf(JobConf jobConf, 
+      org.apache.hadoop.mapreduce.JobID id) {
+    String localJobFilePath = jobtracker.getLocalJobFilePath(id); 
+    File localJobFile = new File(localJobFilePath);
+    FileOutputStream jobOut = null;
+    try {
+      jobOut = new FileOutputStream(localJobFile);
+      jobConf.writeXml(jobOut);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Job conf for " + id + " stored at " 
+            + localJobFile.getAbsolutePath());
+      }
+    } catch (IOException ioe) {
+      LOG.error("Failed to store job conf on the local filesystem ", ioe);
+    } finally {
+      if (jobOut != null) {
+        try {
+          jobOut.close();
+        } catch (IOException ie) {
+          LOG.info("Failed to close the job configuration file " 
+              + StringUtils.stringifyException(ie));
+        }
+      }
+    }
+  }
+
+  /**
+   * Deletes the localized copy of the job conf
+   * @param id the id of the job
+   */
+  void cleanupLocalizedJobConf(org.apache.hadoop.mapreduce.JobID id) {
+    String localJobFilePath = jobtracker.getLocalJobFilePath(id);
+    File f = new File(localJobFilePath);
+    LOG.info("Deleting localized job conf at " + f);
+    if (!f.delete()) {
+      LOG.debug("Failed to delete file " + f);
+    }
+  }
 }
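
Taken together, the JobInProgress changes above give every job a small writer lifecycle. A hedged sketch of the call sequence (names as in this patch; ordering inferred from the hunks, not a verbatim excerpt):

    jobHistory.setupEventWriter(jobId, conf);       // in logSubmissionToJobHistory()
    jobHistory.logEvent(new JobSubmittedEvent(jobId, jobname, username,
        startTime, jobFile.toString()), jobId);     // first event for the job
    // ... task, attempt, and status-change events while the job runs ...
    jobHistory.closeWriter(jobId);                  // on successful finish or unsuccessful completion
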

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java Thu Sep 17 05:04:21 2009
@@ -18,7 +18,9 @@
 package org.apache.hadoop.mapred;
 
 
+import java.io.File;
 import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.UnsupportedEncodingException;
@@ -71,6 +73,7 @@
 import org.apache.hadoop.mapred.JobStatusChangeEvent.EventType;
 import org.apache.hadoop.mapred.JobTrackerStatistics.TaskTrackerStat;
 import org.apache.hadoop.mapred.TaskTrackerStatus.TaskTrackerHealthStatus;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
 import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.net.NetUtils;
@@ -149,6 +152,8 @@
   
   static final Clock DEFAULT_CLOCK = new Clock();
 
+  private JobHistory jobHistory = null;
+
   /**
    * A client tried to submit a job before the Job Tracker was ready.
    */
@@ -175,6 +180,11 @@
   }
   
   /**
+   * Return the JT's job history handle.
+   * @return the job history handle
+   */
+  JobHistory getJobHistory() { return jobHistory; }
+  /**
    * Start the JobTracker with given configuration.
    * 
    * The conf will be modified to reflect the actual ports on which 
@@ -418,7 +428,7 @@
     }
   }
 
-  synchronized void retireJob(JobID jobid, String historyFile) {
+  public synchronized void retireJob(JobID jobid, String historyFile) {
     synchronized (jobs) {
       JobInProgress job = jobs.get(jobid);
       if (job != null) {
@@ -438,7 +448,7 @@
         }
         status.setTrackingUrl(trackingUrl);
         // clean up job files from the local disk
-        JobHistory.JobInfo.cleanupJob(job.getProfile().getJobID());
+        job.cleanupLocalizedJobConf(job.getProfile().getJobID());
 
         //this configuration is primarily for testing
         //test cases can set this to false to validate job data structures on 
@@ -1346,8 +1356,8 @@
         tmpInfoPort == 0, conf);
     infoServer.setAttribute("job.tracker", this);
     // initialize history parameters.
-    boolean historyInitialized = JobHistory.init(this, conf, this.localMachine,
-                                                 this.startTime);
+    jobHistory = new JobHistory();
+    jobHistory.init(this, conf, this.localMachine, this.startTime);
     
     infoServer.addServlet("reducegraph", "/taskgraph", TaskGraphServlet.class);
     infoServer.start();
@@ -1448,14 +1458,12 @@
     jobConf.deleteLocalFiles(SUBDIR);
 
     // Initialize history DONE folder
-    if (historyInitialized) {
-      JobHistory.initDone(conf, fs);
-      String historyLogDir = 
-        JobHistory.getCompletedJobHistoryLocation().toString();
-      infoServer.setAttribute("historyLogDir", historyLogDir);
-      FileSystem historyFS = new Path(historyLogDir).getFileSystem(conf);
-      infoServer.setAttribute("fileSys", historyFS);
-    }
+    jobHistory.initDone(conf, fs);
+    String historyLogDir = 
+      jobHistory.getCompletedJobHistoryLocation().toString();
+    infoServer.setAttribute("historyLogDir", historyLogDir);
+    FileSystem historyFS = new Path(historyLogDir).getFileSystem(conf);
+    infoServer.setAttribute("fileSys", historyFS);
 
     this.dnsToSwitchMapping = ReflectionUtils.newInstance(
         conf.getClass("topology.node.switch.mapping.impl", ScriptBasedMapping.class,
@@ -1645,6 +1653,11 @@
         ex.printStackTrace();
       }
     }
+    
+    if (jobHistory != null) {
+      jobHistory.shutDown();
+    }
+    
     LOG.info("stopped all jobtracker services");
     return;
   }
@@ -1830,7 +1843,7 @@
 
     // mark the job as completed
     try {
-      JobHistory.JobInfo.markCompleted(id);
+      jobHistory.markCompleted(id);
     } catch (IOException ioe) {
       LOG.info("Failed to mark job " + id + " as completed!", ioe);
     }
@@ -3541,14 +3554,6 @@
     return hostsReader.getExcludedHosts();
   }
 
-  /**
-   * Get the localized job file path on the job trackers local file system
-   * @param jobId id of the job
-   * @return the path of the job conf file on the local file system
-   */
-  public static String getLocalJobFilePath(JobID jobId){
-    return JobHistory.JobInfo.getLocalJobFilePath(jobId);
-  }
   ////////////////////////////////////////////////////////////
   // main()
   ////////////////////////////////////////////////////////////
@@ -3831,5 +3836,13 @@
     faultyTrackers.incrementFaults(hostName);
   }
   
-  
+  /**
+   * Get the path of the locally stored job file
+   * @param jobId id of the job
+   * @return the path of the job file on the local file system 
+   */
+  String getLocalJobFilePath(org.apache.hadoop.mapreduce.JobID jobId){
+    return System.getProperty("hadoop.log.dir") + 
+           File.separator + jobId + "_conf.xml";
+  }
 }
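
On the JobTracker side, the history module is now a per-tracker instance rather than a static singleton. The wiring this patch adds amounts to the following (a sketch assembled from the hunks above, assuming the surrounding JobTracker fields):

    jobHistory = new JobHistory();
    jobHistory.init(this, conf, this.localMachine, this.startTime);  // log directory setup
    jobHistory.initDone(conf, fs);                                   // completed-history (DONE) folder
    // ... logEvent()/markCompleted() calls while jobs run ...
    jobHistory.shutDown();                                           // during JobTracker shutdown
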

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java Thu Sep 17 05:04:21 2009
@@ -36,6 +36,8 @@
 import org.apache.hadoop.mapred.JobInProgress.DataStatistics;
 import org.apache.hadoop.mapred.SortedRanges.Range;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
+import org.apache.hadoop.mapreduce.jobhistory.TaskUpdatedEvent;
 import org.apache.hadoop.net.Node;
 
 
@@ -67,6 +69,7 @@
   private int numMaps;
   private int partition;
   private JobTracker jobtracker;
+  private JobHistory jobHistory;
   private TaskID id;
   private JobInProgress job;
   private final int numSlotsRequired;
@@ -151,6 +154,9 @@
     this.numSlotsRequired = numSlotsRequired;
     setMaxTaskAttempts();
     init(jobid);
+    if (jobtracker != null) {
+      this.jobHistory = jobtracker.getJobHistory();
+    }
   }
         
   /**
@@ -170,6 +176,9 @@
     this.numSlotsRequired = numSlotsRequired;
     setMaxTaskAttempts();
     init(jobid);
+    if (jobtracker != null) {
+      this.jobHistory = jobtracker.getJobHistory();
+    }
   }
   
   /**
@@ -287,7 +296,8 @@
    */
   public void setExecFinishTime(long finishTime) {
     execFinishTime = finishTime;
-    JobHistory.Task.logUpdates(id, execFinishTime); // log the update
+    TaskUpdatedEvent tue = new TaskUpdatedEvent(id, execFinishTime);
+    jobHistory.logEvent(tue, id.getJobID());
   }
   
   /**

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/CounterGroup.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/CounterGroup.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/CounterGroup.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/CounterGroup.java Thu Sep 17 05:04:21 2009
@@ -86,12 +86,12 @@
   }
 
   /**
-   * Internal to find a counter in a group.
+   * Find a counter in a group.
    * @param counterName the name of the counter
    * @param displayName the display name of the counter
    * @return the counter that was found or added
    */
-  protected Counter findCounter(String counterName, String displayName) {
+  public Counter findCounter(String counterName, String displayName) {
     Counter result = counters.get(counterName);
     if (result == null) {
       result = new Counter(counterName, displayName);
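
With findCounter(String, String) now public, code outside the package (the JSON event reader below, for one) can materialize counters by id and display name. A small self-contained sketch; the class, group, and counter names are hypothetical:

    import org.apache.hadoop.mapreduce.Counter;
    import org.apache.hadoop.mapreduce.CounterGroup;
    import org.apache.hadoop.mapreduce.Counters;

    public class CounterDemo {
      public static void main(String[] args) {
        Counters counters = new Counters();
        CounterGroup group = counters.getGroup("demo.group");          // hypothetical group id
        Counter c = group.findCounter("DEMO_RECORDS", "Demo records"); // now callable from any package
        c.increment(42);
        System.out.println(c.getDisplayName() + " = " + c.getValue());
      }
    }
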

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Counters.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Counters.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Counters.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/Counters.java Thu Sep 17 05:04:21 2009
@@ -25,7 +25,12 @@
   public Counters() {
   }
   
-  Counters(org.apache.hadoop.mapred.Counters counters) {
+  /**
+   * Utility constructor to create a Counters object from an
+   * org.apache.hadoop.mapred.Counters instance
+   * @param counters the old-format counters to convert
+   */
+  public Counters(org.apache.hadoop.mapred.Counters counters) {
     for(org.apache.hadoop.mapred.Counters.Group group: counters) {
       String name = group.getName();
       CounterGroup newGroup = new CounterGroup(name, group.getDisplayName());
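
And with this constructor public, old-API counters convert directly wherever history events are assembled. A runnable sketch; the class and counter names are hypothetical:

    public class CountersConversionDemo {
      public static void main(String[] args) {
        org.apache.hadoop.mapred.Counters oldStyle =
            new org.apache.hadoop.mapred.Counters();
        oldStyle.incrCounter("demo-group", "DEMO_COUNTER", 1L);  // hypothetical names
        // The now-public constructor bridges the two counter APIs.
        org.apache.hadoop.mapreduce.Counters converted =
            new org.apache.hadoop.mapreduce.Counters(oldStyle);
        System.out.println(converted.getGroup("demo-group")
            .findCounter("DEMO_COUNTER", "DEMO_COUNTER").getValue());
      }
    }
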

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.CounterGroup;
+import org.apache.hadoop.mapreduce.Counters;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+public class EventReader {
+
+  static final JsonFactory FACTORY = new JsonFactory();
+
+  enum GroupFields { ID, NAME, LIST }
+  enum CounterFields { ID, NAME, VALUE }
+
+  private final JsonParser parser;
+  private FSDataInputStream in;
+  
+  private String version = null;
+
+  /**
+   * Create a new Event Reader
+   * @param fs the file system containing the history file
+   * @param name the path of the history file
+   * @throws IOException
+   */
+  public EventReader(FileSystem fs, Path name) throws IOException {
+    this (fs.open(name));
+  }
+
+  /**
+   * Create a new Event Reader
+   * @param in the already-opened input stream to read events from
+   * @throws IOException
+   */
+  public EventReader(FSDataInputStream in) throws IOException {
+    this.in = in;
+    parser = FACTORY.createJsonParser(in);
+    readVersionInfo();
+  }
+
+  private void readVersionInfo() throws IOException {
+    if (parser.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+    
+    parser.nextToken(); // Key
+    parser.nextToken(); // Value
+    
+    this.version = parser.getText();
+    
+    parser.nextToken(); // Consume the End Object
+  }
+  
+  /**
+   * Return the current history version
+   */
+  public String getHistoryVersion() { return version; }
+  
+  /**
+   * Get the next event from the stream
+   * @return the next event
+   * @throws IOException
+   */
+  public HistoryEvent getNextEvent() throws IOException {
+    EventType type = getHistoryEventType();
+
+    if (type == null) {
+      return null;
+    }
+
+    Class<? extends HistoryEvent> clazz = type.getKlass();
+
+    if (clazz == null) {
+      throw new IOException("CLass not known for " + type);
+    }
+
+    HistoryEvent ev = null;
+    try {
+      ev = clazz.newInstance();
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw new IOException("Error Instantiating new object");
+    }
+
+    ev.readFields(parser);
+    return ev;
+  }
+
+  /**
+   * Close the Event reader
+   * @throws IOException
+   */
+  public void close() throws IOException {
+    if (in != null) {
+      in.close();
+    }
+    in = null;
+  }
+
+ 
+  /**
+   * Read the next JSON object to identify the event type.
+   * @return the EventType, or null at end of stream
+   * @throws IOException
+   */
+  private EventType getHistoryEventType()
+  throws IOException {
+
+    if (parser.nextToken() == null) { // Verify the Start Object
+      return null; 
+    }
+
+    parser.nextToken();// Get the Event type
+
+    String fieldname = parser.getCurrentName();
+
+    if (!"EVENT_TYPE".equals(fieldname)) {
+      throw new IOException("Unexpected event type: " + fieldname);
+    }
+
+    parser.nextToken(); // Go to the value
+    String type = parser.getText();
+
+    parser.nextToken(); // Consume the end object
+
+    return Enum.valueOf(EventType.class, type);
+  }
+
+
+  static Counters readCounters(JsonParser jp) throws IOException {
+    Counters counters = new Counters();
+    while (jp.nextToken() != JsonToken.END_ARRAY) {
+      readOneGroup(counters, jp);
+    }
+    return counters;
+  }
+
+  static void readOneGroup(Counters counters, JsonParser jp)
+  throws IOException {
+
+    jp.nextToken(); 
+
+    String fieldname = jp.getCurrentName();
+
+    if (!Enum.valueOf(GroupFields.class, fieldname).equals(GroupFields.ID)) {
+      throw new IOException("Internal error");
+    }
+    
+    jp.nextToken(); // Get the value
+    
+    CounterGroup grp = counters.getGroup(jp.getText());
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      fieldname = jp.getCurrentName();
+      jp.nextToken(); // move to value
+      switch(Enum.valueOf(GroupFields.class, fieldname)) {
+      case NAME: 
+        break;
+      case LIST: 
+        while (jp.nextToken() != JsonToken.END_ARRAY) {
+          readOneCounter(grp, jp);
+        }
+        break;
+      default:
+        throw new IOException("Unrecognized field '" + fieldname + "'!");
+      }
+    }    
+  }
+
+  static void readOneCounter(CounterGroup grp, JsonParser jp)
+  throws IOException {
+    String name = null;
+    String displayName = null;
+    long value = 0;
+    
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldname = jp.getCurrentName();
+      jp.nextToken();
+      switch (Enum.valueOf(CounterFields.class, fieldname)) {
+      case ID: name = jp.getText(); break;
+      case NAME: displayName = jp.getText(); break;
+      case VALUE: value = jp.getLongValue(); break;
+      default:
+        throw new IOException("Unrecognized field '"+ fieldname + "'!");
+      }
+    }
+    
+    Counter ctr = grp.findCounter(name, displayName);
+    ctr.increment(value);
+  }
+
+}
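
A usage sketch for the reader: open a history file, check the version header, then loop until getNextEvent() returns null. The class name and file path are placeholders, not part of this patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.jobhistory.EventReader;
    import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;

    public class HistoryDump {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        EventReader reader = new EventReader(fs, new Path(args[0])); // path to a JSON history file
        System.out.println("history version: " + reader.getHistoryVersion());
        try {
          HistoryEvent event;
          while ((event = reader.getNextEvent()) != null) { // null signals end of stream
            System.out.println(event.getClass().getSimpleName());
          }
        } finally {
          reader.close();
        }
      }
    }
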

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventType.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventType.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventType.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/EventType.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+/**
+ * List of all event types currently supported by the Job History module
+ *
+ */
+public enum EventType {
+    JOB_SUBMITTED (JobSubmittedEvent.class),
+    JOB_INITED (JobInitedEvent.class),
+    JOB_FINISHED (JobFinishedEvent.class),
+    JOB_PRIORITY_CHANGED (JobPriorityChangeEvent.class),
+    JOB_STATUS_CHANGED (JobStatusChangedEvent.class),
+    JOB_FAILED (JobUnsuccessfulCompletionEvent.class),
+    JOB_KILLED (JobUnsuccessfulCompletionEvent.class),
+    JOB_INFO_CHANGED (JobInfoChangeEvent.class),
+    TASK_STARTED (TaskStartedEvent.class),
+    TASK_FINISHED (TaskFinishedEvent.class),
+    TASK_FAILED (TaskFailedEvent.class),
+    TASK_UPDATED (TaskUpdatedEvent.class),
+    MAP_ATTEMPT_STARTED (TaskAttemptStartedEvent.class),
+    MAP_ATTEMPT_FINISHED (MapAttemptFinishedEvent.class),
+    MAP_ATTEMPT_FAILED (TaskAttemptUnsuccessfulCompletionEvent.class), 
+    MAP_ATTEMPT_KILLED (TaskAttemptUnsuccessfulCompletionEvent.class),
+    REDUCE_ATTEMPT_STARTED (TaskAttemptStartedEvent.class),
+    REDUCE_ATTEMPT_FINISHED (ReduceAttemptFinishedEvent.class), 
+    REDUCE_ATTEMPT_FAILED (TaskAttemptUnsuccessfulCompletionEvent.class), 
+    REDUCE_ATTEMPT_KILLED (TaskAttemptUnsuccessfulCompletionEvent.class),
+    SETUP_ATTEMPT_STARTED (TaskAttemptStartedEvent.class),
+    SETUP_ATTEMPT_FINISHED (TaskAttemptFinishedEvent.class), 
+    SETUP_ATTEMPT_FAILED (TaskAttemptUnsuccessfulCompletionEvent.class), 
+    SETUP_ATTEMPT_KILLED (TaskAttemptUnsuccessfulCompletionEvent.class),
+    CLEANUP_ATTEMPT_STARTED (TaskAttemptStartedEvent.class),
+    CLEANUP_ATTEMPT_FINISHED (TaskAttemptFinishedEvent.class), 
+    CLEANUP_ATTEMPT_FAILED (TaskAttemptUnsuccessfulCompletionEvent.class), 
+    CLEANUP_ATTEMPT_KILLED (TaskAttemptUnsuccessfulCompletionEvent.class); 
+   
+    Class<? extends HistoryEvent> klass;
+   
+    EventType(Class< ? extends HistoryEvent> klass) {
+      this.klass = klass;
+    }
+   
+    Class<? extends HistoryEvent> getKlass() {
+      return this.klass;
+    }
+  }
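
Each constant carries its implementing class, so the reader can resolve a serialized type name with plain reflection. A sketch of that mapping (the class name is hypothetical; it sits in the same package because getKlass() is package-private, and it uses no-arg construction just as EventReader does):

    package org.apache.hadoop.mapreduce.jobhistory;

    class EventTypeResolver {
      /** Resolve a serialized event-type name to a fresh, empty event object. */
      static HistoryEvent eventFor(String typeName) throws Exception {
        EventType type = Enum.valueOf(EventType.class, typeName);
        return type.getKlass().newInstance(); // assumes a no-arg constructor, as EventReader does
      }
    }
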


