hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sha...@apache.org
Subject svn commit: r816052 [3/5] - in /hadoop/mapreduce/trunk: ./ src/contrib/capacity-scheduler/ src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ src/contrib/fairscheduler/ src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/ src/con...
Date Thu, 17 Sep 2009 05:04:27 GMT
Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.JobID;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record the initialization of a job.
+ */
+public class JobInitedEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private JobID jobid;
+  private long launchTime;
+  private int totalMaps;
+  private int totalReduces;
+  private String jobStatus;
+
+  enum EventFields { EVENT_CATEGORY,
+                     JOB_ID,
+                     LAUNCH_TIME,
+                     TOTAL_MAPS,
+                     TOTAL_REDUCES,
+                     JOB_STATUS }
+
+  /**
+   * Create an event to record job initialization.
+   * @param id the job ID
+   * @param launchTime the time the job was launched
+   * @param totalMaps the total number of map tasks
+   * @param totalReduces the total number of reduce tasks
+   * @param jobStatus the status of the job at initialization
+   */
+  public JobInitedEvent(JobID id, long launchTime, int totalMaps,
+      int totalReduces, String jobStatus) {
+    this.jobid = id;
+    this.launchTime = launchTime;
+    this.totalMaps = totalMaps;
+    this.totalReduces = totalReduces;
+    this.jobStatus = jobStatus;
+    this.category = EventCategory.JOB;
+  }
+
+  /** Constructor for deserialization via {@link #readFields(JsonParser)}. */
+  JobInitedEvent() { }
+
+  /** Get the job ID */
+  public JobID getJobId() { return jobid; }
+  /** Get the launch time */
+  public long getLaunchTime() { return launchTime; }
+  /** Get the total number of maps */
+  public int getTotalMaps() { return totalMaps; }
+  /** Get the total number of reduces */
+  public int getTotalReduces() { return totalReduces; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the status */
+  public String getStatus() { return jobStatus; }
+  /** Get the event type */
+  public EventType getEventType() {
+    return EventType.JOB_INITED;
+  }
+
+  /**
+   * Populate this event from a serialized JSON object.
+   * @param jp parser positioned just before the event object
+   * @throws IOException if the stream is not a well-formed event object
+   */
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      // Fixed message: the original concatenation embedded a stray "+".
+      throw new IOException("Unexpected Token while reading," +
+          " expected a Start Object");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldName = jp.getCurrentName();
+      jp.nextToken(); // Move to the value
+      switch (Enum.valueOf(EventFields.class, fieldName)) {
+        case EVENT_CATEGORY:
+          category = Enum.valueOf(EventCategory.class, jp.getText());
+          break;
+        case JOB_ID: jobid = JobID.forName(jp.getText()); break;
+        case LAUNCH_TIME: launchTime = jp.getLongValue(); break;
+        case TOTAL_MAPS: totalMaps = jp.getIntValue(); break;
+        case TOTAL_REDUCES: totalReduces = jp.getIntValue(); break;
+        case JOB_STATUS: jobStatus = jp.getText(); break;
+        default:
+          throw new IOException("Unrecognized field '"+ fieldName + "'!");
+      }
+    }
+  }
+
+  /**
+   * Serialize this event as a JSON object.
+   * @param gen generator to write the event to
+   * @throws IOException on a write failure
+   */
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.JOB_ID.toString(), jobid.toString());
+    gen.writeNumberField(EventFields.LAUNCH_TIME.toString(), launchTime);
+    gen.writeNumberField(EventFields.TOTAL_MAPS.toString(), totalMaps);
+    gen.writeNumberField(EventFields.TOTAL_REDUCES.toString(), totalReduces);
+    gen.writeStringField(EventFields.JOB_STATUS.toString(), jobStatus);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobPriorityChangeEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobPriorityChangeEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobPriorityChangeEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobPriorityChangeEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.JobPriority;
+import org.apache.hadoop.mapreduce.JobID;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event recording a change in the priority of a job.
+ */
+public class JobPriorityChangeEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private JobID jobid;
+  private JobPriority priority;
+
+  enum EventFields { EVENT_CATEGORY,
+    JOB_ID,
+    PRIORITY }
+
+  /** Generate an event to record changes in Job priority
+   * @param id Job Id
+   * @param priority The new priority of the job
+   */
+  public JobPriorityChangeEvent(JobID id, JobPriority priority) {
+    this.category = EventCategory.JOB;
+    this.jobid = id;
+    this.priority = priority;
+  }
+
+  /** Constructor used only when deserializing from JSON. */
+  JobPriorityChangeEvent() { }
+
+  /** Get the Job ID */
+  public JobID getJobId() { return jobid; }
+  /** Get the job priority */
+  public JobPriority getPriority() { return priority; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the event type */
+  public EventType getEventType() { return EventType.JOB_PRIORITY_CHANGED; }
+
+  /** Fill in this event from the JSON object at the parser's position. */
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String field = jp.getCurrentName();
+      jp.nextToken(); // advance to the field's value
+      switch (EventFields.valueOf(field)) {
+      case EVENT_CATEGORY:
+        category = EventCategory.valueOf(jp.getText());
+        break;
+      case JOB_ID:
+        jobid = JobID.forName(jp.getText());
+        break;
+      case PRIORITY:
+        priority = JobPriority.valueOf(jp.getText());
+        break;
+      default:
+        throw new IOException("Unrecognized field '"+field+"'!");
+      }
+    }
+  }
+
+  /** Write this event out as a JSON object. */
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.JOB_ID.toString(), jobid.toString());
+    gen.writeStringField(EventFields.PRIORITY.toString(),
+        priority.toString());
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobStatusChangedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobStatusChangedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobStatusChangedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobStatusChangedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.JobID;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event recording a change in the status of a job.
+ */
+public class JobStatusChangedEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private JobID jobid;
+  private String jobStatus;
+
+  enum EventFields { EVENT_CATEGORY,
+    JOB_ID,
+    JOB_STATUS }
+
+  /**
+   * Create an event to record the change in the Job Status
+   * @param id Job ID
+   * @param jobStatus The new job status
+   */
+  public JobStatusChangedEvent(JobID id, String jobStatus) {
+    this.category = EventCategory.JOB;
+    this.jobid = id;
+    this.jobStatus = jobStatus;
+  }
+
+  /** Constructor used only when deserializing from JSON. */
+  JobStatusChangedEvent() { }
+
+  /** Get the Job Id */
+  public JobID getJobId() { return jobid; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the event status */
+  public String getStatus() { return jobStatus; }
+  /** Get the event type */
+  public EventType getEventType() { return EventType.JOB_STATUS_CHANGED; }
+
+  /** Fill in this event from the JSON object at the parser's position. */
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String field = jp.getCurrentName();
+      jp.nextToken(); // advance to the field's value
+      switch (EventFields.valueOf(field)) {
+      case EVENT_CATEGORY:
+        category = EventCategory.valueOf(jp.getText());
+        break;
+      case JOB_ID:
+        jobid = JobID.forName(jp.getText());
+        break;
+      case JOB_STATUS:
+        jobStatus = jp.getText();
+        break;
+      default:
+        throw new IOException("Unrecognized field '"+field+"'!");
+      }
+    }
+  }
+
+  /** Write this event out as a JSON object. */
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.JOB_ID.toString(), jobid.toString());
+    gen.writeStringField(EventFields.JOB_STATUS.toString(), jobStatus);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobSubmittedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.JobID;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record the submission of a job.
+ */
+public class JobSubmittedEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private JobID jobid;
+  private String jobName;
+  private String userName;
+  private long submitTime;
+  private String jobConfPath;
+
+  enum EventFields { EVENT_CATEGORY,
+                     JOB_ID,
+                     JOB_NAME,
+                     USER_NAME,
+                     SUBMIT_TIME,
+                     JOB_CONF_PATH }
+
+  /**
+   * Create an event to record job submission
+   * @param id The job Id of the job
+   * @param jobName Name of the job
+   * @param userName Name of the user who submitted the job
+   * @param submitTime Time of submission
+   * @param jobConfPath Path of the Job Configuration file
+   */
+  public JobSubmittedEvent(JobID id, String jobName, String userName,
+      long submitTime, String jobConfPath) {
+    this.jobid = id;
+    this.jobName = jobName;
+    this.userName = userName;
+    this.submitTime = submitTime;
+    this.jobConfPath = jobConfPath;
+    this.category = EventCategory.JOB;
+  }
+
+  /** Constructor for deserialization via {@link #readFields(JsonParser)}. */
+  JobSubmittedEvent() {
+  }
+
+  /** Get the Job Id */
+  public JobID getJobId() { return jobid; }
+  /** Get the Job name */
+  public String getJobName() { return jobName; }
+  /** Get the user name */
+  public String getUserName() { return userName; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the submit time */
+  public long getSubmitTime() { return submitTime; }
+  /** Get the Path for the Job Configuration file */
+  public String getJobConfPath() { return jobConfPath; }
+  /** Get the event type */
+  public EventType getEventType() { return EventType.JOB_SUBMITTED; }
+
+  /**
+   * Populate this event from a serialized JSON object.
+   * @param jp parser positioned just before the event object
+   * @throws IOException if the stream is not a well-formed event object
+   */
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldName = jp.getCurrentName();
+      jp.nextToken(); // Move to the value
+      switch (Enum.valueOf(EventFields.class, fieldName)) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case JOB_ID: jobid = JobID.forName(jp.getText()); break;
+      case JOB_NAME: jobName = jp.getText(); break;
+      case USER_NAME: userName = jp.getText(); break;
+      // getLongValue() already returns long; the original's cast was redundant
+      case SUBMIT_TIME: submitTime = jp.getLongValue(); break;
+      case JOB_CONF_PATH: jobConfPath = jp.getText(); break;
+      default:
+        throw new IOException("Unrecognized field '"+fieldName+"'!");
+      }
+    }
+  }
+
+  /**
+   * Serialize this event as a JSON object.
+   * @param gen generator to write the event to
+   * @throws IOException on a write failure
+   */
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.JOB_ID.toString(), jobid.toString());
+    gen.writeStringField(EventFields.JOB_NAME.toString(), jobName);
+    gen.writeStringField(EventFields.USER_NAME.toString(), userName);
+    gen.writeNumberField(EventFields.SUBMIT_TIME.toString(), submitTime);
+    gen.writeStringField(EventFields.JOB_CONF_PATH.toString(), jobConfPath);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobUnsuccessfulCompletionEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobUnsuccessfulCompletionEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobUnsuccessfulCompletionEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/JobUnsuccessfulCompletionEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.JobID;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record Failed and Killed completion of jobs.
+ */
+public class JobUnsuccessfulCompletionEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private JobID jobid;
+  private long finishTime;
+  private int finishedMaps;
+  private int finishedReduces;
+  private String jobStatus;
+
+  enum EventFields { EVENT_CATEGORY,
+    JOB_ID,
+    FINISH_TIME,
+    FINISHED_MAPS,
+    FINISHED_REDUCES,
+    JOB_STATUS }
+
+  /**
+   * Create an event to record unsuccessful completion (killed/failed) of jobs
+   * @param id Job ID
+   * @param finishTime Finish time of the job
+   * @param finishedMaps Number of finished maps
+   * @param finishedReduces Number of finished reduces
+   * @param status Status of the job
+   */
+  public JobUnsuccessfulCompletionEvent(JobID id, long finishTime,
+      int finishedMaps,
+      int finishedReduces, String status) {
+    this.jobid = id;
+    this.finishTime = finishTime;
+    this.finishedMaps = finishedMaps;
+    this.finishedReduces = finishedReduces;
+    this.jobStatus = status;
+    this.category = EventCategory.JOB;
+  }
+
+  /** Constructor for deserialization via {@link #readFields(JsonParser)}. */
+  JobUnsuccessfulCompletionEvent() {
+  }
+
+  /** Get the Job ID */
+  public JobID getJobId() { return jobid; }
+  /** Get the job finish time */
+  public long getFinishTime() { return finishTime; }
+  /** Get the number of finished maps */
+  public int getFinishedMaps() { return finishedMaps; }
+  /** Get the number of finished reduces */
+  public int getFinishedReduces() { return finishedReduces; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the status */
+  public String getStatus() { return jobStatus; }
+  /**
+   * Get the event type. The type is derived from the status string,
+   * so it is only meaningful after construction or readFields().
+   */
+  public EventType getEventType() {
+    if ("FAILED".equals(jobStatus)) {
+      return EventType.JOB_FAILED;
+    } else {
+      return EventType.JOB_KILLED;
+    }
+  }
+
+  /**
+   * Populate this event from a serialized JSON object.
+   * @param jp parser positioned just before the event object
+   * @throws IOException if the stream is not a well-formed event object
+   */
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldName = jp.getCurrentName();
+      jp.nextToken(); // Move to the value
+      switch (Enum.valueOf(EventFields.class, fieldName)) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case JOB_ID:
+        jobid = JobID.forName(jp.getText());
+        break;
+      case FINISH_TIME:
+        finishTime = jp.getLongValue();
+        break;
+      case FINISHED_MAPS:
+        finishedMaps = jp.getIntValue();
+        break;
+      case FINISHED_REDUCES:
+        finishedReduces = jp.getIntValue();
+        break;
+      case JOB_STATUS:
+        jobStatus = jp.getText();
+        break;
+      default:
+        throw new IOException("Unrecognized field '"+fieldName+"'!");
+      }
+    }
+  }
+
+  /**
+   * Serialize this event as a JSON object.
+   * @param gen generator to write the event to
+   * @throws IOException on a write failure
+   */
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.JOB_ID.toString(), jobid.toString());
+    gen.writeNumberField(EventFields.FINISH_TIME.toString(), finishTime);
+    gen.writeNumberField(EventFields.FINISHED_MAPS.toString(), finishedMaps);
+    gen.writeNumberField(EventFields.FINISHED_REDUCES.toString(),
+        finishedReduces);
+    gen.writeStringField(EventFields.JOB_STATUS.toString(), jobStatus);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/MapAttemptFinishedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event recording the successful completion of a map attempt.
+ */
+public class MapAttemptFinishedEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private TaskID taskid;
+  private TaskAttemptID attemptId;
+  private TaskType taskType;
+  private String taskStatus;
+  private long mapFinishTime;
+  private long finishTime;
+  private String hostname;
+  private String state;
+  private Counters counters;
+
+  enum EventFields { EVENT_CATEGORY,
+                     TASK_ID,
+                     TASK_ATTEMPT_ID,
+                     TASK_TYPE,
+                     TASK_STATUS,
+                     MAP_FINISH_TIME,
+                     FINISH_TIME,
+                     HOSTNAME,
+                     STATE,
+                     COUNTERS }
+
+  /** Constructor used only when deserializing from JSON. */
+  MapAttemptFinishedEvent() {
+  }
+
+  /** 
+   * Create an event for successful completion of map attempts
+   * @param id Task Attempt ID
+   * @param taskType Type of the task
+   * @param taskStatus Status of the task
+   * @param mapFinishTime Finish time of the map phase
+   * @param finishTime Finish time of the attempt
+   * @param hostname Name of the host where the map executed
+   * @param state State string for the attempt
+   * @param counters Counters for the attempt
+   */
+  public MapAttemptFinishedEvent(TaskAttemptID id, 
+      TaskType taskType, String taskStatus, 
+      long mapFinishTime, long finishTime,
+      String hostname, String state, Counters counters) {
+    this.category = EventCategory.TASK_ATTEMPT;
+    this.attemptId = id;
+    this.taskid = id.getTaskID();
+    this.taskType = taskType;
+    this.taskStatus = taskStatus;
+    this.mapFinishTime = mapFinishTime;
+    this.finishTime = finishTime;
+    this.hostname = hostname;
+    this.state = state;
+    this.counters = counters;
+  }
+
+  /** Get the task ID */
+  public TaskID getTaskId() { return taskid; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the attempt id */
+  public TaskAttemptID getAttemptId() { return attemptId; }
+  /** Get the task type */
+  public TaskType getTaskType() { return taskType; }
+  /** Get the task status */
+  public String getTaskStatus() { return taskStatus; }
+  /** Get the map phase finish time */
+  public long getMapFinishTime() { return mapFinishTime; }
+  /** Get the attempt finish time */
+  public long getFinishTime() { return finishTime; }
+  /** Get the host name */
+  public String getHostname() { return hostname; }
+  /** Get the state string */
+  public String getState() { return state; }
+  /** Get the counters */
+  public Counters getCounters() { return counters; }
+  /** Get the event type */
+  public EventType getEventType() { return EventType.MAP_ATTEMPT_FINISHED; }
+
+  /** Fill in this event from the JSON object at the parser's position. */
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String field = jp.getCurrentName();
+      jp.nextToken(); // advance to the field's value
+      switch (EventFields.valueOf(field)) {
+      case EVENT_CATEGORY:
+        category = EventCategory.valueOf(jp.getText());
+        break;
+      case TASK_ID:
+        taskid = TaskID.forName(jp.getText());
+        break;
+      case TASK_ATTEMPT_ID:
+        attemptId = TaskAttemptID.forName(jp.getText());
+        break;
+      case TASK_TYPE:
+        taskType = TaskType.valueOf(jp.getText());
+        break;
+      case TASK_STATUS:
+        taskStatus = jp.getText();
+        break;
+      case MAP_FINISH_TIME:
+        mapFinishTime = jp.getLongValue();
+        break;
+      case FINISH_TIME:
+        finishTime = jp.getLongValue();
+        break;
+      case HOSTNAME:
+        hostname = jp.getText();
+        break;
+      case STATE:
+        state = jp.getText();
+        break;
+      case COUNTERS:
+        counters = EventReader.readCounters(jp);
+        break;
+      default:
+        throw new IOException("Unrecognized field '"+field+"'!");
+      }
+    }
+  }
+
+  /** Write this event out as a JSON object. */
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.TASK_ID.toString(), taskid.toString());
+    gen.writeStringField(EventFields.TASK_ATTEMPT_ID.toString(),
+        attemptId.toString());
+    gen.writeStringField(EventFields.TASK_TYPE.toString(),
+        taskType.toString());
+    gen.writeStringField(EventFields.TASK_STATUS.toString(), taskStatus);
+    gen.writeNumberField(EventFields.MAP_FINISH_TIME.toString(),
+        mapFinishTime);
+    gen.writeNumberField(EventFields.FINISH_TIME.toString(), finishTime);
+    gen.writeStringField(EventFields.HOSTNAME.toString(), hostname);
+    gen.writeStringField(EventFields.STATE.toString(), state);
+    EventWriter.writeCounters(counters, gen);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/ReduceAttemptFinishedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record successful completion of a reduce attempt
+ *
+ */
+public class ReduceAttemptFinishedEvent  implements HistoryEvent {
+
+  private EventCategory category;
+  private TaskID taskid;
+  private TaskAttemptID attemptId;
+  private TaskType taskType;
+  private String taskStatus;
+  private long shuffleFinishTime;
+  private long sortFinishTime;
+  private long finishTime;
+  private String hostname;
+  private String state;
+  private Counters counters;
+
+  enum EventFields { EVENT_CATEGORY,
+    TASK_ID,
+    TASK_ATTEMPT_ID,
+    TASK_TYPE,
+    TASK_STATUS,
+    SHUFFLE_FINISH_TIME,
+    SORT_FINISH_TIME,
+    FINISH_TIME,
+    HOSTNAME,
+    STATE,
+    COUNTERS }
+
+  ReduceAttemptFinishedEvent() {
+  }
+
+  /**
+   * Create an event to record completion of a reduce attempt
+   * @param id Attempt Id
+   * @param taskType Type of task
+   * @param taskStatus Status of the task
+   * @param shuffleFinishTime Finish time of the shuffle phase
+   * @param sortFinishTime Finish time of the sort phase
+   * @param finishTime Finish time of the attempt
+   * @param hostname Name of the host where the attempt executed
+   * @param state State of the attempt
+   * @param counters Counters for the attempt
+   */
+  public ReduceAttemptFinishedEvent(TaskAttemptID id, 
+      TaskType taskType, String taskStatus, 
+      long shuffleFinishTime, long sortFinishTime, 
+      long finishTime,
+      String hostname, String state, Counters counters) {
+    this.taskid = id.getTaskID();
+    this.attemptId = id;
+    this.taskType = taskType;
+    this.taskStatus = taskStatus;
+    this.shuffleFinishTime = shuffleFinishTime;
+    this.sortFinishTime = sortFinishTime;
+    this.finishTime = finishTime;
+    this.hostname = hostname;
+    this.state = state;
+    this.counters = counters;
+    this.category = EventCategory.TASK_ATTEMPT;
+  }
+
+  /** Get the Task ID */
+  public TaskID getTaskId() { return taskid; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the attempt id */
+  public TaskAttemptID getAttemptId() { return attemptId; }
+  /** Get the task type */
+  public TaskType getTaskType() { return taskType; }
+  /** Get the task status */
+  public String getTaskStatus() { return taskStatus; }
+  /** Get the finish time of the sort phase */
+  public long getSortFinishTime() { return sortFinishTime; }
+  /** Get the finish time of the shuffle phase */
+  public long getShuffleFinishTime() { return shuffleFinishTime; }
+  /** Get the finish time of the attempt */
+  public long getFinishTime() { return finishTime; }
+  /** Get the name of the host where the attempt ran */
+  public String getHostname() { return hostname; }
+  /** Get the state string */
+  public String getState() { return state; }
+  /** Get the counters for the attempt */
+  public Counters getCounters() { return counters; }
+  /** Get the event type */
+  public EventType getEventType() {
+    return EventType.REDUCE_ATTEMPT_FINISHED;
+  }
+
+
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldname = jp.getCurrentName();
+      jp.nextToken(); // move to value
+      switch (Enum.valueOf(EventFields.class, fieldname)) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case TASK_ID:
+        taskid = TaskID.forName(jp.getText());
+        break;
+      case TASK_ATTEMPT_ID:
+        attemptId = TaskAttemptID.forName(jp.getText());
+        break;
+      case TASK_TYPE:
+        taskType = TaskType.valueOf(jp.getText());
+        break;
+      case TASK_STATUS:
+        taskStatus = jp.getText();
+        break;
+      case SHUFFLE_FINISH_TIME:
+        shuffleFinishTime = jp.getLongValue();
+        break;
+      case SORT_FINISH_TIME:
+        sortFinishTime = jp.getLongValue();
+        break;
+      case FINISH_TIME:
+        finishTime = jp.getLongValue();
+        break;
+      case HOSTNAME:
+        hostname = jp.getText();
+        break;
+      case STATE:
+        state = jp.getText();
+        break;
+      case COUNTERS:
+        counters = EventReader.readCounters(jp);
+        break;
+      default: 
+        throw new IOException("Unrecognized field '"+fieldname+"'!");
+      }
+    }
+  }
+
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.TASK_ID.toString(), taskid.toString());
+    gen.writeStringField(EventFields.TASK_ATTEMPT_ID.toString(),
+        attemptId.toString());
+    gen.writeStringField(EventFields.TASK_TYPE.toString(),
+        taskType.toString());
+    gen.writeStringField(EventFields.TASK_STATUS.toString(),
+        taskStatus);
+    gen.writeNumberField(EventFields.SHUFFLE_FINISH_TIME.toString(),
+        shuffleFinishTime);
+    gen.writeNumberField(EventFields.SORT_FINISH_TIME.toString(),
+        sortFinishTime);
+    gen.writeNumberField(EventFields.FINISH_TIME.toString(), finishTime);
+    gen.writeStringField(EventFields.HOSTNAME.toString(), hostname);
+    gen.writeStringField(EventFields.STATE.toString(), state);
+    EventWriter.writeCounters(counters, gen);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptFinishedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record successful task completion
+ *
+ */
+public class TaskAttemptFinishedEvent  implements HistoryEvent {
+
+  private EventCategory category;
+  private TaskID taskid;
+  private TaskAttemptID attemptId;
+  private TaskType taskType;
+  private String taskStatus;
+  private long finishTime;
+  private String hostname;
+  private String state;
+  private Counters counters;
+
+  enum EventFields { EVENT_CATEGORY,
+    TASK_ID,
+    TASK_ATTEMPT_ID,
+    TASK_TYPE,
+    TASK_STATUS,
+    FINISH_TIME,
+    HOSTNAME,
+    STATE,
+    COUNTERS }
+
+  TaskAttemptFinishedEvent() {
+  }
+
+  /**
+   * Create an event to record successful finishes for setup and cleanup 
+   * attempts
+   * @param id Attempt ID
+   * @param taskType Type of task
+   * @param taskStatus Status of task
+   * @param finishTime Finish time of attempt
+   * @param hostname Host where the attempt executed
+   * @param state State string
+   * @param counters Counters for the attempt
+   */
+  public TaskAttemptFinishedEvent(TaskAttemptID id, 
+      TaskType taskType, String taskStatus, 
+      long finishTime,
+      String hostname, String state, Counters counters) {
+    this.taskid = id.getTaskID();
+    this.attemptId = id;
+    this.taskType = taskType;
+    this.taskStatus = taskStatus;
+    this.finishTime = finishTime;
+    this.hostname = hostname;
+    this.state = state;
+    this.counters = counters;
+    this.category = EventCategory.TASK_ATTEMPT;
+  }
+
+  /** Get the task ID */
+  public TaskID getTaskId() { return taskid; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the task attempt id */
+  public TaskAttemptID getAttemptId() { return attemptId; }
+  /** Get the task type */
+  public TaskType getTaskType() { return taskType; }
+  /** Get the task status */
+  public String getTaskStatus() { return taskStatus; }
+  /** Get the attempt finish time */
+  public long getFinishTime() { return finishTime; }
+  /** Get the host where the attempt executed */
+  public String getHostname() { return hostname; }
+  /** Get the state string */
+  public String getState() { return state; }
+  /** Get the counters for the attempt */
+  public Counters getCounters() { return counters; }
+  /** Get the event type */
+  public EventType getEventType() {
+    return EventType.MAP_ATTEMPT_FINISHED;
+  }
+
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldname = jp.getCurrentName();
+      jp.nextToken(); // move to value
+      switch (Enum.valueOf(EventFields.class, fieldname)) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case TASK_ID:
+        taskid = TaskID.forName(jp.getText());
+        break;
+      case TASK_ATTEMPT_ID: 
+        attemptId = TaskAttemptID.forName(jp.getText());
+        break;
+      case TASK_TYPE:
+        taskType = TaskType.valueOf(jp.getText());
+        break;
+      case TASK_STATUS:
+        taskStatus = jp.getText();
+        break;
+      case FINISH_TIME:
+        finishTime = jp.getLongValue();
+        break;
+      case HOSTNAME:
+        hostname = jp.getText();
+        break;
+      case STATE:
+        state = jp.getText();
+        break;
+      case COUNTERS:
+        counters = EventReader.readCounters(jp);
+        break;
+      default: 
+        throw new IOException("Unrecognized field '"+fieldname+"'!");
+      }
+    }
+  }
+
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.TASK_ID.toString(), taskid.toString());
+    gen.writeStringField(EventFields.TASK_ATTEMPT_ID.toString(),
+        attemptId.toString());
+    gen.writeStringField(EventFields.TASK_TYPE.toString(), 
+        taskType.toString());
+    gen.writeStringField(EventFields.TASK_STATUS.toString(),
+        taskStatus);
+    gen.writeNumberField(EventFields.FINISH_TIME.toString(), finishTime);
+    gen.writeStringField(EventFields.HOSTNAME.toString(), hostname);
+    gen.writeStringField(EventFields.STATE.toString(), state);
+    EventWriter.writeCounters(counters, gen);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record start of a task attempt
+ *
+ */
+public class TaskAttemptStartedEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private TaskID taskid;
+  private TaskType taskType;
+  private TaskAttemptID attemptId;
+  private  long startTime;
+  private  String trackerName;
+  private int httpPort;
+
+  enum EventFields { EVENT_CATEGORY,
+                     TASK_ID,
+                     TASK_TYPE,
+                     TASK_ATTEMPT_ID,
+                     START_TIME,
+                     TRACKER_NAME,
+                     HTTP_PORT }
+
+  /**
+   * Create an event to record the start of an attempt
+   * @param attemptId Id of the attempt
+   * @param taskType Type of task
+   * @param startTime Start time of the attempt
+   * @param trackerName Name of the Task Tracker where attempt is running
+   * @param httpPort The port number of the tracker
+   */
+  public TaskAttemptStartedEvent( TaskAttemptID attemptId,  
+      TaskType taskType, long startTime, String trackerName,
+      int httpPort) {
+    this.attemptId = attemptId;
+    this.taskid = attemptId.getTaskID();
+    this.startTime = startTime;
+    this.taskType = taskType;
+    this.trackerName = trackerName;
+    this.httpPort = httpPort;
+    this.category = EventCategory.TASK_ATTEMPT;
+  }
+
+  TaskAttemptStartedEvent() {
+  }
+
+  /** Get the task id */
+  public TaskID getTaskId() { return taskid; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the tracker name */
+  public String getTrackerName() { return trackerName; }
+  /** Get the start time */
+  public long getStartTime() { return startTime; }
+  /** Get the task type */
+  public TaskType getTaskType() { return taskType; }
+  /** Get the HTTP port */
+  public int getHttpPort() { return httpPort; }
+  /** Get the attempt id */
+  public TaskAttemptID getTaskAttemptId() { return attemptId; }
+  /** Get the event type */
+  public EventType getEventType() {
+    return EventType.MAP_ATTEMPT_STARTED;
+  }
+
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+    
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldName = jp.getCurrentName();
+      jp.nextToken(); // Move to the value
+      switch (Enum.valueOf(EventFields.class, fieldName)) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case TASK_ID:
+        taskid = TaskID.forName(jp.getText());
+        break;
+      case TASK_ATTEMPT_ID: 
+        attemptId = TaskAttemptID.forName(jp.getText());
+        break;
+      case TASK_TYPE:
+        taskType = TaskType.valueOf(jp.getText());
+        break;
+      case START_TIME:
+        startTime = jp.getLongValue();
+        break;
+      case TRACKER_NAME:
+        trackerName = jp.getText();
+        break;
+      case HTTP_PORT:
+        httpPort = jp.getIntValue();
+        break;
+      default: 
+        throw new IOException("Unrecognized field '"+fieldName+"'!");
+      }
+    }
+  }
+
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.TASK_ID.toString(), taskid.toString());
+    gen.writeStringField(EventFields.TASK_ATTEMPT_ID.toString(),
+        attemptId.toString());
+    gen.writeStringField(EventFields.TASK_TYPE.toString(),
+        taskType.toString());
+    gen.writeNumberField(EventFields.START_TIME.toString(), startTime);
+    gen.writeStringField(EventFields.TRACKER_NAME.toString(), trackerName);
+    gen.writeNumberField(EventFields.HTTP_PORT.toString(), httpPort);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record unsuccessful (Killed/Failed) completion of task attempts
+ *
+ */
+public class TaskAttemptUnsuccessfulCompletionEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private TaskID taskid;
+  private TaskType taskType;
+  private TaskAttemptID attemptId;
+  private  long finishTime;
+  private String hostname;
+  private String status;
+  private  String error;
+
+  enum EventFields { EVENT_CATEGORY,
+    TASK_ID,
+    TASK_TYPE,
+    TASK_ATTEMPT_ID,
+    FINISH_TIME,
+    HOSTNAME,
+    STATUS,
+    ERROR }
+
+  /** 
+   * Create an event to record the unsuccessful completion of attempts
+   * @param id Attempt ID
+   * @param taskType Type of the task
+   * @param status Status of the attempt
+   * @param finishTime Finish time of the attempt
+   * @param hostname Name of the host where the attempt executed
+   * @param error Error string
+   */
+  public TaskAttemptUnsuccessfulCompletionEvent(TaskAttemptID id, 
+      TaskType taskType,
+      String status, long finishTime, 
+      String hostname, String error) {
+    this.taskid = id.getTaskID();
+    this.taskType = taskType;
+    this.attemptId = id;
+    this.finishTime = finishTime;
+    this.hostname = hostname;
+    this.error = error;
+    this.status = status;
+    this.category = EventCategory.TASK_ATTEMPT;
+  }
+
+  TaskAttemptUnsuccessfulCompletionEvent() {
+  }
+
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the task id */
+  public TaskID getTaskId() { return taskid; }
+  /** Get the task type */
+  public TaskType getTaskType() { return taskType; }
+  /** Get the attempt id */
+  public TaskAttemptID getTaskAttemptId() { return attemptId; }
+  /** Get the finish time */
+  public long getFinishTime() { return finishTime; }
+  /** Get the name of the host where the attempt executed */
+  public String getHostname() { return hostname; }
+  /** Get the error string */
+  public String getError() { return error; }
+  /** Get the task status */
+  public String getTaskStatus() { return status; }
+  /** Get the event type */
+  public EventType getEventType() {
+    return EventType.MAP_ATTEMPT_KILLED;
+  }
+
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldName = jp.getCurrentName();
+      jp.nextToken(); // Move to the value
+      switch (Enum.valueOf(EventFields.class, fieldName)) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case TASK_ID:
+        taskid = TaskID.forName(jp.getText());
+        break;
+      case TASK_TYPE: 
+        taskType = TaskType.valueOf(jp.getText());
+        break;
+      case TASK_ATTEMPT_ID: 
+        attemptId = TaskAttemptID.forName(jp.getText());
+        break;
+      case FINISH_TIME:
+        finishTime = jp.getLongValue();
+        break;
+      case HOSTNAME:
+        hostname = jp.getText();
+        break;
+      case ERROR:
+        error = jp.getText();
+        break;
+      case STATUS:
+        status = jp.getText();
+        break;
+      default: 
+        throw new IOException("Unrecognized field '"+fieldName+"'!");
+      }
+    }
+  }
+
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.TASK_ID.toString(), taskid.toString());
+    gen.writeStringField(EventFields.TASK_TYPE.toString(),
+        taskType.toString());
+    gen.writeStringField(EventFields.TASK_ATTEMPT_ID.toString(),
+        attemptId.toString());
+    gen.writeNumberField(EventFields.FINISH_TIME.toString(), finishTime);
+    gen.writeStringField(EventFields.HOSTNAME.toString(), hostname);
+    gen.writeStringField(EventFields.ERROR.toString(), error);
+    gen.writeStringField(EventFields.STATUS.toString(), status);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFailedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFailedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFailedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFailedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record the failure of a task
+ *
+ */
+public class TaskFailedEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private TaskID taskid;
+  private TaskType taskType;
+  private  long finishTime;
+  private  String error;
+  private TaskAttemptID failedDueToAttempt;
+  private String status;
+
+  enum EventFields { EVENT_CATEGORY,
+                     TASK_ID,
+                     TASK_TYPE,
+                     FINISH_TIME,
+                     ERROR,
+                     STATUS,
+                     FAILED_ATTEMPT_ID }
+
+  /**
+   * Create an event to record task failure
+   * @param id Task ID
+   * @param finishTime Finish time of the task
+   * @param taskType Type of the task
+   * @param error Error String
+   * @param status Status
+   * @param failedDueToAttempt The attempt id due to which the task failed
+   */
+  public TaskFailedEvent(TaskID id, long finishTime, 
+      TaskType taskType, String error, String status,
+      TaskAttemptID failedDueToAttempt) {
+    this.taskid = id;
+    this.error = error;
+    this.finishTime = finishTime;
+    this.taskType = taskType;
+    this.failedDueToAttempt = failedDueToAttempt;
+    this.category = EventCategory.TASK;
+    this.status = status;
+  }
+
+  TaskFailedEvent() {
+  }
+
+  /** Get the task id */
+  public TaskID getTaskId() { return taskid; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the error string */
+  public String getError() { return error; }
+  /** Get the finish time of the attempt */
+  public long getFinishTime() { return finishTime; }
+  /** Get the task type */
+  public TaskType getTaskType() { return taskType; }
+  /** Get the attempt id due to which the task failed */
+  public TaskAttemptID getFailedAttemptID() { return failedDueToAttempt; }
+  /** Get the task status */
+  public String getTaskStatus() { return status; }
+  /** Get the event type */
+  public EventType getEventType() { return EventType.TASK_FAILED; }
+
+  
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+    
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldName = jp.getCurrentName();
+      jp.nextToken(); // Move to the value
+      switch (Enum.valueOf(EventFields.class, fieldName)) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case TASK_ID:
+        taskid = TaskID.forName(jp.getText());
+        break;
+      case TASK_TYPE:
+        taskType = TaskType.valueOf(jp.getText());
+        break;
+      case FINISH_TIME:
+        finishTime = jp.getLongValue();
+        break;
+      case ERROR:
+        error = jp.getText();
+        break;
+      case STATUS:
+        status = jp.getText();
+        break;
+      case FAILED_ATTEMPT_ID:
+        failedDueToAttempt = TaskAttemptID.forName(jp.getText());
+        break;
+      default: 
+        throw new IOException("Unrecognized field '"+fieldName+"'!");
+      }
+    }
+  }
+
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.TASK_ID.toString(), taskid.toString());
+    gen.writeStringField(EventFields.TASK_TYPE.toString(),
+        taskType.toString());
+    gen.writeNumberField(EventFields.FINISH_TIME.toString(), finishTime);
+    gen.writeStringField(EventFields.ERROR.toString(), error);
+    gen.writeStringField(EventFields.STATUS.toString(), status);
+    if (failedDueToAttempt != null) {
+      gen.writeStringField(EventFields.FAILED_ATTEMPT_ID.toString(),
+          failedDueToAttempt.toString());
+    }
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskFinishedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record the successful completion of a task.
+ * Instances are serialized to and deserialized from job history files
+ * as a single JSON object via the Jackson streaming API.
+ */
+public class TaskFinishedEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private TaskID taskid;
+  private TaskType taskType;
+  private long finishTime;
+  private String status;
+  private Counters counters;
+
+  // JSON field names used by readFields()/writeFields().
+  enum EventFields { EVENT_CATEGORY,
+                     TASK_ID,
+                     TASK_TYPE,
+                     FINISH_TIME,
+                     STATUS,
+                     COUNTERS }
+
+  TaskFinishedEvent() {
+  }
+
+  /**
+   * Create an event to record the successful completion of a task
+   * @param id Task ID
+   * @param finishTime Finish time of the task
+   * @param taskType Type of the task
+   * @param status Status string
+   * @param counters Counters for the task
+   */
+  public TaskFinishedEvent(TaskID id, long finishTime,
+                           TaskType taskType,
+                           String status, Counters counters) {
+    this.taskid = id;
+    this.finishTime = finishTime;
+    this.counters = counters;
+    this.taskType = taskType;
+    this.status = status;
+    this.category = EventCategory.TASK;
+  }
+
+  /** Get task id */
+  public TaskID getTaskId() { return taskid; }
+  /** Get the task finish time */
+  public long getFinishTime() { return finishTime; }
+  /** Get task counters */
+  public Counters getCounters() { return counters; }
+  /** Get task type */
+  public TaskType getTaskType() { return taskType; }
+  /** Get task status */
+  public String getTaskStatus() { return status; }
+  /** Get event type */
+  public EventType getEventType() {
+    return EventType.TASK_FINISHED;
+  }
+  /** Get Event Category */
+  public EventCategory getEventCategory() { return category; }
+
+  /**
+   * Deserialize this event's fields from the given parser.
+   * @throws IOException if the stream is malformed or contains an
+   *         unrecognized field name
+   */
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldname = jp.getCurrentName();
+      jp.nextToken(); // move to value
+      // Enum.valueOf() throws an unchecked IllegalArgumentException for an
+      // unknown name, which would bypass the intended IOException below.
+      // Translate the lookup failure explicitly.
+      EventFields field;
+      try {
+        field = EventFields.valueOf(fieldname);
+      } catch (IllegalArgumentException iae) {
+        throw new IOException("Unrecognized field '"+fieldname+"'!");
+      }
+      switch (field) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case TASK_ID:
+        taskid = TaskID.forName(jp.getText());
+        break;
+      case TASK_TYPE:
+        taskType = TaskType.valueOf(jp.getText());
+        break;
+      case FINISH_TIME:
+        finishTime = jp.getLongValue();
+        break;
+      case STATUS:
+        status = jp.getText();
+        break;
+      case COUNTERS:
+        counters = EventReader.readCounters(jp);
+        break;
+      default:
+        throw new IOException("Unrecognized field '"+fieldname+"'!");
+      }
+    }
+  }
+
+  /**
+   * Serialize this event as a single JSON object.
+   */
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.TASK_ID.toString(), taskid.toString());
+    gen.writeStringField(EventFields.TASK_TYPE.toString(),
+        taskType.toString());
+    gen.writeNumberField(EventFields.FINISH_TIME.toString(), finishTime);
+    gen.writeStringField(EventFields.STATUS.toString(), status);
+    // Guard against an NPE for an event constructed without counters,
+    // mirroring the optional-field pattern used by sibling event classes;
+    // readFields() already tolerates the field's absence.
+    if (counters != null) {
+      EventWriter.writeCounters(counters, gen);
+    }
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskStartedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskStartedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskStartedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskStartedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record the start of a task.
+ * Instances are serialized to and deserialized from job history files
+ * as a single JSON object via the Jackson streaming API.
+ */
+public class TaskStartedEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private TaskID taskid;
+  private TaskType taskType;
+  private long startTime;
+  private String splitLocations;
+
+  // JSON field names used by readFields()/writeFields().
+  enum EventFields { EVENT_CATEGORY,
+                     TASK_ID,
+                     TASK_TYPE,
+                     START_TIME,
+                     SPLIT_LOCATIONS }
+
+  /**
+   * Create an event to record start of a task
+   * @param id Task Id
+   * @param startTime Start time of the task
+   * @param taskType Type of the task
+   * @param splitLocations Split locations, applicable for map tasks
+   */
+  public TaskStartedEvent(TaskID id, long startTime, 
+      TaskType taskType, String splitLocations) {
+    this.taskid = id;
+    this.splitLocations = splitLocations;
+    this.startTime = startTime;
+    this.taskType = taskType;
+    this.category = EventCategory.TASK;
+  }
+
+  TaskStartedEvent() {
+  }
+
+  /** Get the task id */
+  public TaskID getTaskId() { return taskid; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the split locations, applicable for map tasks */
+  public String getSplitLocations() { return splitLocations; }
+  /** Get the start time of the task */
+  public long getStartTime() { return startTime; }
+  /** Get the task type */
+  public TaskType getTaskType() { return taskType; }
+  /** Get the event type */
+  public EventType getEventType() {
+    return EventType.TASK_STARTED;
+  }
+
+  /**
+   * Deserialize this event's fields from the given parser.
+   * @throws IOException if the stream is malformed or contains an
+   *         unrecognized field name
+   */
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldName = jp.getCurrentName();
+      jp.nextToken(); // Move to the value
+      // Enum.valueOf() throws an unchecked IllegalArgumentException for an
+      // unknown name, which would bypass the intended IOException below.
+      // Translate the lookup failure explicitly.
+      EventFields field;
+      try {
+        field = EventFields.valueOf(fieldName);
+      } catch (IllegalArgumentException iae) {
+        throw new IOException("Unrecognized field '"+fieldName+"'!");
+      }
+      switch (field) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case TASK_ID:
+        taskid = TaskID.forName(jp.getText());
+        break;
+      case TASK_TYPE:
+        taskType = TaskType.valueOf(jp.getText());
+        break;
+      case START_TIME:
+        startTime = jp.getLongValue();
+        break;
+      case SPLIT_LOCATIONS:
+        splitLocations = jp.getText();
+        break;
+      default:
+        throw new IOException("Unrecognized field '"+fieldName+"'!");
+      }
+    }
+  }
+
+  /**
+   * Serialize this event as a single JSON object.
+   */
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.TASK_ID.toString(), taskid.toString());
+    gen.writeStringField(EventFields.TASK_TYPE.toString(),
+        taskType.toString());
+    gen.writeNumberField(EventFields.START_TIME.toString(), startTime);
+    gen.writeStringField(EventFields.SPLIT_LOCATIONS.toString(),
+        splitLocations);
+    gen.writeEndObject();
+  }
+}

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskUpdatedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskUpdatedEvent.java?rev=816052&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskUpdatedEvent.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/jobhistory/TaskUpdatedEvent.java Thu Sep 17 05:04:21 2009
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.jobhistory;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapreduce.TaskID;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+
+/**
+ * Event to record updates to a task.
+ * Instances are serialized to and deserialized from job history files
+ * as a single JSON object via the Jackson streaming API.
+ */
+public class TaskUpdatedEvent implements HistoryEvent {
+
+  private EventCategory category;
+  private TaskID taskid;
+  private long finishTime;
+
+  // JSON field names used by readFields()/writeFields().
+  enum EventFields { EVENT_CATEGORY,
+                     TASK_ID,
+                     FINISH_TIME }
+
+  /**
+   * Create an event to record task updates
+   * @param id Id of the task
+   * @param finishTime Finish time of the task
+   */
+  public TaskUpdatedEvent(TaskID id, long finishTime) {
+    this.taskid = id;
+    this.finishTime = finishTime;
+    this.category = EventCategory.TASK;
+  }
+
+  TaskUpdatedEvent() {
+  }
+  /** Get the task ID */
+  public TaskID getTaskId() { return taskid; }
+  /** Get the event category */
+  public EventCategory getEventCategory() { return category; }
+  /** Get the task finish time */
+  public long getFinishTime() { return finishTime; }
+  /** Get the event type */
+  public EventType getEventType() {
+    return EventType.TASK_UPDATED;
+  }
+
+  /**
+   * Deserialize this event's fields from the given parser.
+   * @throws IOException if the stream is malformed or contains an
+   *         unrecognized field name
+   */
+  public void readFields(JsonParser jp) throws IOException {
+    if (jp.nextToken() != JsonToken.START_OBJECT) {
+      throw new IOException("Unexpected Token while reading");
+    }
+
+    while (jp.nextToken() != JsonToken.END_OBJECT) {
+      String fieldName = jp.getCurrentName();
+      jp.nextToken(); // Move to the value
+      // Enum.valueOf() throws an unchecked IllegalArgumentException for an
+      // unknown name, which would bypass the intended IOException below.
+      // Translate the lookup failure explicitly.
+      EventFields field;
+      try {
+        field = EventFields.valueOf(fieldName);
+      } catch (IllegalArgumentException iae) {
+        throw new IOException("Unrecognized field '"+fieldName+"'!");
+      }
+      switch (field) {
+      case EVENT_CATEGORY:
+        category = Enum.valueOf(EventCategory.class, jp.getText());
+        break;
+      case TASK_ID:
+        taskid = TaskID.forName(jp.getText());
+        break;
+      case FINISH_TIME:
+        finishTime = jp.getLongValue();
+        break;
+      default:
+        throw new IOException("Unrecognized field '"+fieldName+"'!");
+      }
+    }
+  }
+
+  /**
+   * Serialize this event as a single JSON object.
+   */
+  public void writeFields(JsonGenerator gen) throws IOException {
+    gen.writeStartObject();
+    gen.writeStringField(EventFields.EVENT_CATEGORY.toString(),
+        category.toString());
+    gen.writeStringField(EventFields.TASK_ID.toString(), taskid.toString());
+    gen.writeNumberField(EventFields.FINISH_TIME.toString(), finishTime);
+    gen.writeEndObject();
+  }
+}

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/FakeObjectUtilities.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/FakeObjectUtilities.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/FakeObjectUtilities.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/FakeObjectUtilities.java Thu Sep 17 05:04:21 2009
@@ -26,9 +26,12 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.TaskStatus.Phase;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
 
 /** 
  * Utilities used in unit test.
@@ -76,6 +79,7 @@
       this.profile = new JobProfile(jobConf.getUser(), getJobID(), 
           jobFile.toString(), null, jobConf.getJobName(),
           jobConf.getQueueName());
+      this.jobHistory = new FakeJobHistory();
     }
 
     @Override
@@ -233,4 +237,28 @@
     sendHeartBeat(jt, null, true, tracker, (short) 0);
   }
 
+  /**
+   * A {@link JobHistory} whose overridden lifecycle and logging methods
+   * are all empty, so the fake job/tracker objects in this file record
+   * no history events during tests.
+   */
+  static class FakeJobHistory extends JobHistory {
+    @Override
+    public void init(JobTracker jt, 
+        JobConf conf,
+        String hostname, 
+        long jobTrackerStartTime) throws IOException { }
+    
+    @Override
+    public void initDone(JobConf conf, FileSystem fs) throws IOException { }
+    
+    @Override
+    public void markCompleted(org.apache.hadoop.mapreduce.JobID id)
+    throws IOException { }
+    
+    @Override
+    public void shutDown() { }
+
+    @Override
+    public void 
+    logEvent(HistoryEvent event, org.apache.hadoop.mapreduce.JobID id) { }
+    
+    @Override
+    public void closeWriter(org.apache.hadoop.mapreduce.JobID id) { }
+  }
 }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java?rev=816052&r1=816051&r2=816052&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java Thu Sep 17 05:04:21 2009
@@ -64,7 +64,7 @@
   /**
    * An inner class that runs a job tracker.
    */
-  class JobTrackerRunner implements Runnable {
+  public class JobTrackerRunner implements Runnable {
     private JobTracker tracker = null;
     private volatile boolean isActive = true;
     



Mime
View raw message