hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From vino...@apache.org
Subject svn commit: r1294419 - in /hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/ hadoop-mapreduce-client/hadoop-mapreduce-client-hs/sr...
Date Tue, 28 Feb 2012 00:33:33 GMT
Author: vinodkv
Date: Tue Feb 28 00:33:33 2012
New Revision: 1294419

URL: http://svn.apache.org/viewvc?rev=1294419&view=rev
Log:
MAPREDUCE-3901. Modified JobHistory records in YARN to lazily load job and task reports so
as to improve UI response times. Contributed by Siddarth Seth.
svn merge --ignore-ancestry -c 1294417 ../../trunk

Added:
    hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
      - copied unchanged from r1294417, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
    hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist
      - copied unchanged from r1294417, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist
    hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml
      - copied unchanged from r1294417, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml
Modified:
    hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
    hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
    hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
    hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
    hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java

Modified: hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/CHANGES.txt?rev=1294419&r1=1294418&r2=1294419&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/CHANGES.txt Tue Feb 28 00:33:33
2012
@@ -28,6 +28,9 @@ Release 0.23.2 - UNRELEASED
 
   OPTIMIZATIONS
 
+    MAPREDUCE-3901. Modified JobHistory records in YARN to lazily load job and
+    task reports so as to improve UI response times. (Siddarth Seth via vinodkv)
+
   BUG FIXES
 
     MAPREDUCE-3918  proc_historyserver no longer in command line arguments for

Modified: hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java?rev=1294419&r1=1294418&r2=1294419&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
(original)
+++ hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
Tue Feb 28 00:33:33 2012
@@ -30,6 +30,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;
 
 public class MRBuilderUtils {
@@ -41,6 +42,11 @@ public class MRBuilderUtils {
     return jobId;
   }
 
+  public static JobId newJobId(long clusterTs, int appIdInt, int id) {
+    ApplicationId appId = BuilderUtils.newApplicationId(clusterTs, appIdInt);
+    return MRBuilderUtils.newJobId(appId, id);
+  }
+
   public static TaskId newTaskId(JobId jobId, int id, TaskType taskType) {
     TaskId taskId = Records.newRecord(TaskId.class);
     taskId.setJobId(jobId);

Modified: hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java?rev=1294419&r1=1294418&r2=1294419&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
(original)
+++ hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
Tue Feb 28 00:33:33 2012
@@ -19,13 +19,16 @@
 package org.apache.hadoop.mapreduce.v2.hs;
 
 import java.io.IOException;
-import java.util.ArrayList;
+import java.net.UnknownHostException;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -34,6 +37,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobACLsManager;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
@@ -54,7 +58,7 @@ import org.apache.hadoop.mapreduce.v2.ut
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.yarn.YarnException;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.Records;
 
 
 /**
@@ -64,50 +68,31 @@ import org.apache.hadoop.yarn.factory.pr
 public class CompletedJob implements org.apache.hadoop.mapreduce.v2.app.job.Job {
   
   static final Log LOG = LogFactory.getLog(CompletedJob.class);
-  private final Counters counters;
   private final Configuration conf;
-  private final JobId jobId;
-  private final List<String> diagnostics = new ArrayList<String>();
-  private final JobReport report;
-  private final Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
-  private final Map<TaskId, Task> mapTasks = new HashMap<TaskId, Task>();
-  private final Map<TaskId, Task> reduceTasks = new HashMap<TaskId, Task>();
-  private final String user;
+  private final JobId jobId; //Can be picked from JobInfo with a conversion.
+  private final String user; //Can be picked up from JobInfo
   private final Path confFile;
-  private JobACLsManager aclsMgr;
-  private List<TaskAttemptCompletionEvent> completionEvents = null;
   private JobInfo jobInfo;
-
+  private JobReport report;
+  AtomicBoolean tasksLoaded = new AtomicBoolean(false);
+  private Lock tasksLock = new ReentrantLock();
+  private Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
+  private Map<TaskId, Task> mapTasks = new HashMap<TaskId, Task>();
+  private Map<TaskId, Task> reduceTasks = new HashMap<TaskId, Task>();
+  private List<TaskAttemptCompletionEvent> completionEvents = null;
+  private JobACLsManager aclsMgr;
+  
+  
   public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
       boolean loadTasks, String userName, Path confFile, JobACLsManager aclsMgr) 
           throws IOException {
     LOG.info("Loading job: " + jobId + " from file: " + historyFile);
     this.conf = conf;
     this.jobId = jobId;
+    this.user = userName;
     this.confFile = confFile;
     this.aclsMgr = aclsMgr;
-    
     loadFullHistoryData(loadTasks, historyFile);
-    user = userName;
-    counters = jobInfo.getTotalCounters();
-    diagnostics.add(jobInfo.getErrorInfo());
-    report =
-        RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
-            JobReport.class);
-    report.setJobId(jobId);
-    report.setJobState(JobState.valueOf(jobInfo.getJobStatus()));
-    report.setSubmitTime(jobInfo.getSubmitTime());
-    report.setStartTime(jobInfo.getLaunchTime());
-    report.setFinishTime(jobInfo.getFinishTime());
-    report.setJobName(jobInfo.getJobname());
-    report.setUser(jobInfo.getUsername());
-    report.setMapProgress((float) getCompletedMaps() / getTotalMaps());
-    report.setReduceProgress((float) getCompletedReduces() / getTotalReduces());
-    report.setJobFile(confFile.toString());
-    report.setTrackingUrl(JobHistoryUtils.getHistoryUrl(conf, TypeConverter
-        .toYarn(TypeConverter.fromYarn(jobId)).getAppId()));
-    report.setAMInfos(getAMInfos());
-    report.setIsUber(isUber());
   }
 
   @Override
@@ -122,7 +107,7 @@ public class CompletedJob implements org
 
   @Override
   public Counters getAllCounters() {
-    return counters;
+    return jobInfo.getTotalCounters();
   }
 
   @Override
@@ -131,10 +116,36 @@ public class CompletedJob implements org
   }
 
   @Override
-  public JobReport getReport() {
+  public synchronized JobReport getReport() {
+    if (report == null) {
+      constructJobReport();
+    }
     return report;
   }
 
+  private void constructJobReport() {
+    report = Records.newRecord(JobReport.class);
+    report.setJobId(jobId);
+    report.setJobState(JobState.valueOf(jobInfo.getJobStatus()));
+    report.setSubmitTime(jobInfo.getSubmitTime());
+    report.setStartTime(jobInfo.getLaunchTime());
+    report.setFinishTime(jobInfo.getFinishTime());
+    report.setJobName(jobInfo.getJobname());
+    report.setUser(jobInfo.getUsername());
+    report.setMapProgress((float) getCompletedMaps() / getTotalMaps());
+    report.setReduceProgress((float) getCompletedReduces() / getTotalReduces());
+    report.setJobFile(confFile.toString());
+    String historyUrl = "N/A";
+    try {
+      historyUrl = JobHistoryUtils.getHistoryUrl(conf, jobId.getAppId());
+    } catch (UnknownHostException e) {
+      //Ignore.
+    }
+    report.setTrackingUrl(historyUrl);
+    report.setAMInfos(getAMInfos());
+    report.setIsUber(isUber());
+  }
+
   @Override
   public float getProgress() {
     return 1.0f;
@@ -142,16 +153,23 @@ public class CompletedJob implements org
 
   @Override
   public JobState getState() {
-    return report.getJobState();
+    return JobState.valueOf(jobInfo.getJobStatus());
   }
 
   @Override
   public Task getTask(TaskId taskId) {
-    return tasks.get(taskId);
+    if (tasksLoaded.get()) {
+      return tasks.get(taskId);
+    } else {
+      TaskID oldTaskId = TypeConverter.fromYarn(taskId);
+      CompletedTask completedTask =
+          new CompletedTask(taskId, jobInfo.getAllTasks().get(oldTaskId));
+      return completedTask;
+    }
   }
 
   @Override
-  public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
+  public synchronized TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
       int fromEventId, int maxEvents) {
     if (completionEvents == null) {
       constructTaskAttemptCompletionEvents();
@@ -167,6 +185,7 @@ public class CompletedJob implements org
   }
 
   private void constructTaskAttemptCompletionEvents() {
+    loadAllTasks();
     completionEvents = new LinkedList<TaskAttemptCompletionEvent>();
     List<TaskAttempt> allTaskAttempts = new LinkedList<TaskAttempt>();
     for (TaskId taskId : tasks.keySet()) {
@@ -205,8 +224,8 @@ public class CompletedJob implements org
     int eventId = 0;
     for (TaskAttempt taskAttempt : allTaskAttempts) {
 
-      TaskAttemptCompletionEvent tace = RecordFactoryProvider.getRecordFactory(
-          null).newRecordInstance(TaskAttemptCompletionEvent.class);
+      TaskAttemptCompletionEvent tace =
+          Records.newRecord(TaskAttemptCompletionEvent.class);
 
       int attemptRunTime = -1;
       if (taskAttempt.getLaunchTime() != 0 && taskAttempt.getFinishTime() != 0) {
@@ -237,15 +256,42 @@ public class CompletedJob implements org
 
   @Override
   public Map<TaskId, Task> getTasks() {
+    loadAllTasks();
     return tasks;
   }
 
+  private void loadAllTasks() {
+    if (tasksLoaded.get()) {
+      return;
+    }
+    tasksLock.lock();
+    try {
+      if (tasksLoaded.get()) {
+        return;
+      }
+      for (Map.Entry<TaskID, TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
+        TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
+        TaskInfo taskInfo = entry.getValue();
+        Task task = new CompletedTask(yarnTaskID, taskInfo);
+        tasks.put(yarnTaskID, task);
+        if (task.getType() == TaskType.MAP) {
+          mapTasks.put(task.getID(), task);
+        } else if (task.getType() == TaskType.REDUCE) {
+          reduceTasks.put(task.getID(), task);
+        }
+      }
+      tasksLoaded.set(true);
+    } finally {
+      tasksLock.unlock();
+    }
+  }
+
  //History data is lazily loaded when task level data is requested
   private synchronized void loadFullHistoryData(boolean loadTasks,
       Path historyFileAbsolute) throws IOException {
     LOG.info("Loading history file: [" + historyFileAbsolute + "]");
-    if (jobInfo != null) {
-      return; //data already loaded
+    if (this.jobInfo != null) {
+      return;
     }
     
     if (historyFileAbsolute != null) {
@@ -254,7 +300,7 @@ public class CompletedJob implements org
         parser =
             new JobHistoryParser(historyFileAbsolute.getFileSystem(conf),
                 historyFileAbsolute);
-        jobInfo = parser.parse();
+        this.jobInfo = parser.parse();
       } catch (IOException e) {
         throw new YarnException("Could not load history file "
             + historyFileAbsolute, e);
@@ -268,27 +314,15 @@ public class CompletedJob implements org
     } else {
       throw new IOException("History file not found");
     }
-    
     if (loadTasks) {
-    for (Map.Entry<org.apache.hadoop.mapreduce.TaskID, TaskInfo> entry : jobInfo
-        .getAllTasks().entrySet()) {
-      TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
-      TaskInfo taskInfo = entry.getValue();
-      Task task = new CompletedTask(yarnTaskID, taskInfo);
-      tasks.put(yarnTaskID, task);
-      if (task.getType() == TaskType.MAP) {
-        mapTasks.put(task.getID(), task);
-      } else if (task.getType() == TaskType.REDUCE) {
-        reduceTasks.put(task.getID(), task);
-      }
-    }
-    }
-    LOG.info("TaskInfo loaded");
+      loadAllTasks();
+      LOG.info("TaskInfo loaded");
+    }    
   }
 
   @Override
   public List<String> getDiagnostics() {
-    return diagnostics;
+    return Collections.singletonList(jobInfo.getErrorInfo());
   }
 
   @Override
@@ -318,6 +352,7 @@ public class CompletedJob implements org
 
   @Override
   public Map<TaskId, Task> getTasks(TaskType taskType) {
+    loadAllTasks();
     if (TaskType.MAP.equals(taskType)) {
       return mapTasks;
     } else {//we have only two types of tasks

Modified: hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java?rev=1294419&r1=1294418&r2=1294419&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
(original)
+++ hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
Tue Feb 28 00:33:33 2012
@@ -20,10 +20,13 @@ package org.apache.hadoop.mapreduce.v2.h
 
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
@@ -35,59 +38,24 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.Records;
 
 public class CompletedTask implements Task {
 
-
-  private final TaskType type;
-  private Counters counters;
-  private final long startTime;
-  private final long finishTime;
-  private TaskState state;
   private final TaskId taskId;
-  private final TaskReport report;
+  private final TaskInfo taskInfo;
+  private TaskReport report;
+  private TaskAttemptId successfulAttempt;
+  private List<String> reportDiagnostics = new LinkedList<String>();
+  private Lock taskAttemptsLock = new ReentrantLock();
+  private AtomicBoolean taskAttemptsLoaded = new AtomicBoolean(false);
   private final Map<TaskAttemptId, TaskAttempt> attempts =
     new LinkedHashMap<TaskAttemptId, TaskAttempt>();
-  
-  private static final Log LOG = LogFactory.getLog(CompletedTask.class);
 
   CompletedTask(TaskId taskId, TaskInfo taskInfo) {
     //TODO JobHistoryParser.handleTaskFailedAttempt should use state from the event.
-    LOG.debug("HandlingTaskId: [" + taskId + "]");
+    this.taskInfo = taskInfo;
     this.taskId = taskId;
-    this.startTime = taskInfo.getStartTime();
-    this.finishTime = taskInfo.getFinishTime();
-    this.type = TypeConverter.toYarn(taskInfo.getTaskType());
-    if (taskInfo.getCounters() != null)
-      this.counters = taskInfo.getCounters();
-    if (taskInfo.getTaskStatus() != null) {
-      this.state = TaskState.valueOf(taskInfo.getTaskStatus());
-    } else {
-      this.state = TaskState.KILLED;
-    }
-    report = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskReport.class);
-    for (TaskAttemptInfo attemptHistory : taskInfo.getAllTaskAttempts()
-        .values()) {
-      CompletedTaskAttempt attempt = new CompletedTaskAttempt(taskId, 
-          attemptHistory);
-      report.addAllDiagnostics(attempt.getDiagnostics()); //TODO TMI?
-      attempts.put(attempt.getID(), attempt);
-      if (attemptHistory.getTaskStatus() != null
-          && attemptHistory.getTaskStatus().equals(
-              TaskState.SUCCEEDED.toString())
-          && report.getSuccessfulAttempt() == null) {
-        report.setSuccessfulAttempt(TypeConverter.toYarn(attemptHistory
-            .getAttemptId()));
-      }
-    }
-    report.setTaskId(taskId);
-    report.setStartTime(startTime);
-    report.setFinishTime(finishTime);
-    report.setTaskState(state);
-    report.setProgress(getProgress());
-    report.setCounters(TypeConverter.toYarn(getCounters()));
-    report.addAllRunningAttempts(new ArrayList<TaskAttemptId>(attempts.keySet()));
   }
 
   @Override
@@ -97,17 +65,19 @@ public class CompletedTask implements Ta
 
   @Override
   public TaskAttempt getAttempt(TaskAttemptId attemptID) {
+    loadAllTaskAttempts();
     return attempts.get(attemptID);
   }
 
   @Override
   public Map<TaskAttemptId, TaskAttempt> getAttempts() {
+    loadAllTaskAttempts();
     return attempts;
   }
 
   @Override
   public Counters getCounters() {
-    return counters;
+    return taskInfo.getCounters();
   }
 
   @Override
@@ -121,13 +91,18 @@ public class CompletedTask implements Ta
   }
 
   @Override
-  public TaskReport getReport() {
+  public synchronized TaskReport getReport() {
+    if (report == null) {
+      constructTaskReport();
+    }
     return report;
   }
+  
 
+  
   @Override
   public TaskType getType() {
-    return type;
+    return TypeConverter.toYarn(taskInfo.getTaskType());
   }
 
   @Override
@@ -137,7 +112,54 @@ public class CompletedTask implements Ta
 
   @Override
   public TaskState getState() {
-    return state;
+    return taskInfo.getTaskStatus() == null ? TaskState.KILLED : TaskState
+        .valueOf(taskInfo.getTaskStatus());
   }
 
+  private void constructTaskReport() {
+    loadAllTaskAttempts();
+    this.report = Records.newRecord(TaskReport.class);
+    report.setTaskId(taskId);
+    report.setStartTime(taskInfo.getStartTime());
+    report.setFinishTime(taskInfo.getFinishTime());
+    report.setTaskState(getState());
+    report.setProgress(getProgress());
+    report.setCounters(TypeConverter.toYarn(getCounters()));
+    if (successfulAttempt != null) {
+      report.setSuccessfulAttempt(successfulAttempt);
+    }
+    report.addAllDiagnostics(reportDiagnostics);
+    report
+        .addAllRunningAttempts(new ArrayList<TaskAttemptId>(attempts.keySet()));
+  }
+
+  private void loadAllTaskAttempts() {
+    if (taskAttemptsLoaded.get()) {
+      return;
+    }
+    taskAttemptsLock.lock();
+    try {
+      if (taskAttemptsLoaded.get()) {
+        return;
+      }
+
+      for (TaskAttemptInfo attemptHistory : taskInfo.getAllTaskAttempts()
+          .values()) {
+        CompletedTaskAttempt attempt =
+            new CompletedTaskAttempt(taskId, attemptHistory);
+        reportDiagnostics.addAll(attempt.getDiagnostics());
+        attempts.put(attempt.getID(), attempt);
+        if (successfulAttempt == null
+            && attemptHistory.getTaskStatus() != null
+            && attemptHistory.getTaskStatus().equals(
+                TaskState.SUCCEEDED.toString())) {
+          successfulAttempt =
+              TypeConverter.toYarn(attemptHistory.getAttemptId());
+        }
+      }
+      taskAttemptsLoaded.set(true);
+    } finally {
+      taskAttemptsLock.unlock();
+    }
+  }
 }

Modified: hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java?rev=1294419&r1=1294418&r2=1294419&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
(original)
+++ hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
Tue Feb 28 00:33:33 2012
@@ -30,25 +30,21 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.Records;
 
 public class CompletedTaskAttempt implements TaskAttempt {
 
   private final TaskAttemptInfo attemptInfo;
   private final TaskAttemptId attemptId;
-  private Counters counters;
   private final TaskAttemptState state;
-  private final TaskAttemptReport report;
   private final List<String> diagnostics = new ArrayList<String>();
+  private TaskAttemptReport report;
 
   private String localDiagMessage;
 
   CompletedTaskAttempt(TaskId taskId, TaskAttemptInfo attemptInfo) {
     this.attemptInfo = attemptInfo;
     this.attemptId = TypeConverter.toYarn(attemptInfo.getAttemptId());
-    if (attemptInfo.getCounters() != null) {
-      this.counters = attemptInfo.getCounters();
-    }
     if (attemptInfo.getTaskStatus() != null) {
       this.state = TaskAttemptState.valueOf(attemptInfo.getTaskStatus());
     } else {
@@ -56,37 +52,9 @@ public class CompletedTaskAttempt implem
       localDiagMessage = "Attmpt state missing from History : marked as KILLED";
       diagnostics.add(localDiagMessage);
     }
-    
     if (attemptInfo.getError() != null) {
       diagnostics.add(attemptInfo.getError());
     }
-    
-    report = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptReport.class);
-    
-    report.setTaskAttemptId(attemptId);
-    report.setTaskAttemptState(state);
-    report.setProgress(getProgress());
-    report.setStartTime(attemptInfo.getStartTime());
-    
-    report.setFinishTime(attemptInfo.getFinishTime());
-    report.setShuffleFinishTime(attemptInfo.getShuffleFinishTime());
-    report.setSortFinishTime(attemptInfo.getSortFinishTime());
-    if (localDiagMessage != null) {
-      report.setDiagnosticInfo(attemptInfo.getError() + ", " + localDiagMessage);
-    } else {
-    report.setDiagnosticInfo(attemptInfo.getError());
-    }
-//    report.setPhase(attemptInfo.get); //TODO
-    report.setStateString(attemptInfo.getState());
-    report.setCounters(TypeConverter.toYarn(getCounters()));
-    report.setContainerId(attemptInfo.getContainerId());
-    if (attemptInfo.getHostname() == null) {
-      report.setNodeManagerHost("UNKNOWN");
-    } else {
-      report.setNodeManagerHost(attemptInfo.getHostname());
-      report.setNodeManagerPort(attemptInfo.getPort());
-    }
-    report.setNodeManagerHttpPort(attemptInfo.getHttpPort());
   }
 
   @Override
@@ -111,7 +79,7 @@ public class CompletedTaskAttempt implem
 
   @Override
   public Counters getCounters() {
-    return counters;
+    return attemptInfo.getCounters();
   }
 
   @Override
@@ -125,7 +93,10 @@ public class CompletedTaskAttempt implem
   }
 
   @Override
-  public TaskAttemptReport getReport() {
+  public synchronized TaskAttemptReport getReport() {
+    if (report == null) {
+      constructTaskAttemptReport();
+    }
     return report;
   }
 
@@ -146,26 +117,55 @@ public class CompletedTaskAttempt implem
 
   @Override
   public long getLaunchTime() {
-    return report.getStartTime();
+    return attemptInfo.getStartTime();
   }
 
   @Override
   public long getFinishTime() {
-    return report.getFinishTime();
+    return attemptInfo.getFinishTime();
   }
   
   @Override
   public long getShuffleFinishTime() {
-    return report.getShuffleFinishTime();
+    return attemptInfo.getShuffleFinishTime();
   }
 
   @Override
   public long getSortFinishTime() {
-    return report.getSortFinishTime();
+    return attemptInfo.getSortFinishTime();
   }
 
   @Override
   public int getShufflePort() {
-    throw new UnsupportedOperationException("Not supported yet.");
+    return attemptInfo.getShufflePort();
+  }
+
+  private void constructTaskAttemptReport() {
+    report = Records.newRecord(TaskAttemptReport.class);
+
+    report.setTaskAttemptId(attemptId);
+    report.setTaskAttemptState(state);
+    report.setProgress(getProgress());
+    report.setStartTime(attemptInfo.getStartTime());
+    report.setFinishTime(attemptInfo.getFinishTime());
+    report.setShuffleFinishTime(attemptInfo.getShuffleFinishTime());
+    report.setSortFinishTime(attemptInfo.getSortFinishTime());
+    if (localDiagMessage != null) {
+      report
+          .setDiagnosticInfo(attemptInfo.getError() + ", " + localDiagMessage);
+    } else {
+      report.setDiagnosticInfo(attemptInfo.getError());
+    }
+    // report.setPhase(attemptInfo.get); //TODO
+    report.setStateString(attemptInfo.getState());
+    report.setCounters(TypeConverter.toYarn(getCounters()));
+    report.setContainerId(attemptInfo.getContainerId());
+    if (attemptInfo.getHostname() == null) {
+      report.setNodeManagerHost("UNKNOWN");
+    } else {
+      report.setNodeManagerHost(attemptInfo.getHostname());
+      report.setNodeManagerPort(attemptInfo.getPort());
+    }
+    report.setNodeManagerHttpPort(attemptInfo.getHttpPort());
   }
 }

Modified: hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java?rev=1294419&r1=1294418&r2=1294419&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
(original)
+++ hadoop/common/branches/branch-0.23.2/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
Tue Feb 28 00:33:33 2012
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -117,9 +118,8 @@ public class JobHistory extends Abstract
   
   //Maintains a list of known done subdirectories. Not currently used.
   private final Set<Path> existingDoneSubdirs = new HashSet<Path>();
-  
-  private final SortedMap<JobId, Job> loadedJobCache = 
-    new ConcurrentSkipListMap<JobId, Job>();
+
+  private Map<JobId, Job> loadedJobCache = null;
 
   /**
    * Maintains a mapping between intermediate user directories and the last 
@@ -167,6 +167,7 @@ public class JobHistory extends Abstract
    * .....${DONE_DIR}/VERSION_STRING/YYYY/MM/DD/HH/SERIAL_NUM/jh{index_entries}.jhist
    */
 
+  @SuppressWarnings("serial")
   @Override
   public void init(Configuration conf) throws YarnException {
     LOG.info("JobHistory Init");
@@ -224,6 +225,16 @@ public class JobHistory extends Abstract
             DEFAULT_MOVE_THREAD_INTERVAL);
     numMoveThreads = conf.getInt(JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
         DEFAULT_MOVE_THREAD_COUNT);
+    
+    loadedJobCache =
+        Collections.synchronizedMap(new LinkedHashMap<JobId, Job>(
+            loadedJobCacheSize + 1, 0.75f, true) {
+          @Override
+          public boolean removeEldestEntry(final Map.Entry<JobId, Job> eldest) {
+            return super.size() > loadedJobCacheSize;
+          }
+        });
+    
     try {
       initExisting();
     } catch (IOException e) {
@@ -465,9 +476,6 @@ public class JobHistory extends Abstract
       LOG.debug("Adding "+job.getID()+" to loaded job cache");
     }
     loadedJobCache.put(job.getID(), job);
-    if (loadedJobCache.size() > loadedJobCacheSize ) {
-      loadedJobCache.remove(loadedJobCache.firstKey());
-    }
   }
   
   
@@ -655,7 +663,7 @@ public class JobHistory extends Abstract
     synchronized(metaInfo) {
       try {
         Job job = new CompletedJob(conf, metaInfo.getJobIndexInfo().getJobId(), 
-            metaInfo.getHistoryFile(), true, metaInfo.getJobIndexInfo().getUser(),
+            metaInfo.getHistoryFile(), false, metaInfo.getJobIndexInfo().getUser(),
             metaInfo.getConfFile(), this.aclsMgr);
         addToLoadedJobCache(job);
         return job;
@@ -938,7 +946,7 @@ public class JobHistory extends Abstract
     LOG.debug("Called getAllJobs()");
     return getAllJobsInternal();
   }
-  
+
   static class MetaInfo {
     private Path historyFile;
     private Path confFile; 



Mime
View raw message