hadoop-mapreduce-commits mailing list archives

From: a..@apache.org
Subject: svn commit: r1179484 [2/6] - in /hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/ hadoop-mapreduce-client/hadoop-mapreduce-client-a...
Date: Thu, 06 Oct 2011 01:16:57 GMT
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java Thu Oct  6 01:16:48 2011
@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.JobPriority;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
@@ -44,7 +45,7 @@ import org.apache.hadoop.mapreduce.v2.ut
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.QueueState;
@@ -55,7 +56,7 @@ import org.apache.hadoop.yarn.factory.pr
 public class TypeConverter {
 
   private static RecordFactory recordFactory;
-  
+
   static {
     recordFactory = RecordFactoryProvider.getRecordFactory(null);
   }
@@ -74,7 +75,7 @@ public class TypeConverter {
   public static JobId toYarn(org.apache.hadoop.mapreduce.JobID id) {
     JobId jobId = recordFactory.newRecordInstance(JobId.class);
     jobId.setId(id.getId()); //currently there is 1-1 mapping between appid and jobid
-    
+
     ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
     appId.setId(id.getId());
     appId.setClusterTimestamp(toClusterTimeStamp(id.getJtIdentifier()));
@@ -136,7 +137,7 @@ public class TypeConverter {
     }
     return TaskAttemptState.valueOf(state.toString());
   }
-  
+
   public static Phase toYarn(org.apache.hadoop.mapred.TaskStatus.Phase phase) {
     switch (phase) {
     case STARTING:
@@ -160,7 +161,7 @@ public class TypeConverter {
     TaskCompletionEvent[] oldEvents =
         new TaskCompletionEvent[newEvents.length];
     int i = 0;
-    for (TaskAttemptCompletionEvent newEvent 
+    for (TaskAttemptCompletionEvent newEvent
         : newEvents) {
       oldEvents[i++] = fromYarn(newEvent);
     }
@@ -214,19 +215,19 @@ public class TypeConverter {
     taskAttemptId.setId(id.getId());
     return taskAttemptId;
   }
-  
+
   public static org.apache.hadoop.mapreduce.Counters fromYarn(
       Counters yCntrs) {
     if (yCntrs == null) {
       return null;
     }
-    org.apache.hadoop.mapreduce.Counters counters = 
+    org.apache.hadoop.mapreduce.Counters counters =
       new org.apache.hadoop.mapreduce.Counters();
     for (CounterGroup yGrp : yCntrs.getAllCounterGroups().values()) {
       counters.addGroup(yGrp.getName(), yGrp.getDisplayName());
       for (Counter yCntr : yGrp.getAllCounters().values()) {
-        org.apache.hadoop.mapreduce.Counter c = 
-          counters.findCounter(yGrp.getName(), 
+        org.apache.hadoop.mapreduce.Counter c =
+          counters.findCounter(yGrp.getName(),
               yCntr.getName());
         c.setValue(yCntr.getValue());
       }
@@ -280,29 +281,27 @@ public class TypeConverter {
     return yCntrs;
   }
   
-  public static org.apache.hadoop.mapred.JobStatus fromYarn(
-      JobReport jobreport, String jobFile) {
+  public static JobStatus fromYarn(JobReport jobreport, String trackingUrl) {
     JobPriority jobPriority = JobPriority.NORMAL;
-    org.apache.hadoop.mapred.JobStatus jobStatus =
-        new org.apache.hadoop.mapred.JobStatus(fromYarn(jobreport.getJobId()),
-            jobreport.getSetupProgress(), jobreport.getMapProgress(),
-            jobreport.getReduceProgress(), jobreport.getCleanupProgress(),
-            fromYarn(jobreport.getJobState()),
-            jobPriority, jobreport.getUser(), jobreport.getJobName(),
-            jobFile, jobreport.getTrackingUrl());
+    JobStatus jobStatus = new org.apache.hadoop.mapred.JobStatus(
+        fromYarn(jobreport.getJobId()), jobreport.getSetupProgress(), jobreport
+            .getMapProgress(), jobreport.getReduceProgress(), jobreport
+            .getCleanupProgress(), fromYarn(jobreport.getJobState()),
+        jobPriority, jobreport.getUser(), jobreport.getJobName(), jobreport
+            .getJobFile(), trackingUrl);
     jobStatus.setFailureInfo(jobreport.getDiagnostics());
     return jobStatus;
   }
-  
+
   public static org.apache.hadoop.mapreduce.QueueState fromYarn(
       QueueState state) {
-    org.apache.hadoop.mapreduce.QueueState qState = 
+    org.apache.hadoop.mapreduce.QueueState qState =
       org.apache.hadoop.mapreduce.QueueState.getState(
         state.toString().toLowerCase());
     return qState;
   }
 
-  
+
   public static int fromYarn(JobState state) {
     switch (state) {
     case NEW:
@@ -340,7 +339,7 @@ public class TypeConverter {
     }
     throw new YarnException("Unrecognized task state: " + state);
   }
-  
+
   public static TaskReport fromYarn(org.apache.hadoop.mapreduce.v2.api.records.TaskReport report) {
     String[] diagnostics = null;
     if (report.getDiagnosticsList() != null) {
@@ -352,14 +351,14 @@ public class TypeConverter {
     } else {
       diagnostics = new String[0];
     }
-    
-    TaskReport rep = new TaskReport(fromYarn(report.getTaskId()), 
+
+    TaskReport rep = new TaskReport(fromYarn(report.getTaskId()),
         report.getProgress(), report.getTaskState().toString(),
       diagnostics, fromYarn(report.getTaskState()), report.getStartTime(), report.getFinishTime(),
       fromYarn(report.getCounters()));
-    List<org.apache.hadoop.mapreduce.TaskAttemptID> runningAtts 
+    List<org.apache.hadoop.mapreduce.TaskAttemptID> runningAtts
           = new ArrayList<org.apache.hadoop.mapreduce.TaskAttemptID>();
-    for (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId id 
+    for (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId id
         : report.getRunningAttemptsList()) {
       runningAtts.add(fromYarn(id));
     }
@@ -369,7 +368,7 @@ public class TypeConverter {
     }
     return rep;
   }
-  
+
   public static List<TaskReport> fromYarn(
       List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports) {
     List<TaskReport> reports = new ArrayList<TaskReport>();
@@ -378,14 +377,14 @@ public class TypeConverter {
     }
     return reports;
   }
-  
-  public static JobStatus.State fromYarn(ApplicationState state) {
+
+  public static JobStatus.State fromYarn(YarnApplicationState state) {
     switch (state) {
     case SUBMITTED:
       return State.PREP;
     case RUNNING:
       return State.RUNNING;
-    case SUCCEEDED:
+    case FINISHED:
       return State.SUCCEEDED;
     case FAILED:
       return State.FAILED;
@@ -397,7 +396,7 @@ public class TypeConverter {
 
   private static final String TT_NAME_PREFIX = "tracker_";
   public static TaskTrackerInfo fromYarn(NodeReport node) {
-    TaskTrackerInfo taskTracker = 
+    TaskTrackerInfo taskTracker =
       new TaskTrackerInfo(TT_NAME_PREFIX + node.getNodeId().toString());
     return taskTracker;
   }
@@ -418,7 +417,7 @@ public class TypeConverter {
       new JobStatus(
           TypeConverter.fromYarn(application.getApplicationId()),
           0.0f, 0.0f, 0.0f, 0.0f,
-          TypeConverter.fromYarn(application.getState()),
+          TypeConverter.fromYarn(application.getYarnApplicationState()),
           org.apache.hadoop.mapreduce.JobPriority.NORMAL,
           application.getUser(), application.getName(),
           application.getQueue(), jobFile, trackingUrl
@@ -434,7 +433,7 @@ public class TypeConverter {
     List<JobStatus> jobStatuses = new ArrayList<JobStatus>();
     for (ApplicationReport application : applications) {
       // each applicationReport has its own jobFile
-      org.apache.hadoop.mapreduce.JobID jobId = 
+      org.apache.hadoop.mapreduce.JobID jobId =
           TypeConverter.fromYarn(application.getApplicationId());
       jobStatuses.add(TypeConverter.fromYarn(application,
           MRApps.getJobFile(conf, application.getUser(), jobId)));
@@ -442,14 +441,14 @@ public class TypeConverter {
     return jobStatuses.toArray(new JobStatus[jobStatuses.size()]);
   }
 
-  
-  public static QueueInfo fromYarn(org.apache.hadoop.yarn.api.records.QueueInfo 
+
+  public static QueueInfo fromYarn(org.apache.hadoop.yarn.api.records.QueueInfo
       queueInfo, Configuration conf) {
     return new QueueInfo(queueInfo.getQueueName(),queueInfo.toString(),
         fromYarn(queueInfo.getQueueState()), TypeConverter.fromYarnApps(
         queueInfo.getApplications(), conf));
   }
-  
+
   public static QueueInfo[] fromYarnQueueInfo(
       List<org.apache.hadoop.yarn.api.records.QueueInfo> queues,
       Configuration conf) {
@@ -468,9 +467,9 @@ public class TypeConverter {
       for (QueueACL qAcl : aclInfo.getUserAcls()) {
         operations.add(qAcl.toString());
       }
-      
-      QueueAclsInfo acl = 
-        new QueueAclsInfo(aclInfo.getQueueName(), 
+
+      QueueAclsInfo acl =
+        new QueueAclsInfo(aclInfo.getQueueName(),
             operations.toArray(new String[operations.size()]));
       acls.add(acl);
     }
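
[Note: the import swap above tracks a YARN API rename: ApplicationState became
YarnApplicationState, and the terminal success state is now FINISHED rather than
SUCCEEDED, which is why the switch in fromYarn gains a FINISHED case that still
maps to State.SUCCEEDED. A minimal sketch of the mapping from the client side,
assuming only the classes touched in this hunk:

    import org.apache.hadoop.mapreduce.TypeConverter;
    import org.apache.hadoop.yarn.api.records.YarnApplicationState;

    public class StateMapping {
      public static void main(String[] args) {
        // FINISHED (formerly SUCCEEDED) still maps to the old SUCCEEDED job state.
        System.out.println(TypeConverter.fromYarn(YarnApplicationState.FINISHED));  // SUCCEEDED
        System.out.println(TypeConverter.fromYarn(YarnApplicationState.SUBMITTED)); // PREP
      }
    }
]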

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobReport.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobReport.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobReport.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobReport.java Thu Oct  6 01:16:48 2011
@@ -31,6 +31,7 @@ public interface JobReport {
   public abstract String getJobName();
   public abstract String getTrackingUrl();
   public abstract String getDiagnostics();
+  public abstract String getJobFile();
 
   public abstract void setJobId(JobId jobId);
   public abstract void setJobState(JobState jobState);
@@ -44,4 +45,5 @@ public interface JobReport {
   public abstract void setJobName(String jobName);
   public abstract void setTrackingUrl(String trackingUrl);
   public abstract void setDiagnostics(String diagnostics);
+  public abstract void setJobFile(String jobFile);
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java Thu Oct  6 01:16:48 2011
@@ -229,7 +229,19 @@ public class JobReportPBImpl extends Pro
     maybeInitBuilder();
     builder.setDiagnostics(diagnostics);
   }
+  
+  @Override
+  public String getJobFile() {
+    JobReportProtoOrBuilder p = viaProto ? proto : builder;
+    return p.getJobFile();
+  }
 
+  @Override
+  public void setJobFile(String jobFile) {
+    maybeInitBuilder();
+    builder.setJobFile(jobFile);
+  }
+  
   private JobIdPBImpl convertFromProtoFormat(JobIdProto p) {
     return new JobIdPBImpl(p);
   }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java Thu Oct  6 01:16:48 2011
@@ -55,7 +55,7 @@ public class MRBuilderUtils {
   public static JobReport newJobReport(JobId jobId, String jobName,
       String userName, JobState state, long startTime, long finishTime,
       float setupProgress, float mapProgress, float reduceProgress,
-      float cleanupProgress) {
+      float cleanupProgress, String jobFile) {
     JobReport report = Records.newRecord(JobReport.class);
     report.setJobId(jobId);
     report.setJobName(jobName);
@@ -67,6 +67,7 @@ public class MRBuilderUtils {
     report.setCleanupProgress(cleanupProgress);
     report.setMapProgress(mapProgress);
     report.setReduceProgress(reduceProgress);
+    report.setJobFile(jobFile);
     return report;
   }
 }
\ No newline at end of file
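
[Note: newJobReport now threads the job configuration path through to the report,
so callers supply it alongside the progress values. A hedged usage sketch; the
name, user, and path below are placeholders, not values from this commit:

    import org.apache.hadoop.mapreduce.v2.api.records.JobId;
    import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
    import org.apache.hadoop.mapreduce.v2.api.records.JobState;
    import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
    import org.apache.hadoop.yarn.util.Records;

    public class ReportExample {
      public static void main(String[] args) {
        JobId jobId = Records.newRecord(JobId.class);  // id fields left at defaults
        JobReport report = MRBuilderUtils.newJobReport(
            jobId, "wordcount", "alice", JobState.RUNNING,
            0L, 0L,                  // startTime, finishTime
            1.0f, 0.5f, 0.0f, 0.0f,  // setup, map, reduce, cleanup progress
            "/user/alice/.staging/job_0001/job.xml");  // the new jobFile argument
        System.out.println(report.getJobFile());
      }
    }
]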

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_protos.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_protos.proto?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_protos.proto (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/proto/mr_protos.proto Thu Oct  6 01:16:48 2011
@@ -145,6 +145,7 @@ message JobReportProto {
   optional string jobName = 10;
   optional string trackingUrl = 11;
   optional string diagnostics = 12;
+  optional string jobFile = 13;
 }
 
 enum TaskAttemptCompletionEventStatusProto {
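
[Note: because jobFile is a proto2 optional field with a fresh tag number, the
wire format stays compatible in both directions: an old reader skips tag 13, and
a new reader sees the empty-string default when the writer predates the field.
A sketch against the generated class; the class name follows the usual
MRProtos.* generated naming and is assumed here:

    import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobReportProto;

    public class ProtoCompat {
      public static void main(String[] args) {
        // A message written before this change has no jobFile field set.
        JobReportProto old = JobReportProto.newBuilder()
            .setJobName("wordcount").build();
        System.out.println(old.hasJobFile()); // false
        System.out.println(old.getJobFile()); // "" (proto2 optional default)
      }
    }
]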

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java Thu Oct  6 01:16:48 2011
@@ -21,7 +21,7 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl;
@@ -35,11 +35,11 @@ public class TestTypeConverter {
   @Test
   public void testFromYarn() throws Exception {
     int appStartTime = 612354;
-    ApplicationState state = ApplicationState.RUNNING;
+    YarnApplicationState state = YarnApplicationState.RUNNING;
     ApplicationId applicationId = new ApplicationIdPBImpl();
     ApplicationReportPBImpl applicationReport = new ApplicationReportPBImpl();
     applicationReport.setApplicationId(applicationId);
-    applicationReport.setState(state);
+    applicationReport.setYarnApplicationState(state);
     applicationReport.setStartTime(appStartTime);
     applicationReport.setUser("TestTypeConverter-user");
     JobStatus jobStatus = TypeConverter.fromYarn(applicationReport, "dummy-jobfile");
@@ -56,7 +56,7 @@ public class TestTypeConverter {
     ApplicationReport mockReport = mock(ApplicationReport.class);
     when(mockReport.getTrackingUrl()).thenReturn("dummy-tracking-url");
     when(mockReport.getApplicationId()).thenReturn(mockAppId);
-    when(mockReport.getState()).thenReturn(ApplicationState.KILLED);
+    when(mockReport.getYarnApplicationState()).thenReturn(YarnApplicationState.KILLED);
     when(mockReport.getUser()).thenReturn("dummy-user");
     when(mockReport.getQueue()).thenReturn("dummy-queue");
     String jobFile = "dummy-path/job.xml";

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/avro/Events.avpr
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/avro/Events.avpr?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/avro/Events.avpr (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/avro/Events.avpr Thu Oct  6 01:16:48 2011
@@ -64,7 +64,8 @@
           {"name": "launchTime", "type": "long"},
           {"name": "totalMaps", "type": "int"},
           {"name": "totalReduces", "type": "int"},
-          {"name": "jobStatus", "type": "string"}
+          {"name": "jobStatus", "type": "string"},
+          {"name": "uberized", "type": "boolean"}
       ]
      },
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java Thu Oct  6 01:16:48 2011
@@ -462,8 +462,6 @@ public class Job extends JobContextImpl 
     sb.append(status.getReduceProgress()).append("\n");
     sb.append("Job state: ");
     sb.append(status.getState()).append("\n");
-    sb.append("history URL: ");
-    sb.append(status.getHistoryFile()).append("\n");
     sb.append("retired: ").append(status.isRetired()).append("\n");
     sb.append("reason for failure: ").append(reasonforFailure);
     return sb.toString();

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java Thu Oct  6 01:16:48 2011
@@ -473,4 +473,6 @@ public interface MRJobConfig {
   public static final String MAPREDUCE_V2_CHILD_CLASS = 
       "org.apache.hadoop.mapred.YarnChild";
 
+  public static final String APPLICATION_ATTEMPT_ID =
+      "mapreduce.job.application.attempt.id";
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/OutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/OutputCommitter.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/OutputCommitter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/OutputCommitter.java Thu Oct  6 01:16:48 2011
@@ -143,4 +143,35 @@ public abstract class OutputCommitter {
    */
   public abstract void abortTask(TaskAttemptContext taskContext)
   throws IOException;
+
+  /**
+   * Is task output recovery supported for restarting jobs?
+   * 
+   * If task output recovery is supported, job restart can be done more 
+   * efficiently.
+   * 
+   * @return <code>true</code> if task output recovery is supported,
+   *         <code>false</code> otherwise
+   * @see #recoverTask(TaskAttemptContext)         
+   */
+  public boolean isRecoverySupported() {
+    return false;
+  }
+  
+  /**
+   * Recover the task output. 
+   * 
+   * The retry-count for the job will be passed via the 
+   * {@link MRJobConfig#APPLICATION_ATTEMPT_ID} key in  
+   * {@link TaskAttemptContext#getConfiguration()} for the 
+   * <code>OutputCommitter</code>.
+   * 
+   * If an exception is thrown the task will be attempted again. 
+   * 
+   * @param taskContext Context of the task whose output is being recovered
+   * @throws IOException
+   */
+  public void recoverTask(TaskAttemptContext taskContext)
+  throws IOException
+  {}
 }
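
[Note: together with the new MRJobConfig.APPLICATION_ATTEMPT_ID key above, these
hooks let a committer opt into recovery across ApplicationMaster restarts. A
minimal sketch of a committer that opts in; only the two overridden hooks come
from this patch, the recovery body is hypothetical:

    import java.io.IOException;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.MRJobConfig;
    import org.apache.hadoop.mapreduce.OutputCommitter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    public class RecoverableCommitter extends OutputCommitter {
      @Override public void setupJob(JobContext ctx) throws IOException { }
      @Override public void setupTask(TaskAttemptContext ctx) throws IOException { }
      @Override public boolean needsTaskCommit(TaskAttemptContext ctx)
          throws IOException { return false; }
      @Override public void commitTask(TaskAttemptContext ctx) throws IOException { }
      @Override public void abortTask(TaskAttemptContext ctx) throws IOException { }

      @Override
      public boolean isRecoverySupported() {
        return true; // a restarted job may call recoverTask() instead of rerunning
      }

      @Override
      public void recoverTask(TaskAttemptContext ctx) throws IOException {
        // The AM retry count arrives via the new configuration key; 0 means
        // this is the first attempt and there is nothing to recover.
        int attempt = ctx.getConfiguration()
            .getInt(MRJobConfig.APPLICATION_ATTEMPT_ID, 0);
        if (attempt - 1 < 0) {
          throw new IOException("No previous attempt to recover from");
        }
        // Hypothetical: republish output produced under attempt - 1 here.
      }
    }
]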

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java Thu Oct  6 01:16:48 2011
@@ -302,6 +302,7 @@ public class JobHistoryParser {
     info.launchTime = event.getLaunchTime();
     info.totalMaps = event.getTotalMaps();
     info.totalReduces = event.getTotalReduces();
+    info.uberized = event.getUberized();
   }
 
   private void handleJobInfoChangeEvent(JobInfoChangeEvent event) {
@@ -346,6 +347,7 @@ public class JobHistoryParser {
     Map<JobACL, AccessControlList> jobACLs;
     
     Map<TaskID, TaskInfo> tasksMap;
+    boolean uberized;
     
     /** Create a job info object where job information will be stored
      * after a parse
@@ -373,7 +375,8 @@ public class JobHistoryParser {
       System.out.println("MAP_COUNTERS:" + mapCounters.toString());
       System.out.println("REDUCE_COUNTERS:" + reduceCounters.toString());
       System.out.println("TOTAL_COUNTERS: " + totalCounters.toString());
-      
+      System.out.println("UBERIZED: " + uberized);
+
       for (TaskInfo ti: tasksMap.values()) {
         ti.printAll();
       }
@@ -421,6 +424,8 @@ public class JobHistoryParser {
     /** @return the priority of this job */
     public String getPriority() { return priority.toString(); }
     public Map<JobACL, AccessControlList> getJobACLs() { return jobACLs; }
+    /** @return the uberized status of this job */
+    public boolean getUberized() { return uberized; }
   }
   
   /**

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java Thu Oct  6 01:16:48 2011
@@ -42,14 +42,16 @@ public class JobInitedEvent implements H
    * @param totalMaps
    * @param totalReduces
    * @param jobStatus
+   * @param uberized True if the job's map and reduce stages were combined
    */
   public JobInitedEvent(JobID id, long launchTime, int totalMaps,
-                        int totalReduces, String jobStatus) {
+                        int totalReduces, String jobStatus, boolean uberized) {
     datum.jobid = new Utf8(id.toString());
     datum.launchTime = launchTime;
     datum.totalMaps = totalMaps;
     datum.totalReduces = totalReduces;
     datum.jobStatus = new Utf8(jobStatus);
+    datum.uberized = uberized;
   }
 
   JobInitedEvent() { }
@@ -67,9 +69,10 @@ public class JobInitedEvent implements H
   public int getTotalReduces() { return datum.totalReduces; }
   /** Get the status */
   public String getStatus() { return datum.jobStatus.toString(); }
- /** Get the event type */
+  /** Get the event type */
   public EventType getEventType() {
     return EventType.JOB_INITED;
   }
-
+  /** Get whether the job's map and reduce stages were combined */
+  public boolean getUberized() { return datum.uberized; }
 }
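
[Note: the constructor grows a matching uberized argument so history files record
whether the job's map and reduce stages were combined, per the javadoc above. A
sketch of emitting the event; the id, counts, and status string are illustrative:

    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.jobhistory.JobInitedEvent;

    public class EmitInited {
      public static void main(String[] args) {
        JobInitedEvent event = new JobInitedEvent(
            JobID.forName("job_201110060116_0001"),
            System.currentTimeMillis(),  // launchTime
            4, 1,                        // totalMaps, totalReduces
            "INITED",                    // jobStatus
            false);                      // uberized: stages not combined
        System.out.println(event.getUberized());
      }
    }
]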

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java Thu Oct  6 01:16:48 2011
@@ -35,7 +35,6 @@ import org.apache.hadoop.mapreduce.MRJob
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.util.StringUtils;
 
 /** An {@link OutputCommitter} that commits files specified 
  * in job output directory i.e. ${mapreduce.output.fileoutputformat.outputdir}. 
@@ -69,9 +68,8 @@ public class FileOutputCommitter extends
       this.outputPath = outputPath;
       outputFileSystem = outputPath.getFileSystem(context.getConfiguration());
       workPath = new Path(outputPath,
-                          (FileOutputCommitter.TEMP_DIR_NAME + Path.SEPARATOR +
-                           "_" + context.getTaskAttemptID().toString()
-                           )).makeQualified(outputFileSystem);
+                          getTaskAttemptBaseDirName(context))
+                          .makeQualified(outputFileSystem);
     }
   }
 
@@ -82,7 +80,8 @@ public class FileOutputCommitter extends
    */
   public void setupJob(JobContext context) throws IOException {
     if (outputPath != null) {
-      Path tmpDir = new Path(outputPath, FileOutputCommitter.TEMP_DIR_NAME);
+      Path tmpDir = new Path(outputPath, getJobAttemptBaseDirName(context) + 
+    		  Path.SEPARATOR + FileOutputCommitter.TEMP_DIR_NAME);
       FileSystem fileSys = tmpDir.getFileSystem(context.getConfiguration());
       if (!fileSys.mkdirs(tmpDir)) {
         LOG.error("Mkdirs failed to create " + tmpDir.toString());
@@ -106,11 +105,27 @@ public class FileOutputCommitter extends
   }
   
   /**
+   * Move all job output to the final place.
    * Delete the temporary directory, including all of the work directories.
    * Create a _SUCCESS file to make it as successful.
    * @param context the job's context
    */
   public void commitJob(JobContext context) throws IOException {
+    //delete the task temp directory from the current jobtempdir
+    Path tmpDir = new Path(outputPath, getJobAttemptBaseDirName(context) +
+        Path.SEPARATOR + FileOutputCommitter.TEMP_DIR_NAME);
+    FileSystem fileSys = tmpDir.getFileSystem(context.getConfiguration());
+    if (fileSys.exists(tmpDir)) {
+      fileSys.delete(tmpDir, true);
+    } else {
+      LOG.warn("Task temp dir could not be deleted " + tmpDir);
+    }
+    
+	  //move the job output to final place
+    Path jobOutputPath = 
+        new Path(outputPath, getJobAttemptBaseDirName(context));
+	  moveJobOutputs(outputFileSystem, outputPath, jobOutputPath);
+	  
     // delete the _temporary folder and create a _done file in the o/p folder
     cleanupJob(context);
     if (shouldMarkOutputDir(context.getConfiguration())) {
@@ -118,6 +133,31 @@ public class FileOutputCommitter extends
     }
   }
 
+  private void moveJobOutputs(FileSystem fs,
+      Path finalOutputDir, Path jobOutput) throws IOException {
+    if (fs.isFile(jobOutput)) {
+      Path finalOutputPath = getFinalPath(finalOutputDir, jobOutput, jobOutput);
+      if (!fs.rename(jobOutput, finalOutputPath)) {
+        if (!fs.delete(finalOutputPath, true)) {
+          throw new IOException("Failed to delete earlier output of job");
+        }
+        if (!fs.rename(jobOutput, finalOutputPath)) {
+          throw new IOException("Failed to save output of job");
+        }
+      }
+      LOG.debug("Moved " + jobOutput + " to " + finalOutputPath);
+    } else if (fs.getFileStatus(jobOutput).isDirectory()) {
+      FileStatus[] paths = fs.listStatus(jobOutput);
+      Path finalOutputPath = getFinalPath(finalOutputDir, jobOutput, jobOutput);
+      fs.mkdirs(finalOutputPath);
+      if (paths != null) {
+        for (FileStatus path : paths) {
+          moveJobOutputs(fs, finalOutputDir, path.getPath());
+        }
+      }
+    }
+  }
+
   @Override
   @Deprecated
   public void cleanupJob(JobContext context) throws IOException {
@@ -163,8 +203,10 @@ public class FileOutputCommitter extends
     if (workPath != null) {
       context.progress();
       if (outputFileSystem.exists(workPath)) {
-        // Move the task outputs to their final place
-        moveTaskOutputs(context, outputFileSystem, outputPath, workPath);
+        // Move the task outputs to the current job attempt output dir
+    	  Path jobOutputPath = 
+    	      new Path(outputPath, getJobAttemptBaseDirName(context));
+        moveTaskOutputs(context, outputFileSystem, jobOutputPath, workPath);
         // Delete the temporary task-specific output directory
         if (!outputFileSystem.delete(workPath, true)) {
           LOG.warn("Failed to delete the temporary output" + 
@@ -271,4 +313,50 @@ public class FileOutputCommitter extends
   public Path getWorkPath() throws IOException {
     return workPath;
   }
+
+  @Override
+  public boolean isRecoverySupported() {
+    return true;
+  }
+  
+  @Override
+  public void recoverTask(TaskAttemptContext context)
+      throws IOException {
+    context.progress();
+    Path jobOutputPath = 
+        new Path(outputPath, getJobAttemptBaseDirName(context));
+    int previousAttempt =         
+        context.getConfiguration().getInt(
+            MRJobConfig.APPLICATION_ATTEMPT_ID, 0) - 1;
+    if (previousAttempt < 0) {
+      throw new IOException ("Cannot recover task output for first attempt...");
+    }
+
+    Path pathToRecover = 
+        new Path(outputPath, getJobAttemptBaseDirName(previousAttempt));
+    if (outputFileSystem.exists(pathToRecover)) {
+      // Move the task outputs to their final place
+      moveJobOutputs(outputFileSystem, jobOutputPath, pathToRecover);
+      LOG.info("Saved output of job to " + jobOutputPath);
+    }
+  }
+
+  protected static String getJobAttemptBaseDirName(JobContext context) {
+    int appAttemptId = 
+        context.getConfiguration().getInt(
+            MRJobConfig.APPLICATION_ATTEMPT_ID, 0);
+    return getJobAttemptBaseDirName(appAttemptId);
+  }
+
+  protected static String getJobAttemptBaseDirName(int appAttemptId) {
+    return FileOutputCommitter.TEMP_DIR_NAME + Path.SEPARATOR + 
+      + appAttemptId;
+  }
+
+  protected static String getTaskAttemptBaseDirName(
+      TaskAttemptContext context) {
+	  return getJobAttemptBaseDirName(context) + Path.SEPARATOR + 
+	  FileOutputCommitter.TEMP_DIR_NAME + Path.SEPARATOR +
+      "_" + context.getTaskAttemptID().toString();
+  }
 }
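
[Note: with the helpers above, each application attempt gets its own subtree
under the job's _temporary directory: attempt N's task work paths take the shape
${outputdir}/_temporary/N/_temporary/_<taskAttemptID>, and recoverTask promotes
the previous attempt's _temporary/N-1 subtree into _temporary/N instead of
recomputing the output. A sketch of the names the helpers produce; TEMP_DIR_NAME
is "_temporary" and the attempt ids are illustrative:

    import org.apache.hadoop.fs.Path;

    public class AttemptDirs {
      public static void main(String[] args) {
        int appAttemptId = 2;
        String jobAttemptBase = "_temporary" + Path.SEPARATOR + appAttemptId;
        // -> _temporary/2  (what getJobAttemptBaseDirName(2) returns)
        String taskAttemptBase = jobAttemptBase + Path.SEPARATOR
            + "_temporary" + Path.SEPARATOR
            + "_attempt_201110060116_0001_m_000000_0";
        // -> _temporary/2/_temporary/_attempt_..._m_000000_0
        System.out.println(taskAttemptBase);
      }
    }
]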

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java Thu Oct  6 01:16:48 2011
@@ -114,15 +114,15 @@ public class JobSplitWriter {
     if (array.length != 0) {
       SerializationFactory factory = new SerializationFactory(conf);
       int i = 0;
-      long offset = out.size();
+      long offset = out.getPos();
       for(T split: array) {
-        int prevCount = out.size();
+        long prevCount = out.getPos();
         Text.writeString(out, split.getClass().getName());
         Serializer<T> serializer = 
           factory.getSerializer((Class<T>) split.getClass());
         serializer.open(out);
         serializer.serialize(split);
-        int currCount = out.size();
+        long currCount = out.getPos();
         info[i++] = 
           new JobSplit.SplitMetaInfo( 
               split.getLocations(), offset,
@@ -139,12 +139,12 @@ public class JobSplitWriter {
     SplitMetaInfo[] info = new SplitMetaInfo[splits.length];
     if (splits.length != 0) {
       int i = 0;
-      long offset = out.size();
+      long offset = out.getPos();
       for(org.apache.hadoop.mapred.InputSplit split: splits) {
-        int prevLen = out.size();
+        long prevLen = out.getPos();
         Text.writeString(out, split.getClass().getName());
         split.write(out);
-        int currLen = out.size();
+        long currLen = out.getPos();
         info[i++] = new JobSplit.SplitMetaInfo( 
             split.getLocations(), offset,
             split.getLength());
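
[Note: the switch from out.size() to out.getPos() fixes split metadata for very
large splits files: java.io.DataOutputStream.size() counts in an int and, per
its javadoc, wraps to Integer.MAX_VALUE on overflow, so any offset past 2 GB was
recorded wrongly, while FSDataOutputStream.getPos() returns the true long
position. A small standalone illustration of the failure mode:

    public class IntOffsetOverflow {
      public static void main(String[] args) {
        long truePos = 3L * 1024 * 1024 * 1024;  // 3 GB into the stream
        int asIntCounter = (int) Math.min(truePos, Integer.MAX_VALUE);
        System.out.println(truePos);      // 3221225472
        System.out.println(asIntCounter); // 2147483647 -- saturated, offset is wrong
      }
    }
]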

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml Thu Oct  6 01:16:48 2011
@@ -1174,7 +1174,7 @@
 
 <property>
   <name>yarn.app.mapreduce.am.staging-dir</name>
-  <value>/tmp/hadoop-yarn/${user.name}/staging</value>
+  <value>/tmp/hadoop-yarn/staging</value>
   <description>The staging dir used while submitting jobs.
   </description>
 </property>

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Oct  6 01:16:48 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1177128
+/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1179483
 /hadoop/core/branches/branch-0.19/mapred/src/java/mapred-default.xml:713112
 /hadoop/core/trunk/src/mapred/mapred-default.xml:776175-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java Thu Oct  6 01:16:48 2011
@@ -48,6 +48,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.yarn.YarnException;
@@ -96,9 +97,11 @@ public class CompletedJob implements org
     report.setFinishTime(jobInfo.getFinishTime());
     report.setJobName(jobInfo.getJobname());
     report.setUser(jobInfo.getUsername());
-    //TODO Possibly populate job progress. Never used.
-    //report.setMapProgress(progress) 
-    //report.setReduceProgress(progress)
+    report.setMapProgress((float) getCompletedMaps() / getTotalMaps());
+    report.setReduceProgress((float) getCompletedReduces() / getTotalReduces());
+    report.setJobFile(confFile.toString());
+    report.setTrackingUrl(JobHistoryUtils.getHistoryUrl(conf, TypeConverter
+        .toYarn(TypeConverter.fromYarn(jobId)).getAppId()));
   }
 
   @Override
@@ -287,8 +290,7 @@ public class CompletedJob implements org
 
   @Override
   public boolean isUber() {
-    LOG.warn("isUber is not yet implemented");
-    return false;
+    return jobInfo.getUberized();
   }
 
   @Override
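
[Note: the report now derives map/reduce progress from completed-over-total
counts; the (float) cast binds to the numerator, so the division happens in
floating point rather than truncating integer arithmetic. (A job with zero
reduces would yield NaN here, since 0f / 0 is NaN in float division.) A tiny
illustration of the cast precedence:

    public class CastPrecedence {
      public static void main(String[] args) {
        int completed = 3, total = 4;
        float wrong = completed / total;          // 0.0f: int division happens first
        float right = (float) completed / total;  // 0.75f: cast promotes the division
        System.out.println(wrong + " " + right);
      }
    }
]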

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java Thu Oct  6 01:16:48 2011
@@ -101,12 +101,9 @@ public class HistoryClientService extend
   }
 
   public void start() {
-    YarnRPC rpc = YarnRPC.create(getConfig());
-    Configuration conf = new Configuration(getConfig());
-    conf.setClass(
-        YarnConfiguration.YARN_SECURITY_INFO,
-        ClientHSSecurityInfo.class, SecurityInfo.class);
-    initializeWebApp(getConfig());
+    Configuration conf = getConfig();
+    YarnRPC rpc = YarnRPC.create(conf);
+    initializeWebApp(conf);
     String serviceAddr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS,
         JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
     InetSocketAddress address = NetUtils.createSocketAddr(serviceAddr);

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java Thu Oct  6 01:16:48 2011
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
+import java.io.IOException;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
 import org.apache.hadoop.mapreduce.v2.app.webapp.AppController;
@@ -57,7 +59,7 @@ public class HsController extends AppCon
    * @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#countersPage()
    */
   @Override
-  protected Class<? extends View> countersPage() {
+  public Class<? extends View> countersPage() {
     return HsCountersPage.class;
   }
   
@@ -108,7 +110,16 @@ public class HsController extends AppCon
   public void jobCounters() {
     super.jobCounters();
   }
-
+  
+  /*
+   * (non-Javadoc)
+   * @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#taskCounters()
+   */
+  @Override
+  public void taskCounters() {
+    super.taskCounters();
+  }
+  
   /*
    * (non-Javadoc)
    * @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#tasks()
@@ -157,4 +168,31 @@ public class HsController extends AppCon
   public void about() {
     render(aboutPage());
   }
+  
+  /*
+   * (non-Javadoc)
+   * @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#singleCounterPage()
+   */
+  @Override
+  protected Class<? extends View> singleCounterPage() {
+    return HsSingleCounterPage.class;
+  }
+  
+  /*
+   * (non-Javadoc)
+   * @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#singleJobCounter()
+   */
+  @Override
+  public void singleJobCounter() throws IOException{
+    super.singleJobCounter();
+  }
+  
+  /*
+   * (non-Javadoc)
+   * @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#singleTaskCounter()
+   */
+  @Override
+  public void singleTaskCounter() throws IOException{
+    super.singleTaskCounter();
+  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsCountersPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsCountersPage.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsCountersPage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsCountersPage.java Thu Oct  6 01:16:48 2011
@@ -18,11 +18,12 @@
 
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_ID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
+
 import org.apache.hadoop.mapreduce.v2.app.webapp.CountersBlock;
 import org.apache.hadoop.yarn.webapp.SubView;
 
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
-
 /**
  * Render the counters page
  */
@@ -34,7 +35,12 @@ public class HsCountersPage extends HsVi
    */
   @Override protected void preHead(Page.HTML<_> html) {
     commonPreHead(html);
-    set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
+    String tid = $(TASK_ID);
+    String activeNav = "2";
+    if(tid == null || tid.isEmpty()) {
+      activeNav = "1";
+    }
+    set(initID(ACCORDION, "nav"), "{autoHeight:false, active:"+activeNav+"}");
     set(DATATABLES_SELECTOR, "#counters .dt-counters");
     set(initSelector(DATATABLES),
         "{bJQueryUI:true, sDom:'t', iDisplayLength:-1}");
@@ -47,9 +53,9 @@ public class HsCountersPage extends HsVi
   @Override protected void postHead(Page.HTML<_> html) {
     html.
       style("#counters, .dt-counters { table-layout: fixed }",
-            "#counters th { overflow: hidden; vertical-align: center }",
+            "#counters th { overflow: hidden; vertical-align: middle }",
             "#counters .dataTables_wrapper { min-height: 1em }",
-            "#counters .group { width: 10em }",
+            "#counters .group { width: 15em }",
             "#counters .name { width: 30em }");
   }
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsNavBlock.java Thu Oct  6 01:16:48 2011
@@ -55,6 +55,14 @@ public class HsNavBlock extends HtmlBloc
           li().a(url("conf", jobid), "Configuration")._().
           li().a(url("tasks", jobid, "m"), "Map tasks")._().
           li().a(url("tasks", jobid, "r"), "Reduce tasks")._()._();
+      if (app.getTask() != null) {
+        String taskid = MRApps.toString(app.getTask().getID());
+        nav.
+          h3("Task").
+          ul().
+            li().a(url("task", taskid), "Task Overview")._().
+            li().a(url("taskcounters", taskid), "Counters")._()._();
+      }
     }
     nav.
       h3("Tools").

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java Thu Oct  6 01:16:48 2011
@@ -250,7 +250,7 @@ public class HsTaskPage extends HsView {
   @Override protected void preHead(Page.HTML<_> html) {
     commonPreHead(html);
     //override the nav config from commonPReHead
-    set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
+    set(initID(ACCORDION, "nav"), "{autoHeight:false, active:2}");
     //Set up the java script and CSS for the attempts table
     set(DATATABLES_ID, "attempts");
     set(initID(DATATABLES, "attempts"), attemptsTableInit());

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java Thu Oct  6 01:16:48 2011
@@ -41,10 +41,15 @@ public class HsWebApp extends WebApp imp
     route(pajoin("/job", JOB_ID), HsController.class, "job");
     route(pajoin("/conf", JOB_ID), HsController.class, "conf");
     route(pajoin("/jobcounters", JOB_ID), HsController.class, "jobCounters");
+    route(pajoin("/singlejobcounter",JOB_ID, COUNTER_GROUP, COUNTER_NAME),
+        HsController.class, "singleJobCounter");
     route(pajoin("/tasks", JOB_ID, TASK_TYPE), HsController.class, "tasks");
     route(pajoin("/attempts", JOB_ID, TASK_TYPE, ATTEMPT_STATE),
         HsController.class, "attempts");
     route(pajoin("/task", TASK_ID), HsController.class, "task");
+    route(pajoin("/taskcounters", TASK_ID), HsController.class, "taskCounters");
+    route(pajoin("/singletaskcounter",TASK_ID, COUNTER_GROUP, COUNTER_NAME),
+        HsController.class, "singleTaskCounter");
     route("/about", HsController.class, "about");
   }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java Thu Oct  6 01:16:48 2011
@@ -95,6 +95,8 @@ public class TestJobHistoryParsing {
         2, jobInfo.getFinishedMaps());
     Assert.assertEquals("incorrect finishedReduces ",
         1, jobInfo.getFinishedReduces());
+    Assert.assertEquals("incorrect uberized ",
+        job.isUber(), jobInfo.getUberized());
     int totalTasks = jobInfo.getAllTasks().size();
     Assert.assertEquals("total number of tasks is incorrect  ", 3, totalTasks);
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java Thu Oct  6 01:16:48 2011
@@ -26,17 +26,13 @@ import static org.junit.Assert.assertEqu
 
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.app.job.Task;
-import org.apache.hadoop.mapreduce.v2.app.webapp.AMParams;
 import org.apache.hadoop.mapreduce.v2.app.webapp.TestAMWebApp;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -92,6 +88,7 @@ public class TestHSWebApp {
       return jobs; // OK
     }
 
+    @SuppressWarnings("rawtypes")
     @Override
     public EventHandler getEventHandler() {
       return null;
@@ -171,4 +168,16 @@ public class TestHSWebApp {
     WebAppTests.testPage(HsConfPage.class, AppContext.class,
                          new TestAppContext());
   }
+  
+  @Test public void testAboutView() {
+    LOG.info("HsAboutPage");
+    WebAppTests.testPage(HsAboutPage.class, AppContext.class,
+                         new TestAppContext());
+  }
+  
+  @Test public void testSingleCounterView() {
+    LOG.info("HsSingleCounterPage");
+    WebAppTests.testPage(HsSingleCounterPage.class, AppContext.class,
+                         new TestAppContext());
+  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java Thu Oct  6 01:16:48 2011
@@ -80,17 +80,14 @@ public class ClientCache {
       return null;
     }
     LOG.info("Connecting to HistoryServer at: " + serviceAddr);
-    final Configuration myConf = new Configuration(conf);
-    myConf.setClass(YarnConfiguration.YARN_SECURITY_INFO,
-        ClientHSSecurityInfo.class, SecurityInfo.class);
-    final YarnRPC rpc = YarnRPC.create(myConf);
+    final YarnRPC rpc = YarnRPC.create(conf);
     LOG.info("Connected to HistoryServer at: " + serviceAddr);
     UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
     return currentUser.doAs(new PrivilegedAction<MRClientProtocol>() {
       @Override
       public MRClientProtocol run() {
         return (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
-            NetUtils.createSocketAddr(serviceAddr), myConf);
+            NetUtils.createSocketAddr(serviceAddr), conf);
       }
     });
   }
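
The dropped lines above cloned the Configuration solely to register ClientHSSecurityInfo; after this change the shared conf is handed straight to YarnRPC, presumably because the protocol's SecurityInfo no longer needs a per-connection override. A condensed, self-contained sketch of the remaining doAs-plus-proxy pattern (serviceAddr is an assumed "host:port" string):

    import java.io.IOException;
    import java.security.PrivilegedAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
    import org.apache.hadoop.net.NetUtils;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.yarn.ipc.YarnRPC;

    class HistoryProxySketch {
      // Create the history-server proxy as the current user, with no
      // cloned Configuration.
      static MRClientProtocol connect(final Configuration conf,
          final String serviceAddr) throws IOException {
        final YarnRPC rpc = YarnRPC.create(conf);
        return UserGroupInformation.getCurrentUser().doAs(
            new PrivilegedAction<MRClientProtocol>() {
              @Override
              public MRClientProtocol run() {
                return (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
                    NetUtils.createSocketAddr(serviceAddr), conf);
              }
            });
      }
    }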

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java Thu Oct  6 01:16:48 2011
@@ -21,10 +21,12 @@ package org.apache.hadoop.mapred;
 import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.net.InetSocketAddress;
 import java.security.PrivilegedAction;
 import java.util.HashMap;
 import java.util.List;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -60,7 +62,7 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
@@ -86,8 +88,9 @@ public class ClientServiceDelegate {
   private MRClientProtocol realProxy = null;
   private RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
   private static String UNKNOWN_USER = "Unknown User";
+  private String trackingUrl;
 
-  public ClientServiceDelegate(Configuration conf, ResourceMgrDelegate rm, 
+  public ClientServiceDelegate(Configuration conf, ResourceMgrDelegate rm,
       JobID jobId, MRClientProtocol historyServerProxy) {
     this.conf = new Configuration(conf); // Cloning for modifying.
     // For faster redirects from AM to HS.
@@ -101,7 +104,7 @@ public class ClientServiceDelegate {
 
   // Get the instance of the NotRunningJob corresponding to the specified
   // user and state
-  private NotRunningJob getNotRunningJob(ApplicationReport applicationReport, 
+  private NotRunningJob getNotRunningJob(ApplicationReport applicationReport,
       JobState state) {
     synchronized (notRunningJobs) {
       HashMap<String, NotRunningJob> map = notRunningJobs.get(state);
@@ -109,8 +112,8 @@ public class ClientServiceDelegate {
         map = new HashMap<String, NotRunningJob>();
         notRunningJobs.put(state, map);
       }
-      String user = 
-          (applicationReport == null) ? 
+      String user =
+          (applicationReport == null) ?
               UNKNOWN_USER : applicationReport.getUser();
       NotRunningJob notRunningJob = map.get(user);
       if (notRunningJob == null) {
@@ -129,8 +132,11 @@ public class ClientServiceDelegate {
     // Possibly allow nulls through the PB tunnel, otherwise deal with an exception
     // and redirect to the history server.
     ApplicationReport application = rm.getApplicationReport(appId);
+    if (application != null) {
+      trackingUrl = application.getTrackingUrl();
+    }
     String serviceAddr = null;
-    while (application == null || ApplicationState.RUNNING.equals(application.getState())) {
+    while (application == null || YarnApplicationState.RUNNING.equals(application.getYarnApplicationState())) {
       if (application == null) {
         LOG.info("Could not get Job info from RM for job " + jobId
             + ". Redirecting to job history server.");
@@ -140,8 +146,8 @@ public class ClientServiceDelegate {
         if (application.getHost() == null || "".equals(application.getHost())) {
           LOG.debug("AM not assigned to Job. Waiting to get the AM ...");
           Thread.sleep(2000);
-   
-          LOG.debug("Application state is " + application.getState());
+
+          LOG.debug("Application state is " + application.getYarnApplicationState());
           application = rm.getApplicationReport(appId);
           continue;
         }
@@ -151,8 +157,11 @@ public class ClientServiceDelegate {
           Token<ApplicationTokenIdentifier> clientToken =
             new Token<ApplicationTokenIdentifier>();
           clientToken.decodeFromUrlString(clientTokenEncoded);
-          clientToken.setService(new Text(application.getHost() + ":"
-              + application.getRpcPort()));
+          // RPC layer client expects ip:port as service for tokens
+          InetSocketAddress addr = NetUtils.createSocketAddr(application
+              .getHost(), application.getRpcPort());
+          clientToken.setService(new Text(addr.getAddress().getHostAddress()
+              + ":" + addr.getPort()));
           UserGroupInformation.getCurrentUser().addToken(clientToken);
         }
         LOG.info("Tracking Url of JOB is " + application.getTrackingUrl());
@@ -163,7 +172,7 @@ public class ClientServiceDelegate {
         //possibly the AM has crashed
         //there may be some time before AM is restarted
         //keep retrying by getting the address from RM
-        LOG.info("Could not connect to " + serviceAddr + 
+        LOG.info("Could not connect to " + serviceAddr +
         ". Waiting for getting the latest AM address...");
         try {
           Thread.sleep(2000);
@@ -184,35 +193,36 @@ public class ClientServiceDelegate {
     }
 
     /** we just want to return if its allocating, so that we don't
-     * block on it. This is to be able to return job status 
+     * block on it. This is to be able to return job status
      * on an allocating Application.
      */
-    
+
     String user = application.getUser();
     if (user == null) {
       throw RPCUtil.getRemoteException("User is not set in the application report");
     }
-    if (application.getState() == ApplicationState.NEW ||
-        application.getState() == ApplicationState.SUBMITTED) {
+    if (application.getYarnApplicationState() == YarnApplicationState.NEW ||
+        application.getYarnApplicationState() == YarnApplicationState.SUBMITTED) {
       realProxy = null;
       return getNotRunningJob(application, JobState.NEW);
     }
-    
-    if (application.getState() == ApplicationState.FAILED) {
+
+    if (application.getYarnApplicationState() == YarnApplicationState.FAILED) {
       realProxy = null;
       return getNotRunningJob(application, JobState.FAILED);
     }
-    
-    if (application.getState() == ApplicationState.KILLED) {
+
+    if (application.getYarnApplicationState() == YarnApplicationState.KILLED) {
       realProxy = null;
       return getNotRunningJob(application, JobState.KILLED);
     }
-    
-    //History server can serve a job only if application 
+
+    //History server can serve a job only if application
     //succeeded.
-    if (application.getState() == ApplicationState.SUCCEEDED) {
-      LOG.info("Application state is completed. " +
-          "Redirecting to job history server");
+    if (application.getYarnApplicationState() == YarnApplicationState.FINISHED) {
+      LOG.info("Application state is completed. FinalApplicationStatus="
+          + application.getFinalApplicationStatus().toString()
+          + ". Redirecting to job history server");
       realProxy = checkAndGetHSProxy(application, JobState.SUCCEEDED);
     }
     return realProxy;
@@ -233,19 +243,15 @@ public class ClientServiceDelegate {
     realProxy = currentUser.doAs(new PrivilegedAction<MRClientProtocol>() {
       @Override
       public MRClientProtocol run() {
-        Configuration myConf = new Configuration(conf);
-        myConf.setClass(
-            YarnConfiguration.YARN_SECURITY_INFO,
-            SchedulerSecurityInfo.class, SecurityInfo.class); 
-        YarnRPC rpc = YarnRPC.create(myConf);
+        YarnRPC rpc = YarnRPC.create(conf);
         return (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
-            NetUtils.createSocketAddr(serviceAddr), myConf);
+            NetUtils.createSocketAddr(serviceAddr), conf);
       }
     });
     LOG.trace("Connected to ApplicationMaster at: " + serviceAddr);
   }
 
-  private synchronized Object invoke(String method, Class argClass, 
+  private synchronized Object invoke(String method, Class argClass,
       Object args) throws YarnRemoteException {
     Method methodOb = null;
     try {
@@ -284,10 +290,10 @@ public class ClientServiceDelegate {
     org.apache.hadoop.mapreduce.v2.api.records.JobId jobID = TypeConverter.toYarn(arg0);
       GetCountersRequest request = recordFactory.newRecordInstance(GetCountersRequest.class);
       request.setJobId(jobID);
-      Counters cnt = ((GetCountersResponse) 
+      Counters cnt = ((GetCountersResponse)
           invoke("getCounters", GetCountersRequest.class, request)).getCounters();
       return TypeConverter.fromYarn(cnt);
-      
+
   }
 
   public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1, int arg2)
@@ -299,7 +305,7 @@ public class ClientServiceDelegate {
     request.setJobId(jobID);
     request.setFromEventId(arg1);
     request.setMaxEvents(arg2);
-    List<org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent> list = 
+    List<org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent> list =
       ((GetTaskAttemptCompletionEventsResponse) invoke(
         "getTaskAttemptCompletionEvents", GetTaskAttemptCompletionEventsRequest.class, request)).
         getCompletionEventList();
@@ -327,38 +333,43 @@ public class ClientServiceDelegate {
   }
   
   public JobStatus getJobStatus(JobID oldJobID) throws YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = 
+    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
       TypeConverter.toYarn(oldJobID);
-    GetJobReportRequest request = 
+    GetJobReportRequest request =
         recordFactory.newRecordInstance(GetJobReportRequest.class);
     request.setJobId(jobId);
-    JobReport report = ((GetJobReportResponse) invoke("getJobReport", 
+    JobReport report = ((GetJobReportResponse) invoke("getJobReport",
         GetJobReportRequest.class, request)).getJobReport();
-    String jobFile = MRApps.getJobFile(conf, report.getUser(), oldJobID); 
-
-    return TypeConverter.fromYarn(report, jobFile);
+    if (StringUtils.isEmpty(report.getJobFile())) {
+      String jobFile = MRApps.getJobFile(conf, report.getUser(), oldJobID);
+      report.setJobFile(jobFile);
+    }
+    String historyTrackingUrl = report.getTrackingUrl();
+    return TypeConverter.fromYarn(report, "http://"
+        + (StringUtils.isNotEmpty(historyTrackingUrl) ? historyTrackingUrl
+            : trackingUrl));
   }
 
   public org.apache.hadoop.mapreduce.TaskReport[] getTaskReports(JobID oldJobID, TaskType taskType)
        throws YarnRemoteException, YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = 
+    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
       TypeConverter.toYarn(oldJobID);
-    GetTaskReportsRequest request = 
+    GetTaskReportsRequest request =
         recordFactory.newRecordInstance(GetTaskReportsRequest.class);
     request.setJobId(jobId);
     request.setTaskType(TypeConverter.toYarn(taskType));
-    
-    List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports = 
-      ((GetTaskReportsResponse) invoke("getTaskReports", GetTaskReportsRequest.class, 
+
+    List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports =
+      ((GetTaskReportsResponse) invoke("getTaskReports", GetTaskReportsRequest.class,
           request)).getTaskReportList();
-    
+
     return TypeConverter.fromYarn
     (taskReports).toArray(new org.apache.hadoop.mapreduce.TaskReport[0]);
   }
 
   public boolean killTask(TaskAttemptID taskAttemptID, boolean fail)
        throws YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID 
+    org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID
       = TypeConverter.toYarn(taskAttemptID);
     if (fail) {
       FailTaskAttemptRequest failRequest = recordFactory.newRecordInstance(FailTaskAttemptRequest.class);
@@ -371,10 +382,10 @@ public class ClientServiceDelegate {
     }
     return true;
   }
-  
+
   public boolean killJob(JobID oldJobID)
        throws YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId 
+    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId
     = TypeConverter.toYarn(oldJobID);
     KillJobRequest killRequest = recordFactory.newRecordInstance(KillJobRequest.class);
     killRequest.setJobId(jobId);
@@ -382,5 +393,5 @@ public class ClientServiceDelegate {
     return true;
   }
 
-    
+
 }
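
Two behavioural changes above are easy to miss among the whitespace cleanup: the client token's service field is now the resolved ip:port (the RPC layer matches tokens on that form, per the new comment), and getJobStatus now prefers the tracking URL carried in the job report, falling back to the URL captured earlier from the ApplicationReport. A small sketch of both, with hypothetical helper names:

    import java.net.InetSocketAddress;

    import org.apache.commons.lang.StringUtils;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.net.NetUtils;

    class DelegateSketch {
      // Tokens are matched by "ip:port", so resolve the host first.
      // Assumes the hostname resolves; getAddress() would be null otherwise.
      static Text tokenService(String host, int rpcPort) {
        InetSocketAddress addr = NetUtils.createSocketAddr(host, rpcPort);
        return new Text(addr.getAddress().getHostAddress() + ":"
            + addr.getPort());
      }

      // Mirror of the tracking-URL fallback in getJobStatus: use the history
      // URL when the report carries one, else the RM-supplied URL.
      static String trackingUrl(String historyTrackingUrl,
          String rmTrackingUrl) {
        return "http://"
            + (StringUtils.isNotEmpty(historyTrackingUrl) ? historyTrackingUrl
                : rmTrackingUrl);
      }
    }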

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java Thu Oct  6 01:16:48 2011
@@ -22,8 +22,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
@@ -55,40 +53,36 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.BuilderUtils;
 
 public class NotRunningJob implements MRClientProtocol {
 
-  private static final Log LOG = LogFactory.getLog(NotRunningJob.class);
-  
-  private RecordFactory recordFactory = 
+  private RecordFactory recordFactory =
     RecordFactoryProvider.getRecordFactory(null);
-  
+
   private final JobState jobState;
   private final ApplicationReport applicationReport;
-  
-  
+
+
   private ApplicationReport getUnknownApplicationReport() {
-    ApplicationReport unknown = 
-        recordFactory.newRecordInstance(ApplicationReport.class);
-    unknown.setUser("N/A");
-    unknown.setHost("N/A");
-    unknown.setName("N/A");
-    unknown.setQueue("N/A");
-    unknown.setStartTime(0);
-    unknown.setFinishTime(0);
-    unknown.setTrackingUrl("N/A");
-    unknown.setDiagnostics("N/A");
-    LOG.info("getUnknownApplicationReport");
-    return unknown;
+    ApplicationId unknownAppId = recordFactory.newRecordInstance(ApplicationId.class);
+
+    // Setting AppState to NEW and finalStatus to UNDEFINED as they are never
+    // used for a non-running job
+    return BuilderUtils.newApplicationReport(unknownAppId, "N/A", "N/A", "N/A", "N/A", 0, "", 
+        YarnApplicationState.NEW, "N/A", "N/A", 0, 0, FinalApplicationStatus.UNDEFINED);    
   }
-  
+
   NotRunningJob(ApplicationReport applicationReport, JobState jobState) {
-    this.applicationReport = 
-        (applicationReport ==  null) ? 
+    this.applicationReport =
+        (applicationReport ==  null) ?
             getUnknownApplicationReport() : applicationReport;
     this.jobState = jobState;
   }
@@ -96,7 +90,7 @@ public class NotRunningJob implements MR
   @Override
   public FailTaskAttemptResponse failTaskAttempt(
       FailTaskAttemptRequest request) throws YarnRemoteException {
-    FailTaskAttemptResponse resp = 
+    FailTaskAttemptResponse resp =
       recordFactory.newRecordInstance(FailTaskAttemptResponse.class);
     return resp;
   }
@@ -104,7 +98,7 @@ public class NotRunningJob implements MR
   @Override
   public GetCountersResponse getCounters(GetCountersRequest request)
       throws YarnRemoteException {
-    GetCountersResponse resp = 
+    GetCountersResponse resp =
       recordFactory.newRecordInstance(GetCountersResponse.class);
     Counters counters = recordFactory.newRecordInstance(Counters.class);
     counters.addAllCounterGroups(new HashMap<String, CounterGroup>());
@@ -115,7 +109,7 @@ public class NotRunningJob implements MR
   @Override
   public GetDiagnosticsResponse getDiagnostics(GetDiagnosticsRequest request)
       throws YarnRemoteException {
-    GetDiagnosticsResponse resp = 
+    GetDiagnosticsResponse resp =
       recordFactory.newRecordInstance(GetDiagnosticsResponse.class);
     resp.addDiagnostics("");
     return resp;
@@ -135,7 +129,7 @@ public class NotRunningJob implements MR
     jobReport.setTrackingUrl(applicationReport.getTrackingUrl());
     jobReport.setFinishTime(applicationReport.getFinishTime());
 
-    GetJobReportResponse resp = 
+    GetJobReportResponse resp =
         recordFactory.newRecordInstance(GetJobReportResponse.class);
     resp.setJobReport(jobReport);
     return resp;
@@ -145,7 +139,7 @@ public class NotRunningJob implements MR
   public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
       GetTaskAttemptCompletionEventsRequest request)
       throws YarnRemoteException {
-    GetTaskAttemptCompletionEventsResponse resp = 
+    GetTaskAttemptCompletionEventsResponse resp =
       recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsResponse.class);
     resp.addAllCompletionEvents(new ArrayList<TaskAttemptCompletionEvent>());
     return resp;
@@ -161,7 +155,7 @@ public class NotRunningJob implements MR
   @Override
   public GetTaskReportResponse getTaskReport(GetTaskReportRequest request)
       throws YarnRemoteException {
-    GetTaskReportResponse resp = 
+    GetTaskReportResponse resp =
       recordFactory.newRecordInstance(GetTaskReportResponse.class);
     TaskReport report = recordFactory.newRecordInstance(TaskReport.class);
     report.setTaskId(request.getTaskId());
@@ -176,7 +170,7 @@ public class NotRunningJob implements MR
   @Override
   public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request)
       throws YarnRemoteException {
-    GetTaskReportsResponse resp = 
+    GetTaskReportsResponse resp =
       recordFactory.newRecordInstance(GetTaskReportsResponse.class);
     resp.addAllTaskReports(new ArrayList<TaskReport>());
     return resp;
@@ -185,7 +179,7 @@ public class NotRunningJob implements MR
   @Override
   public KillJobResponse killJob(KillJobRequest request)
       throws YarnRemoteException {
-    KillJobResponse resp = 
+    KillJobResponse resp =
       recordFactory.newRecordInstance(KillJobResponse.class);
     return resp;
   }
@@ -193,7 +187,7 @@ public class NotRunningJob implements MR
   @Override
   public KillTaskResponse killTask(KillTaskRequest request)
       throws YarnRemoteException {
-    KillTaskResponse resp = 
+    KillTaskResponse resp =
       recordFactory.newRecordInstance(KillTaskResponse.class);
     return resp;
   }
@@ -201,9 +195,9 @@ public class NotRunningJob implements MR
   @Override
   public KillTaskAttemptResponse killTaskAttempt(
       KillTaskAttemptRequest request) throws YarnRemoteException {
-    KillTaskAttemptResponse resp = 
+    KillTaskAttemptResponse resp =
       recordFactory.newRecordInstance(KillTaskAttemptResponse.class);
     return resp;
   }
-  
+
 }
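
The rewritten getUnknownApplicationReport swaps a dozen individual setters for one BuilderUtils call; the stub keeps getJobReport and friends null-safe when the RM has no record of the application. A sketch of the same construction, with the argument order copied from the hunk above:

    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.ApplicationReport;
    import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
    import org.apache.hadoop.yarn.api.records.YarnApplicationState;
    import org.apache.hadoop.yarn.factories.RecordFactory;
    import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
    import org.apache.hadoop.yarn.util.BuilderUtils;

    class UnknownReportSketch {
      static ApplicationReport unknownReport() {
        RecordFactory recordFactory =
            RecordFactoryProvider.getRecordFactory(null);
        ApplicationId unknownAppId =
            recordFactory.newRecordInstance(ApplicationId.class);
        // NEW / UNDEFINED are placeholders: neither field is consulted for
        // a job that is not running.
        return BuilderUtils.newApplicationReport(unknownAppId, "N/A", "N/A",
            "N/A", "N/A", 0, "", YarnApplicationState.NEW, "N/A", "N/A", 0, 0,
            FinalApplicationStatus.UNDEFINED);
      }
    }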

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java Thu Oct  6 01:16:48 2011
@@ -91,13 +91,9 @@ public class ResourceMgrDelegate {
             YarnConfiguration.RM_ADDRESS,
             YarnConfiguration.DEFAULT_RM_ADDRESS));
     LOG.info("Connecting to ResourceManager at " + rmAddress);
-    Configuration appsManagerServerConf = new Configuration(this.conf);
-    appsManagerServerConf.setClass(
-        YarnConfiguration.YARN_SECURITY_INFO,
-        ClientRMSecurityInfo.class, SecurityInfo.class);
     applicationsManager =
         (ClientRMProtocol) rpc.getProxy(ClientRMProtocol.class,
-            rmAddress, appsManagerServerConf);
+            rmAddress, this.conf);
     LOG.info("Connected to ResourceManager at " + rmAddress);
   }
   
@@ -262,7 +258,7 @@ public class ResourceMgrDelegate {
     String user = 
       UserGroupInformation.getCurrentUser().getShortUserName();
     Path path = MRApps.getStagingAreaDir(conf, user);
-    LOG.info("DEBUG --- getStagingAreaDir: dir=" + path);
+    LOG.debug("getStagingAreaDir: dir=" + path);
     return path.toString();
   }
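
The last hunk demotes a "DEBUG ---" message that had been logged at info level. With commons-logging, the usual companion to such a demotion is an isDebugEnabled() guard so the string concatenation is skipped entirely when debug output is off; a minimal sketch:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    class StagingDirLogSketch {
      private static final Log LOG =
          LogFactory.getLog(StagingDirLogSketch.class);

      static void logStagingDir(String dir) {
        // Guard avoids building the message when debug logging is disabled.
        if (LOG.isDebugEnabled()) {
          LOG.debug("getStagingAreaDir: dir=" + dir);
        }
      }
    }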
 


