hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From a..@apache.org
Subject svn commit: r1173012 [2/3] - in /hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ hadoop-mapreduce-client/hadoop-mapreduce-clie...
Date Tue, 20 Sep 2011 07:40:08 GMT
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java Tue Sep 20 07:40:04 2011
@@ -31,22 +31,22 @@ import java.net.URLConnection;
 import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
+import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.util.ConfigUtil;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -130,7 +130,7 @@ public class Job extends JobContextImpl 
 
   @Deprecated
   public Job(Configuration conf) throws IOException {
-    this(new Cluster(conf), conf);
+    this(new JobConf(conf));
   }
 
   @Deprecated
@@ -139,18 +139,13 @@ public class Job extends JobContextImpl 
     setJobName(jobName);
   }
 
-  Job(Cluster cluster) throws IOException {
-    this(cluster, new Configuration());
-  }
-
-  Job(Cluster cluster, Configuration conf) throws IOException {
+  Job(JobConf conf) throws IOException {
     super(conf, null);
-    this.cluster = cluster;
+    this.cluster = null;
   }
 
-  Job(Cluster cluster, JobStatus status,
-             Configuration conf) throws IOException {
-    this(cluster, conf);
+  Job(JobStatus status, JobConf conf) throws IOException {
+    this(conf);
     setJobID(status.getJobID());
     this.status = status;
     state = JobState.RUNNING;
@@ -170,7 +165,13 @@ public class Job extends JobContextImpl 
   }
       
   /**
-   * Creates a new {@link Job} with no particular {@link Cluster} .
+   * Creates a new {@link Job} with no particular {@link Cluster} and a 
+   * given {@link Configuration}.
+   * 
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
    * A Cluster will be created from the conf parameter only when it's needed.
    * 
    * @param conf the configuration
@@ -179,13 +180,18 @@ public class Job extends JobContextImpl 
    */
   public static Job getInstance(Configuration conf) throws IOException {
     // create with a null Cluster
-    return new Job(null, conf);
+    JobConf jobConf = new JobConf(conf);
+    return new Job(jobConf);
   }
 
       
   /**
    * Creates a new {@link Job} with no particular {@link Cluster} and a given jobName.
    * A Cluster will be created from the conf parameter only when it's needed.
+   *
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
    * 
    * @param conf the configuration
    * @return the {@link Job} , with no connection to a cluster yet.
@@ -194,25 +200,92 @@ public class Job extends JobContextImpl 
   public static Job getInstance(Configuration conf, String jobName)
            throws IOException {
     // create with a null Cluster
-    Job result = new Job(null, conf);
+    Job result = getInstance(conf);
     result.setJobName(jobName);
     return result;
   }
   
-  public static Job getInstance(Cluster cluster) throws IOException {
-     return new Job(cluster);
+  /**
+   * Creates a new {@link Job} with no particular {@link Cluster} and given
+   * {@link Configuration} and {@link JobStatus}.
+   * A Cluster will be created from the conf parameter only when it's needed.
+   * 
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
+   * @param status job status
+   * @param conf job configuration
+   * @return the {@link Job} , with no connection to a cluster yet.
+   * @throws IOException
+   */
+  public static Job getInstance(JobStatus status, Configuration conf) 
+  throws IOException {
+    return new Job(status, new JobConf(conf));
+  }
+
+  /**
+   * Creates a new {@link Job} with no particular {@link Cluster}.
+   * A Cluster will be created from the conf parameter only when it's needed.
+   *
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
+   * @param ignored
+   * @return the {@link Job} , with no connection to a cluster yet.
+   * @throws IOException
+   * @deprecated Use {@link #getInstance()}
+   */
+  @Deprecated
+  public static Job getInstance(Cluster ignored) throws IOException {
+    return getInstance();
   }
   
-  public static Job getInstance(Cluster cluster, Configuration conf) 
+  /**
+   * Creates a new {@link Job} with no particular {@link Cluster} and given
+   * {@link Configuration}.
+   * A Cluster will be created from the conf parameter only when it's needed.
+   * 
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
+   * @param ignored
+   * @param conf job configuration
+   * @return the {@link Job} , with no connection to a cluster yet.
+   * @throws IOException
+   * @deprecated Use {@link #getInstance(Configuration)}
+   */
+  @Deprecated
+  public static Job getInstance(Cluster ignored, Configuration conf) 
       throws IOException {
-    return new Job(cluster, conf);
+    return getInstance(conf);
   }
   
+  /**
+   * Creates a new {@link Job} with no particular {@link Cluster} and given
+   * {@link Configuration} and {@link JobStatus}.
+   * A Cluster will be created from the conf parameter only when it's needed.
+   * 
+   * The <code>Job</code> makes a copy of the <code>Configuration</code> so 
+   * that any necessary internal modifications do not reflect on the incoming 
+   * parameter.
+   * 
+   * @param cluster cluster
+   * @param status job status
+   * @param conf job configuration
+   * @return the {@link Job} , with no connection to a cluster yet.
+   * @throws IOException
+   */
+  @Private
   public static Job getInstance(Cluster cluster, JobStatus status, 
       Configuration conf) throws IOException {
-    return new Job(cluster, status, conf);
+    Job job = getInstance(status, conf);
+    job.setCluster(cluster);
+    return job;
   }
-  
+
   private void ensureState(JobState state) throws IllegalStateException {
     if (state != this.state) {
       throw new IllegalStateException("Job in state "+ this.state + 
@@ -254,6 +327,10 @@ public class Job extends JobContextImpl 
     updateStatus();
     return status;
   }
+  
+  private void setStatus(JobStatus status) {
+    this.status = status;
+  }
 
   /**
    * Returns the current state of the Job.
@@ -354,6 +431,12 @@ public class Job extends JobContextImpl 
     return status.isRetired();
   }
 
+  /** Only for mocks in unit tests. */
+  @Private
+  private void setCluster(Cluster cluster) {
+    this.cluster = cluster;
+  }
+
   /**
    * Dump stats to screen.
    */
@@ -1055,6 +1138,12 @@ public class Job extends JobContextImpl 
     return cluster != null;
   }
 
+  /** Only for mocking via unit tests. */
+  @Private
+  public JobSubmitter getJobSubmitter(FileSystem fs, 
+      ClientProtocol submitClient) throws IOException {
+    return new JobSubmitter(fs, submitClient);
+  }
   /**
    * Submit the job to the cluster and return immediately.
    * @throws IOException
@@ -1064,8 +1153,8 @@ public class Job extends JobContextImpl 
     ensureState(JobState.DEFINE);
     setUseNewAPI();
     connect();
-    final JobSubmitter submitter = new JobSubmitter(cluster.getFileSystem(),
-        cluster.getClient());
+    final JobSubmitter submitter = 
+        getJobSubmitter(cluster.getFileSystem(), cluster.getClient());
     status = ugi.doAs(new PrivilegedExceptionAction<JobStatus>() {
       public JobStatus run() throws IOException, InterruptedException, 
       ClassNotFoundException {
@@ -1114,7 +1203,7 @@ public class Job extends JobContextImpl 
       throws IOException, InterruptedException {
     String lastReport = null;
     Job.TaskStatusFilter filter;
-    Configuration clientConf = cluster.getConf();
+    Configuration clientConf = getConfiguration();
     filter = Job.getTaskOutputFilter(clientConf);
     JobID jobId = getJobID();
     LOG.info("Running job: " + jobId);

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java Tue Sep 20 07:40:04 2011
@@ -319,7 +319,6 @@ class JobSubmitter {
    * @throws InterruptedException
    * @throws IOException
    */
-  @SuppressWarnings("unchecked")
   JobStatus submitJobInternal(Job job, Cluster cluster) 
   throws ClassNotFoundException, InterruptedException, IOException {
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java Tue Sep 20 07:40:04 2011
@@ -65,6 +65,9 @@ public interface MRConfig {
     "mapreduce.jobtracker.kerberos.principal";
 
   public static final String FRAMEWORK_NAME  = "mapreduce.framework.name";
+  public static final String CLASSIC_FRAMEWORK_NAME  = "classic";
+  public static final String YARN_FRAMEWORK_NAME  = "yarn";
+
   public static final String TASK_LOCAL_OUTPUT_CLASS =
   "mapreduce.task.local.output.class";
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java Tue Sep 20 07:40:04 2011
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -60,7 +61,11 @@ public class JobContextImpl implements J
   protected final Credentials credentials;
   
   public JobContextImpl(Configuration conf, JobID jobId) {
-    this.conf = new org.apache.hadoop.mapred.JobConf(conf);
+    if (conf instanceof JobConf) {
+      this.conf = (JobConf)conf;
+    } else {
+      this.conf = new JobConf(conf);
+    }
     this.jobId = jobId;
     this.credentials = this.conf.getCredentials();
     try {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java Tue Sep 20 07:40:04 2011
@@ -215,7 +215,7 @@ public class CLI extends Configured impl
     // Submit the request
     try {
       if (submitJobFile != null) {
-        Job job = Job.getInstance(cluster, new JobConf(submitJobFile));
+        Job job = Job.getInstance(new JobConf(submitJobFile));
         job.submit();
         System.out.println("Created job " + job.getJobID());
         exitCode = 0;

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 20 07:40:04 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1171806
+/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1173011
 /hadoop/core/branches/branch-0.19/mapred/src/java/mapred-default.xml:713112
 /hadoop/core/trunk/src/mapred/mapred-default.xml:776175-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java Tue Sep 20 07:40:04 2011
@@ -64,7 +64,7 @@ public class TestJobMonitorAndPrint exte
     when(cluster.getClient()).thenReturn(clientProtocol);
     JobStatus jobStatus = new JobStatus(new JobID("job_000", 1), 0f, 0f, 0f, 0f, 
         State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname", "tmp-jobfile", "tmp-url");
-    job = new Job(cluster, jobStatus, conf);
+    job = Job.getInstance(cluster, jobStatus, conf);
     job = spy(job);
   }
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java Tue Sep 20 07:40:04 2011
@@ -139,18 +139,11 @@ public class HsController extends AppCon
   /**
    * @return the page that will be used to render the /conf page
    */
+  @Override
   protected Class<? extends View> confPage() {
     return HsConfPage.class;
   }
-  
-  /**
-   * Render the /conf page
-   */
-  public void conf() {
-    requireJob();
-    render(confPage());
-  }
-  
+
   /**
    * @return the page about the current server.
    */

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java Tue Sep 20 07:40:04 2011
@@ -92,6 +92,60 @@ public class TestJobHistoryEvents {
         parsedJob.getState());
   }
 
+  /**
+   * Verify that all the events are flushed on stopping the HistoryHandler
+   * @throws Exception
+   */
+  @Test
+  public void testEventsFlushOnStop() throws Exception {
+
+    Configuration conf = new Configuration();
+    conf.set(MRJobConfig.USER_NAME, "test");
+    MRApp app = new MRAppWithSpecialHistoryHandler(1, 0, true, this
+        .getClass().getName(), true);
+    app.submit(conf);
+    Job job = app.getContext().getAllJobs().values().iterator().next();
+    JobId jobId = job.getID();
+    LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
+    app.waitForState(job, JobState.SUCCEEDED);
+
+    // make sure all events are flushed
+    app.waitForState(Service.STATE.STOPPED);
+    /*
+     * Use HistoryContext to read logged events and verify the number of
+     * completed maps
+     */
+    HistoryContext context = new JobHistory();
+    ((JobHistory) context).init(conf);
+    Job parsedJob = context.getJob(jobId);
+    Assert.assertEquals("CompletedMaps not correct", 1, parsedJob
+        .getCompletedMaps());
+
+    Map<TaskId, Task> tasks = parsedJob.getTasks();
+    Assert.assertEquals("No of tasks not correct", 1, tasks.size());
+    verifyTask(tasks.values().iterator().next());
+
+    Map<TaskId, Task> maps = parsedJob.getTasks(TaskType.MAP);
+    Assert.assertEquals("No of maps not correct", 1, maps.size());
+
+    Assert.assertEquals("Job state not currect", JobState.SUCCEEDED,
+        parsedJob.getState());
+  }
+
+  @Test
+  public void testJobHistoryEventHandlerIsFirstServiceToStop() {
+    MRApp app = new MRAppWithSpecialHistoryHandler(1, 0, true, this
+        .getClass().getName(), true);
+    Configuration conf = new Configuration();
+    app.init(conf);
+    Service[] services = app.getServices().toArray(new Service[0]);
+    // Verifying that it is the last to be added is the same as verifying that it is
+    // the first to be stopped. CompositeService related tests already validate
+    // this.
+    Assert.assertEquals("JobHistoryEventHandler",
+        services[services.length - 1].getName());
+  }
+
   private void verifyTask(Task task) {
     Assert.assertEquals("Task state not currect", TaskState.SUCCEEDED,
         task.getState());
@@ -116,14 +170,43 @@ public class TestJobHistoryEvents {
     @Override
     protected EventHandler<JobHistoryEvent> createJobHistoryHandler(
         AppContext context) {
-      JobHistoryEventHandler eventHandler = new JobHistoryEventHandler(context, 
-          getStartCount());
+      JobHistoryEventHandler eventHandler = new JobHistoryEventHandler(
+          context, getStartCount());
       return eventHandler;
     }
   }
-  
+
+  /**
+   * MRApp with special HistoryEventHandler that writes events only during stop.
+   * This is to simulate events that don't get written by the eventHandling
+   * thread due to say a slow DFS and verify that they are flushed during stop.
+   */
+  private static class MRAppWithSpecialHistoryHandler extends MRApp {
+
+    public MRAppWithSpecialHistoryHandler(int maps, int reduces,
+        boolean autoComplete, String testName, boolean cleanOnStart) {
+      super(maps, reduces, autoComplete, testName, cleanOnStart);
+    }
+
+    @Override
+    protected EventHandler<JobHistoryEvent> createJobHistoryHandler(
+        AppContext context) {
+      return new JobHistoryEventHandler(context, getStartCount()) {
+        @Override
+        public void start() {
+          // Don't start any event draining thread.
+          super.eventHandlingThread = new Thread();
+          super.eventHandlingThread.start();
+        }
+      };
+    }
+
+  }
+
   public static void main(String[] args) throws Exception {
     TestJobHistoryEvents t = new TestJobHistoryEvents();
     t.testHistoryEvents();
+    t.testEventsFlushOnStop();
+    t.testJobHistoryEventHandlerIsFirstServiceToStop();
   }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java Tue Sep 20 07:40:04 2011
@@ -23,6 +23,7 @@ import java.security.PrivilegedAction;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -74,8 +75,10 @@ public class ClientCache {
 
   private MRClientProtocol instantiateHistoryProxy()
   throws IOException {
-	final String serviceAddr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS,
-	          JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
+    final String serviceAddr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS);
+    if (StringUtils.isEmpty(serviceAddr)) {
+      return null;
+    }
     LOG.info("Connecting to HistoryServer at: " + serviceAddr);
     final Configuration myConf = new Configuration(conf);
     myConf.setClass(YarnConfiguration.YARN_SECURITY_INFO,

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java Tue Sep 20 07:40:04 2011
@@ -63,7 +63,6 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.ApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
-import org.apache.hadoop.yarn.exceptions.impl.pb.YarnRemoteExceptionPBImpl;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
@@ -86,6 +85,7 @@ class ClientServiceDelegate {
   private boolean forceRefresh;
   private MRClientProtocol realProxy = null;
   private RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
+  private static String UNKNOWN_USER = "Unknown User";
 
   ClientServiceDelegate(Configuration conf, ResourceMgrDelegate rm, 
       JobID jobId, MRClientProtocol historyServerProxy) {
@@ -126,7 +126,12 @@ class ClientServiceDelegate {
     // and redirect to the history server.
     ApplicationReport application = rm.getApplicationReport(appId);
     String serviceAddr = null;
-    while (ApplicationState.RUNNING.equals(application.getState())) {
+    while (application == null || ApplicationState.RUNNING.equals(application.getState())) {
+      if (application == null) {
+        LOG.info("Could not get Job info from RM for job " + jobId
+            + ". Redirecting to job history server.");
+        return checkAndGetHSProxy(UNKNOWN_USER, JobState.NEW);
+      }
       try {
         if (application.getHost() == null || "".equals(application.getHost())) {
           LOG.debug("AM not assigned to Job. Waiting to get the AM ...");
@@ -163,6 +168,11 @@ class ClientServiceDelegate {
           throw new YarnException(e1);
         }
         application = rm.getApplicationReport(appId);
+        if (application == null) {
+          LOG.info("Could not get Job info from RM for job " + jobId
+              + ". Redirecting to job history server.");
+          return checkAndGetHSProxy(UNKNOWN_USER, JobState.RUNNING);
+        }
       } catch (InterruptedException e) {
         LOG.warn("getProxy() call interruped", e);
         throw new YarnException(e);
@@ -176,7 +186,7 @@ class ClientServiceDelegate {
     
     String user = application.getUser();
     if (user == null) {
-      throw new YarnRemoteExceptionPBImpl("User is not set in the application report");
+      throw RPCUtil.getRemoteException("User is not set in the application report");
     }
     if (application.getState() == ApplicationState.NEW ||
         application.getState() == ApplicationState.SUBMITTED) {
@@ -199,11 +209,19 @@ class ClientServiceDelegate {
     if (application.getState() == ApplicationState.SUCCEEDED) {
       LOG.info("Application state is completed. " +
           "Redirecting to job history server");
-      realProxy = historyServerProxy;
+      realProxy = checkAndGetHSProxy(user, JobState.SUCCEEDED);
     }
     return realProxy;
   }
 
+  private MRClientProtocol checkAndGetHSProxy(String user, JobState state) {
+    if (null == historyServerProxy) {
+      LOG.warn("Job History Server is not configured.");
+      return getNotRunningJob(user, state);
+    }
+    return historyServerProxy;
+  }
+
   private void instantiateAMProxy(final String serviceAddr) throws IOException {
     UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
     LOG.trace("Connecting to ApplicationMaster at: " + serviceAddr);
@@ -236,11 +254,14 @@ class ClientServiceDelegate {
       try {
         return methodOb.invoke(getProxy(), args);
       } catch (YarnRemoteException yre) {
-        LOG.warn("Exception thrown by remote end.");
-        LOG.warn(RPCUtil.toString(yre));
+        LOG.warn("Exception thrown by remote end.", yre);
         throw yre;
       } catch (InvocationTargetException e) {
-        //TODO Finite # of errors before giving up?
+        if (e.getTargetException() instanceof YarnRemoteException) {
+          LOG.warn("Exception thrown by remote end.", e
+              .getTargetException());
+          throw (YarnRemoteException) e.getTargetException();
+        }
         LOG.info("Failed to contact AM/History for job " + jobId
             + "  Will retry..", e.getTargetException());
         forceRefresh = true;

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java Tue Sep 20 07:40:04 2011
@@ -74,16 +74,16 @@ import org.apache.hadoop.yarn.security.c
 public class ResourceMgrDelegate {
   private static final Log LOG = LogFactory.getLog(ResourceMgrDelegate.class);
       
-  private Configuration conf;
+  private YarnConfiguration conf;
   ClientRMProtocol applicationsManager;
   private ApplicationId applicationId;
   private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
 
-  public ResourceMgrDelegate(Configuration conf) {
+  public ResourceMgrDelegate(YarnConfiguration conf) {
     this.conf = conf;
-    YarnRPC rpc = YarnRPC.create(conf);
+    YarnRPC rpc = YarnRPC.create(this.conf);
     InetSocketAddress rmAddress =
-        NetUtils.createSocketAddr(conf.get(
+        NetUtils.createSocketAddr(this.conf.get(
             YarnConfiguration.RM_ADDRESS,
             YarnConfiguration.DEFAULT_RM_ADDRESS));
     LOG.info("Connecting to ResourceManager at " + rmAddress);

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java Tue Sep 20 07:40:04 2011
@@ -20,16 +20,13 @@ package org.apache.hadoop.mapred;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 import java.util.Vector;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
@@ -43,7 +40,6 @@ import org.apache.hadoop.mapreduce.Count
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.JobStatus;
-import org.apache.hadoop.mapreduce.JobSubmissionFiles;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.QueueAclsInfo;
 import org.apache.hadoop.mapreduce.QueueInfo;
@@ -62,7 +58,6 @@ import org.apache.hadoop.security.Creden
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -91,7 +86,7 @@ public class YARNRunner implements Clien
   private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
   private ResourceMgrDelegate resMgrDelegate;
   private ClientCache clientCache;
-  private YarnConfiguration conf;
+  private Configuration conf;
   private final FileContext defaultFileContext;
 
   /**
@@ -99,22 +94,21 @@ public class YARNRunner implements Clien
    * yarn
    * @param conf the configuration object for the client
    */
-  public YARNRunner(YarnConfiguration conf) {
-   this(conf, new ResourceMgrDelegate(conf));
+  public YARNRunner(Configuration conf) {
+   this(conf, new ResourceMgrDelegate(new YarnConfiguration(conf)));
   }
 
   /**
-   * Similar to {@link #YARNRunner(YarnConfiguration)} but allowing injecting 
+   * Similar to {@link #YARNRunner(Configuration)} but allowing injecting 
    * {@link ResourceMgrDelegate}. Enables mocking and testing.
    * @param conf the configuration object for the client
    * @param resMgrDelegate the resourcemanager client handle.
    */
-  public YARNRunner(YarnConfiguration conf, ResourceMgrDelegate resMgrDelegate) {
+  public YARNRunner(Configuration conf, ResourceMgrDelegate resMgrDelegate) {
     this.conf = conf;
     try {
       this.resMgrDelegate = resMgrDelegate;
-      this.clientCache = new ClientCache(this.conf,
-          resMgrDelegate);
+      this.clientCache = new ClientCache(this.conf, resMgrDelegate);
       this.defaultFileContext = FileContext.getFileContext(this.conf);
     } catch (UnsupportedFileSystemException ufe) {
       throw new RuntimeException("Error in instantiating YarnClient", ufe);
@@ -223,23 +217,10 @@ public class YARNRunner implements Clien
       throw new YarnException(e);
     }
 
-    // XXX Remove
-    Path submitJobDir = new Path(jobSubmitDir);
-    FileContext defaultFS = FileContext.getFileContext(conf);
-    Path submitJobFile =
-      defaultFS.makeQualified(JobSubmissionFiles.getJobConfPath(submitJobDir));
-    FSDataInputStream in = defaultFS.open(submitJobFile);
-    conf.addResource(in);
-    // ---
-
     // Construct necessary information to start the MR AM
     ApplicationSubmissionContext appContext = 
       createApplicationSubmissionContext(conf, jobSubmitDir, ts);
     
-    // XXX Remove
-    in.close();
-    // ---
-    
     // Submit to ResourceManager
     ApplicationId applicationId = resMgrDelegate.submitApplication(appContext);
     

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YarnClientProtocolProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YarnClientProtocolProvider.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YarnClientProtocolProvider.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YarnClientProtocolProvider.java Tue Sep 20 07:40:04 2011
@@ -25,14 +25,13 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
 import org.apache.hadoop.mapreduce.protocol.ClientProtocolProvider;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
 public class YarnClientProtocolProvider extends ClientProtocolProvider {
 
   @Override
   public ClientProtocol create(Configuration conf) throws IOException {
-    if ("yarn".equals(conf.get(MRConfig.FRAMEWORK_NAME))) {
-      return new YARNRunner(new YarnConfiguration(conf));
+    if (MRConfig.YARN_FRAMEWORK_NAME.equals(conf.get(MRConfig.FRAMEWORK_NAME))) {
+      return new YARNRunner(conf);
     }
     return null;
   }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java Tue Sep 20 07:40:04 2011
@@ -119,7 +119,7 @@ public class TestClientRedirect {
   public void testRedirect() throws Exception {
     
     Configuration conf = new YarnConfiguration();
-    conf.set(MRConfig.FRAMEWORK_NAME, "yarn");
+    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
     conf.set(YarnConfiguration.RM_ADDRESS, RMADDRESS);
     conf.set(JHAdminConfig.MR_HISTORY_ADDRESS, HSHOSTADDRESS);
     RMService rmService = new RMService("test");
@@ -139,8 +139,8 @@ public class TestClientRedirect {
     Cluster cluster = new Cluster(conf);
     org.apache.hadoop.mapreduce.JobID jobID =
       new org.apache.hadoop.mapred.JobID("201103121733", 1);
-    org.apache.hadoop.mapreduce.Counters counters = cluster.getJob(jobID)
-        .getCounters();
+    org.apache.hadoop.mapreduce.Counters counters = 
+        cluster.getJob(jobID).getCounters();
     validateCounters(counters);
     Assert.assertTrue(amContact);
    

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java Tue Sep 20 07:40:04 2011
@@ -28,6 +28,7 @@ import org.apache.hadoop.mapred.ShuffleH
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
+import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.MiniYARNCluster;
@@ -59,7 +60,7 @@ public class MiniMRYarnCluster extends M
 
   @Override
   public void init(Configuration conf) {
-    conf.set(MRConfig.FRAMEWORK_NAME, "yarn");
+    conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
     conf.set(MRJobConfig.USER_NAME, System.getProperty("user.name"));
     conf.set(MRJobConfig.MR_AM_STAGING_DIR, new File(getTestWorkDir(),
         "apps_staging_dir/${user.name}/").getAbsolutePath());
@@ -82,6 +83,10 @@ public class MiniMRYarnCluster extends M
     // for corresponding uberized tests.
     conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
 
+    // Set config for JH Server
+    conf.set(JHAdminConfig.MR_HISTORY_ADDRESS, 
+        JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
+    
     super.init(conf);
   }
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java Tue Sep 20 07:40:04 2011
@@ -52,6 +52,7 @@ import org.apache.hadoop.mapreduce.Count
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.JobStatus;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
@@ -133,11 +134,15 @@ public class TestMRJobs {
       return;
     }
 
+    Configuration sleepConf = new Configuration(mrCluster.getConfig());
+    // set master address to local to test that local mode is applied iff framework == classic and master_address == local
+    sleepConf.set(MRConfig.MASTER_ADDRESS, "local");
+    
     SleepJob sleepJob = new SleepJob();
-    sleepJob.setConf(mrCluster.getConfig());
-
-    int numReduces = mrCluster.getConfig().getInt("TestMRJobs.testSleepJob.reduces", 2); // or mrCluster.getConfig().getInt(MRJobConfig.NUM_REDUCES, 2);
+    sleepJob.setConf(sleepConf);
 
+    int numReduces = sleepConf.getInt("TestMRJobs.testSleepJob.reduces", 2); // or sleepConf.getInt(MRJobConfig.NUM_REDUCES, 2);
+   
     // job with 3 maps (10s) and numReduces reduces (5s), 1 "record" each:
     Job job = sleepJob.createJob(3, numReduces, 10000, 1, 5000, 1);
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java Tue Sep 20 07:40:04 2011
@@ -60,20 +60,16 @@ public abstract class ApplicationAttempt
   @Unstable
   public abstract void setAttemptId(int attemptId);
 
-  
-  
-  protected static final NumberFormat idFormat = NumberFormat.getInstance();
-  static {
-    idFormat.setGroupingUsed(false);
-    idFormat.setMinimumIntegerDigits(4);
-  }
-
-  protected static final NumberFormat counterFormat = NumberFormat
-      .getInstance();
-  static {
-    counterFormat.setGroupingUsed(false);
-    counterFormat.setMinimumIntegerDigits(6);
-  }
+  static final ThreadLocal<NumberFormat> attemptIdFormat =
+      new ThreadLocal<NumberFormat>() {
+        @Override
+        public NumberFormat initialValue() {
+          NumberFormat fmt = NumberFormat.getInstance();
+          fmt.setGroupingUsed(false);
+          fmt.setMinimumIntegerDigits(6);
+          return fmt;
+        }
+      };
 
   @Override
   public int hashCode() {
@@ -81,22 +77,25 @@ public abstract class ApplicationAttempt
     final int prime = 31;
     int result = 1;
     ApplicationId appId = getApplicationId();
-    result = prime * result + ((appId == null) ? 0 : appId.hashCode());
+    result = prime * result +  appId.hashCode();
     result = prime * result + getAttemptId();
     return result;
   }
 
   @Override
-  public boolean equals(Object other) {
-    if (other == null)
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
       return false;
-    if (other.getClass().isAssignableFrom(this.getClass())) {
-      ApplicationAttemptId otherAttemptId = (ApplicationAttemptId) other;
-      if (this.getApplicationId().equals(otherAttemptId.getApplicationId())) {
-        return this.getAttemptId() == otherAttemptId.getAttemptId();
-      }
-    }
-    return false;
+    if (getClass() != obj.getClass())
+      return false;
+    ApplicationAttemptId other = (ApplicationAttemptId) obj;
+    if (!this.getApplicationId().equals(other.getApplicationId()))
+      return false;
+    if (this.getAttemptId() != other.getAttemptId())
+      return false;
+    return true;
   }
 
   @Override
@@ -109,14 +108,14 @@ public abstract class ApplicationAttempt
       return compareAppIds;
     }
   }
-  
+
   @Override
   public String toString() {
-    String id =
-        (this.getApplicationId() != null) ? this.getApplicationId()
-            .getClusterTimestamp()
-            + "_"
-            + idFormat.format(this.getApplicationId().getId()) : "none";
-    return "appattempt_" + id + "_" + counterFormat.format(getAttemptId());
+    StringBuilder sb = new StringBuilder("appattempt_");
+    sb.append(this.getApplicationId().getClusterTimestamp()).append("_");
+    sb.append(ApplicationId.appIdFormat.get().format(
+        this.getApplicationId().getId()));
+    sb.append("_").append(attemptIdFormat.get().format(getAttemptId()));
+    return sb.toString();
   }
-}
+}
\ No newline at end of file

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java Tue Sep 20 07:40:04 2011
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.yarn.api.records;
 
+import java.text.NumberFormat;
+
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
@@ -60,8 +62,20 @@ public abstract class ApplicationId impl
   @Private
   @Unstable
   public abstract void setClusterTimestamp(long clusterTimestamp);
+
   
   
+  static final ThreadLocal<NumberFormat> appIdFormat =
+    new ThreadLocal<NumberFormat>() {
+      @Override
+      public NumberFormat initialValue() {
+        NumberFormat fmt = NumberFormat.getInstance();
+        fmt.setGroupingUsed(false);
+        fmt.setMinimumIntegerDigits(4);
+        return fmt;
+      }
+    };
+
   @Override
   public int compareTo(ApplicationId other) {
     if (this.getClusterTimestamp() - other.getClusterTimestamp() == 0) {
@@ -74,9 +88,10 @@ public abstract class ApplicationId impl
 
   @Override
   public String toString() {
-    return "application_" + this.getClusterTimestamp() + "_" + this.getId();
+    return "application_" + this.getClusterTimestamp() + "_"
+        + appIdFormat.get().format(getId());
   }
-  
+
   @Override
   public int hashCode() {
     // Generated by eclipse.
@@ -90,15 +105,18 @@ public abstract class ApplicationId impl
   }
 
   @Override
-  public boolean equals(Object other) {
-    if (other == null) return false;
-    if (other.getClass().isAssignableFrom(this.getClass())) {
-      ApplicationId otherAppId = (ApplicationId)other;
-      if (this.getClusterTimestamp() == otherAppId.getClusterTimestamp() &&
-          this.getId() == otherAppId.getId()) {
-        return true;
-      }
-    }
-    return false;
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    ApplicationId other = (ApplicationId) obj;
+    if (this.getClusterTimestamp() != other.getClusterTimestamp())
+      return false;
+    if (this.getId() != other.getId())
+      return false;
+    return true;
   }
-}
+}
\ No newline at end of file

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java Tue Sep 20 07:40:04 2011
@@ -59,19 +59,6 @@ public abstract class ContainerId implem
   public abstract void setId(int id);
  
   
-  // TODO: Why thread local?
-  // ^ NumberFormat instances are not threadsafe
-  private static final ThreadLocal<NumberFormat> appIdFormat =
-      new ThreadLocal<NumberFormat>() {
-        @Override
-        public NumberFormat initialValue() {
-          NumberFormat fmt = NumberFormat.getInstance();
-          fmt.setGroupingUsed(false);
-          fmt.setMinimumIntegerDigits(4);
-          return fmt;
-        }
-      };
-
   // TODO: fail the app submission if attempts are more than 10 or something
   private static final ThreadLocal<NumberFormat> appAttemptIdFormat =
       new ThreadLocal<NumberFormat>() {
@@ -102,24 +89,24 @@ public abstract class ContainerId implem
     final int prime = 31;
     int result = 1;
     result = prime * result + getId();
-    result = prime * result
-        + ((getApplicationAttemptId() == null) ? 0 : getApplicationAttemptId().hashCode());
+    result = prime * result + getApplicationAttemptId().hashCode();
     return result;
   }
 
   @Override
-  public boolean equals(Object other) {
-    if (other == null) {
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
       return false;
-    }
-    if (other.getClass().isAssignableFrom(this.getClass())) {
-      ContainerId otherCId = (ContainerId)other;
-      if (this.getApplicationAttemptId().equals(
-          otherCId.getApplicationAttemptId())) {
-        return this.getId() == otherCId.getId();
-      }
-    }
-    return false;
+    if (getClass() != obj.getClass())
+      return false;
+    ContainerId other = (ContainerId) obj;
+    if (!this.getApplicationAttemptId().equals(other.getApplicationAttemptId()))
+      return false;
+    if (this.getId() != other.getId())
+      return false;
+    return true;
   }
 
   @Override
@@ -133,15 +120,18 @@ public abstract class ContainerId implem
     }
     
   }
-  
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
+    sb.append("container_");
     ApplicationId appId = getApplicationAttemptId().getApplicationId();
-    sb.append("container_").append(appId.getClusterTimestamp()).append("_");
-    sb.append(appIdFormat.get().format(appId.getId())).append("_");
-    sb.append(appAttemptIdFormat.get().format(getApplicationAttemptId().
-        getAttemptId())).append("_");
+    sb.append(appId.getClusterTimestamp()).append("_");
+    sb.append(ApplicationId.appIdFormat.get().format(appId.getId()))
+        .append("_");
+    sb.append(
+        appAttemptIdFormat.get().format(
+            getApplicationAttemptId().getAttemptId())).append("_");
     sb.append(containerIdFormat.get().format(getId()));
     return sb.toString();
   }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java Tue Sep 20 07:40:04 2011
@@ -32,7 +32,7 @@ import org.apache.hadoop.classification.
  */
 @Public
 @Stable
-public interface NodeId extends Comparable<NodeId> {
+public abstract class NodeId implements Comparable<NodeId> {
 
   /**
    * Get the <em>hostname</em> of the node.
@@ -40,11 +40,11 @@ public interface NodeId extends Comparab
    */ 
   @Public
   @Stable
-  String getHost();
+  public abstract String getHost();
   
   @Private
   @Unstable
-  void setHost(String host);
+  public abstract void setHost(String host);
 
   /**
    * Get the <em>port</em> for communicating with the node.
@@ -52,9 +52,54 @@ public interface NodeId extends Comparab
    */
   @Public
   @Stable
-  int getPort();
+  public abstract int getPort();
   
   @Private
   @Unstable
-  void setPort(int port);
+  public abstract void setPort(int port);
+
+  @Override
+  public String toString() {
+    return this.getHost() + ":" + this.getPort();
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + this.getHost().hashCode();
+    result = prime * result + this.getPort();
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    NodeId other = (NodeId) obj;
+    if (!this.getHost().equals(other.getHost()))
+      return false;
+    if (this.getPort() != other.getPort())
+      return false;
+    return true;
+  }
+
+  @Override
+  public int compareTo(NodeId other) {
+    int hostCompare = this.getHost().compareTo(other.getHost());
+    if (hostCompare == 0) {
+      if (this.getPort() > other.getPort()) {
+        return 1;
+      } else if (this.getPort() < other.getPort()) {
+        return -1;
+      }
+      return 0;
+    }
+    return hostCompare;
+  }
+
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptIdPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptIdPBImpl.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptIdPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptIdPBImpl.java Tue Sep 20 07:40:04 2011
@@ -25,7 +25,8 @@ import org.apache.hadoop.yarn.proto.Yarn
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
 
 public class ApplicationAttemptIdPBImpl extends ApplicationAttemptId {
-  ApplicationAttemptIdProto proto = ApplicationAttemptIdProto.getDefaultInstance();
+  ApplicationAttemptIdProto proto = ApplicationAttemptIdProto
+      .getDefaultInstance();
   ApplicationAttemptIdProto.Builder builder = null;
   boolean viaProto = false;
   
@@ -48,7 +49,9 @@ public class ApplicationAttemptIdPBImpl 
   }
 
   private synchronized void mergeLocalToBuilder() {
-    if (this.applicationId != null && !((ApplicationIdPBImpl)applicationId).getProto().equals(builder.getApplicationId())) {
+    if (this.applicationId != null
+        && !((ApplicationIdPBImpl) applicationId).getProto().equals(
+            builder.getApplicationId())) {
       builder.setApplicationId(convertToProtoFormat(this.applicationId));
     }
   }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeIdPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeIdPBImpl.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeIdPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeIdPBImpl.java Tue Sep 20 07:40:04 2011
@@ -20,13 +20,12 @@ package org.apache.hadoop.yarn.api.recor
 
 
 import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.api.records.ProtoBase;
 import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder;
 
 
     
-public class NodeIdPBImpl extends ProtoBase<NodeIdProto> implements NodeId {
+public class NodeIdPBImpl extends NodeId {
   NodeIdProto proto = NodeIdProto.getDefaultInstance();
   NodeIdProto.Builder builder = null;
   boolean viaProto = false;
@@ -40,13 +39,13 @@ public class NodeIdPBImpl extends ProtoB
     viaProto = true;
   }
   
-  public NodeIdProto getProto() {
+  public synchronized NodeIdProto getProto() {
     proto = viaProto ? proto : builder.build();
     viaProto = true;
     return proto;
   }
 
-  private void maybeInitBuilder() {
+  private synchronized void maybeInitBuilder() {
     if (viaProto || builder == null) {
       builder = NodeIdProto.newBuilder(proto);
     }
@@ -54,77 +53,26 @@ public class NodeIdPBImpl extends ProtoB
   }
     
   @Override
-  public String getHost() {
+  public synchronized String getHost() {
     NodeIdProtoOrBuilder p = viaProto ? proto : builder;
     return (p.getHost());
   }
 
   @Override
-  public void setHost(String host) {
+  public synchronized void setHost(String host) {
     maybeInitBuilder();
     builder.setHost((host));
   }
 
   @Override
-  public int getPort() {
+  public synchronized int getPort() {
     NodeIdProtoOrBuilder p = viaProto ? proto : builder;
     return (p.getPort());
   }
 
   @Override
-  public void setPort(int port) {
+  public synchronized void setPort(int port) {
     maybeInitBuilder();
     builder.setPort((port));
   }
-
-  @Override
-  public String toString() {
-    return this.getHost() + ":" + this.getPort();
-  }
-  
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = super.hashCode();
-    String host = this.getHost();
-    result = prime * result + ((host == null) ? 0 : host.hashCode());
-    result = prime * result + this.getPort();
-    return result;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (!super.equals(obj))
-      return false;
-    if (getClass() != obj.getClass())
-      return false;
-    NodeIdPBImpl other = (NodeIdPBImpl) obj;
-    String host = this.getHost();
-    String otherHost = other.getHost();
-    if (host == null) {
-      if (otherHost != null)
-        return false;
-    } else if (!host.equals(otherHost))
-      return false;
-    if (this.getPort() != other.getPort())
-      return false;
-    return true;
-  }
-
-  @Override
-  public int compareTo(NodeId other) {
-    int hostCompare = this.getHost().compareTo(other.getHost());
-    if (hostCompare == 0) {
-      if (this.getPort() > other.getPort()) {
-        return 1;
-      } else if (this.getPort() < other.getPort()) {
-        return -1;
-      }
-      return 0;
-    }
-    return hostCompare;
-  }
-
 }  

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Router.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Router.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Router.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Router.java Tue Sep 20 07:40:04 2011
@@ -25,6 +25,8 @@ import com.google.common.collect.Maps;
 
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
+import java.lang.NoSuchMethodException;
+import java.lang.SecurityException;
 import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
@@ -88,23 +90,28 @@ class Router {
   private Dest addController(WebApp.HTTP httpMethod, String path,
                              Class<? extends Controller> cls,
                              String action, List<String> names) {
-    for (Method method : cls.getDeclaredMethods()) {
-      if (method.getName().equals(action) &&
-          method.getParameterTypes().length == 0 &&
-          Modifier.isPublic(method.getModifiers())) {
-        // TODO: deal with parameters using the names
-        Dest dest = routes.get(path);
-        if (dest == null) {
-          method.setAccessible(true); // avoid any runtime checks
-          dest = new Dest(path, method, cls, names, httpMethod);
-          routes.put(path, dest);
-          return dest;
-        }
-        dest.methods.add(httpMethod);
+    try {
+      // Look for the method in all public methods declared in the class
+      // or inherited by the class.
+      // Note: this does not distinguish methods with the same signature
+      // but different return types.
+      // TODO: We may want to deal with methods that take parameters in the future
+      Method method = cls.getMethod(action, null);
+      Dest dest = routes.get(path);
+      if (dest == null) {
+        method.setAccessible(true); // avoid any runtime checks
+        dest = new Dest(path, method, cls, names, httpMethod);
+        routes.put(path, dest);
         return dest;
       }
+      dest.methods.add(httpMethod);
+      return dest;
+    } catch (NoSuchMethodException nsme) {
+      throw new WebAppException(action + "() not found in " + cls);
+    } catch (SecurityException se) {
+      throw new WebAppException("Security exception thrown for " + action +
+        "() in " + cls);
     }
-    throw new WebAppException(action + "() not found in " + cls);
   }
 
   private void addDefaultView(Dest dest) {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/HeaderBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/HeaderBlock.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/HeaderBlock.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/HeaderBlock.java Tue Sep 20 07:40:04 2011
@@ -23,10 +23,14 @@ import static org.apache.hadoop.yarn.web
 public class HeaderBlock extends HtmlBlock {
 
   @Override protected void render(Block html) {
+    String loggedIn = ""; 
+    if (request().getRemoteUser() != null) {
+      loggedIn = "Logged in as: " + request().getRemoteUser();
+    }
     html.
       div("#header.ui-widget").
         div("#user").
-          _("Logged in as: "+ request().getRemoteUser())._().
+          _(loggedIn)._().
         div("#logo").
           img("/static/hadoop-st.png")._().
         h1($(TITLE))._();

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java Tue Sep 20 07:40:04 2011
@@ -1,3 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
 package org.apache.hadoop.yarn.api;
 
 import junit.framework.Assert;
@@ -32,6 +51,10 @@ public class TestApplicationAttemptId {
     Assert.assertFalse(a1.hashCode() == a3.hashCode());
     Assert.assertFalse(a1.hashCode() == a4.hashCode());
     
+    long ts = System.currentTimeMillis();
+    ApplicationAttemptId a6 = createAppAttemptId(ts, 543627, 33492611);
+    Assert.assertEquals("appattempt_10_0001_000001", a1.toString());
+    Assert.assertEquals("appattempt_" + ts + "_543627_33492611", a6.toString());
   }
 
   private ApplicationAttemptId createAppAttemptId(long clusterTimeStamp,
@@ -45,4 +68,9 @@ public class TestApplicationAttemptId {
     appAttemptId.setAttemptId(attemptId);
     return appAttemptId;
   }
-}
+  
+  public static void main(String[] args) throws Exception {
+    TestApplicationAttemptId t = new TestApplicationAttemptId();
+    t.testApplicationAttemptId();
+  }
+}
\ No newline at end of file

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java Tue Sep 20 07:40:04 2011
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.yarn.api;
 
 import junit.framework.Assert;
@@ -26,6 +44,11 @@ public class TestApplicationId {
     Assert.assertTrue(a1.hashCode() == a3.hashCode());
     Assert.assertFalse(a1.hashCode() == a2.hashCode());
     Assert.assertFalse(a2.hashCode() == a4.hashCode());
+    
+    long ts = System.currentTimeMillis();
+    ApplicationId a5 = createAppId(ts, 45436343);
+    Assert.assertEquals("application_10_0001", a1.toString());
+    Assert.assertEquals("application_" + ts + "_45436343", a5.toString());
   }
 
   private ApplicationId createAppId(long clusterTimeStamp, int id) {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java Tue Sep 20 07:40:04 2011
@@ -1,3 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
 package org.apache.hadoop.yarn.api;
 
 import junit.framework.Assert;
@@ -32,6 +51,12 @@ public class TestContainerId {
     Assert.assertFalse(c1.hashCode() == c2.hashCode());
     Assert.assertFalse(c1.hashCode() == c4.hashCode());
     Assert.assertFalse(c1.hashCode() == c5.hashCode());
+    
+    long ts = System.currentTimeMillis();
+    ContainerId c6 = createContainerId(ts, 36473, 4365472, 25645811);
+    Assert.assertEquals("container_10_0001_01_000001", c1.toString());
+    Assert.assertEquals("container_" + ts + "_36473_4365472_25645811",
+        c6.toString());
   }
 
   private ContainerId createContainerId(long clusterTimestamp, int appIdInt,

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java Tue Sep 20 07:40:04 2011
@@ -97,7 +97,7 @@ public class ResourceTrackerService exte
   public synchronized void init(Configuration conf) {
     String resourceTrackerBindAddress =
       conf.get(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
-          YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS);
+          YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS);
     resourceTrackerAddress = NetUtils.createSocketAddr(resourceTrackerBindAddress);
 
     RackResolver.init(conf);

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMApp.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMApp.java Tue Sep 20 07:40:04 2011
@@ -18,40 +18,120 @@
 
 package org.apache.hadoop.yarn.server.resourcemanager.rmapp;
 
+import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.server.resourcemanager.recovery.ApplicationsStore;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.ApplicationsStore.ApplicationStore;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
 
+/**
+ * The read interface to an Application in the ResourceManager. Take a 
+ * look at {@link RMAppImpl} for its implementation. This interface 
+ * exposes methods to access various updates in application status/report.
+ */
 public interface RMApp extends EventHandler<RMAppEvent>{
 
+  /**
+   * The application id for this {@link RMApp}.
+   * @return the {@link ApplicationId} for this {@link RMApp}.
+   */
   ApplicationId getApplicationId();
 
+  /**
+   * The current state of the {@link RMApp}.
+   * @return the current state {@link RMAppState} for this application.
+   */
   RMAppState getState();
 
+  /**
+   * The user who submitted this application.
+   * @return the user who submitted the application.
+   */
   String getUser();
 
+  /**
+   * Progress of application.
+   * @return the progress of the {@link RMApp}.
+   */
   float getProgress();
 
+  /**
+   * {@link RMApp} can have multiple application attempts {@link RMAppAttempt}.
+   * This method returns the {@link RMAppAttempt} corresponding to
+   *  {@link ApplicationAttemptId}.
+   * @param appAttemptId the application attempt id
+   * @return  the {@link RMAppAttempt} corresponding to the {@link ApplicationAttemptId}.
+   */
   RMAppAttempt getRMAppAttempt(ApplicationAttemptId appAttemptId);
 
+  /**
+   * Each Application is submitted to a queue decided by {@link 
+   * ApplicationSubmissionContext#setQueue(String)}.
+   * This method returns the queue to which an application was submitted.
+   * @return the queue to which the application was submitted to.
+   */
   String getQueue();
 
+  /**
+   * The name of the application as set in {@link 
+   * ApplicationSubmissionContext#setApplicationName(String)}.
+   * @return the name of the application.
+   */
   String getName();
 
+  /**
+   * {@link RMApp} can have multiple application attempts {@link RMAppAttempt}.
+   * This method returns the current {@link RMAppAttempt}.
+   * @return the current {@link RMAppAttempt} 
+   */
   RMAppAttempt getCurrentAppAttempt();
 
+  /**
+   * To get the status of an application in the RM, this method can be used.
+   * @return the {@link ApplicationReport} detailing the status of the application.
+   */
   ApplicationReport createAndGetApplicationReport();
 
+  /**
+   * Application level metadata is stored in {@link ApplicationStore} which 
+   * can persist the information.
+   * @return the {@link ApplicationStore}  for this {@link RMApp}.
+   */
   ApplicationStore getApplicationStore();
 
+  /**
+   * The finish time of the {@link RMApp}
+   * @return the finish time of the application.
+   */
   long getFinishTime();
 
+  /**
+   * The start time of the application.
+   * @return the start time of the application.
+   */
   long getStartTime();
 
+  /**
+   * The tracking url for the application master.
+   * @return the tracking url for the application master.
+   */
   String getTrackingUrl();
 
+  /**
+   * the diagnostics information for the application master.
+   * @return the diagnostics information for the application master.
+   */
   StringBuilder getDiagnostics();
+  
+  /**
+   * The final state of the AM when unregistering as in 
+   * {@link FinishApplicationMasterRequest#setFinalState(String)}.
+   * @return the final state of the AM as set in 
+   * {@link FinishApplicationMasterRequest#setFinalState(String)}.
+   */
+  String getAMFinalState();
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java Tue Sep 20 07:40:04 2011
@@ -40,11 +40,10 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.server.resourcemanager.ApplicationMasterService;
-import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
-import org.apache.hadoop.yarn.server.resourcemanager.recovery.ApplicationsStore.ApplicationStore;
 import org.apache.hadoop.yarn.server.resourcemanager.RMAppManagerEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.RMAppManagerEventType;
-import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.AMLivelinessMonitor;
+import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
+import org.apache.hadoop.yarn.server.resourcemanager.recovery.ApplicationsStore.ApplicationStore;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
@@ -195,6 +194,19 @@ public class RMAppImpl implements RMApp 
   }
 
   @Override
+  public String getAMFinalState() {
+    this.readLock.lock();
+    try {
+      if (currentAttempt != null) {
+        return currentAttempt.getAMFinalState();
+      }
+      return "UNKNOWN";
+    } finally {
+      this.readLock.unlock();
+    }
+  }
+  
+  @Override
   public RMAppState getState() {
     this.readLock.lock();
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttempt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttempt.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttempt.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttempt.java Tue Sep 20 07:40:04 2011
@@ -26,33 +26,103 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerStatus;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
 
+/**
+ * Interface to an Application Attempt in the Resource Manager.
+ * A {@link RMApp} can have multiple app attempts based on 
+ * {@link YarnConfiguration#RM_AM_MAX_RETRIES}. For specific 
+ * implementation take a look at {@link RMAppAttemptImpl}.
+ */
 public interface RMAppAttempt extends EventHandler<RMAppAttemptEvent>{
 
+  /**
+   * Get the application attempt id for this {@link RMAppAttempt}.
+   * @return the {@link ApplicationAttemptId} for this RM attempt.
+   */
   ApplicationAttemptId getAppAttemptId();
 
+  /**
+   * The state of the {@link RMAppAttempt}.
+   * @return the state {@link RMAppAttemptState} of this {@link RMAppAttempt}
+   */
   RMAppAttemptState getAppAttemptState();
-
+  
+  /**
+   * The host on which the {@link RMAppAttempt} is running/ran on.
+   * @return the host on which the {@link RMAppAttempt} ran/is running on.
+   */
   String getHost();
 
+  /**
+   * The rpc port of the {@link RMAppAttempt}.
+   * @return the rpc port of the {@link RMAppAttempt} to which the clients can
+   * connect.
+   */
   int getRpcPort();
 
+  /**
+   * The url at which the status of the application attempt can be accessed.
+   * @return the url at which the status of the attempt can be accessed.
+   */
   String getTrackingUrl();
 
+  /**
+   * The token required by the clients to talk to the application attempt
+   * @return the token required by the clients to talk to the application attempt
+   */
   String getClientToken();
 
+  /**
+   * Diagnostics information for the application attempt.
+   * @return diagnostics information for the application attempt.
+   */
   StringBuilder getDiagnostics();
 
+  /**
+   * Progress for the application attempt.
+   * @return the progress for this {@link RMAppAttempt}
+   */
   float getProgress();
 
+  /**
+   * The final state set by the AM.
+   * @return the final state that is set by the AM when unregistering itself.
+   */
+  String getAMFinalState();
+  
+  /**
+   * Nodes on which the containers for this {@link RMAppAttempt} ran.
+   * @return the set of nodes that ran any containers from this {@link RMAppAttempt}
+   */
   Set<NodeId> getRanNodes();
 
+  /**
+   * Return a list of the last set of finished containers, resetting the 
+   * finished containers to empty.
+   * @return the list of just finished containers, resetting the finished containers.
+   */
   List<ContainerStatus> pullJustFinishedContainers();
 
+  /**
+   * Return the list of last set of finished containers. This does not reset the 
+   * finished containers.
+   * @return the list of just finished containers, this does not reset the 
+   * finished containers.
+   */
   List<ContainerStatus> getJustFinishedContainers();
 
+  /**
+   * The container on which the Application Master is running.
+   * @return the {@link Container} on which the application master is running.
+   */
   Container getMasterContainer();
 
+  /**
+   * The application submission context for this {@link RMAppAttempt}.
+   * @return the application submission context for this Application.
+   */
   ApplicationSubmissionContext getSubmissionContext();
 }



Mime
View raw message