hadoop-mapreduce-commits mailing list archives

From: ss...@apache.org
Subject: svn commit: r1400232 [1/2] - in /hadoop/common/branches/MR-3902/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/ hadoop-mapreduce-client/hadoop-mapreduce-client-app2/sr...
Date: Fri, 19 Oct 2012 19:01:11 GMT
Author: sseth
Date: Fri Oct 19 19:01:10 2012
New Revision: 1400232

URL: http://svn.apache.org/viewvc?rev=1400232&view=rev
Log:
MAPREDUCE-4738. fix and re-enable disabled unit tests in the mr-app2 module. (sseth)

Added:
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/rm/container/
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/rm/container/TestAMContainerHelpers.java
Modified:
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/CHANGES.txt.MR-3902
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/MRAppMaster.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/TaskAttemptListener.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/client/MRClientService.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventFailRequest.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventKillRequest.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptScheduleEvent.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskAttemptImpl.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskImpl.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/local/LocalContainerRequestor.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/ContainerRequestor.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerAllocator.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerRequestor.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/container/AMContainerHelpers.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRApp.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRAppBenchmark.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/TestRecovery.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TestTaskAttempt.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/rm/TestRMContainerAllocator.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/webapp/TestAMWebApp.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/webapp/TestAMWebServices.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/webapp/TestAMWebServicesAttempts.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/webapp/TestAMWebServicesJobConf.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/webapp/TestAMWebServicesJobs.java
    hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/webapp/TestAMWebServicesTasks.java

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/CHANGES.txt.MR-3902
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/CHANGES.txt.MR-3902?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/CHANGES.txt.MR-3902 (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/CHANGES.txt.MR-3902 Fri Oct 19 19:01:10 2012
@@ -30,3 +30,5 @@ Branch MR-3902
   MAPREDUCE-4727. Handle successful NM stop requests. (sseth)
 
   MAPREDUCE-4596. Split StateMachine state from states seen by MRClientProtocol for Job, Task and TaskAttempt. (sseth)
+
+  MAPREDUCE-4738. fix and re-enable disabled unit tests in the mr-app2 module. (sseth)

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/MRAppMaster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/MRAppMaster.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/MRAppMaster.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/MRAppMaster.java Fri Oct 19 19:01:10 2012
@@ -74,14 +74,17 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app2.metrics.MRAppMetrics;
 import org.apache.hadoop.mapreduce.v2.app2.recover.Recovery;
 import org.apache.hadoop.mapreduce.v2.app2.recover.RecoveryService;
+import org.apache.hadoop.mapreduce.v2.app2.rm.AMSchedulerEvent;
 import org.apache.hadoop.mapreduce.v2.app2.rm.AMSchedulerEventType;
 import org.apache.hadoop.mapreduce.v2.app2.rm.ContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app2.rm.ContainerRequestor;
 import org.apache.hadoop.mapreduce.v2.app2.rm.NMCommunicatorEventType;
 import org.apache.hadoop.mapreduce.v2.app2.rm.RMCommunicator;
+import org.apache.hadoop.mapreduce.v2.app2.rm.RMCommunicatorEvent;
 import org.apache.hadoop.mapreduce.v2.app2.rm.RMCommunicatorEventType;
 import org.apache.hadoop.mapreduce.v2.app2.rm.RMContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app2.rm.RMContainerRequestor;
+import org.apache.hadoop.mapreduce.v2.app2.rm.RMContainerRequestor.ContainerRequest;
 import org.apache.hadoop.mapreduce.v2.app2.rm.container.AMContainer;
 import org.apache.hadoop.mapreduce.v2.app2.rm.container.AMContainerEventType;
 import org.apache.hadoop.mapreduce.v2.app2.rm.container.AMContainerMap;
@@ -111,6 +114,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.Dispatcher;
@@ -178,7 +182,8 @@ public class MRAppMaster extends Composi
   private boolean newApiCommitter;
   private OutputCommitter committer;
   private JobEventDispatcher jobEventDispatcher;
-  private JobHistoryEventHandler2 jobHistoryEventHandler;
+  private EventHandler<JobHistoryEvent> jobHistoryEventHandler;
+  private AbstractService stagingDirCleanerService;
   private boolean inRecovery = false;
   private SpeculatorEventDispatcher speculatorEventDispatcher;
   private ContainerRequestor containerRequestor;
@@ -292,10 +297,9 @@ public class MRAppMaster extends Composi
     addIfService(clientService);
 
     //service to log job history events
-    EventHandler<JobHistoryEvent> historyService = 
-        createJobHistoryHandler(context);
+    jobHistoryEventHandler = createJobHistoryHandler(context);
     dispatcher.register(org.apache.hadoop.mapreduce.jobhistory.EventType.class,
-        historyService);
+        jobHistoryEventHandler);
 
     this.jobEventDispatcher = new JobEventDispatcher();
 
@@ -324,10 +328,21 @@ public class MRAppMaster extends Composi
     addIfService(containerLauncher);
     dispatcher.register(NMCommunicatorEventType.class, containerLauncher);
 
+    // service to allocate containers from RM (if non-uber) or to fake it (uber)
+    containerRequestor = createContainerRequestor(clientService, context);
+    addIfService(containerRequestor);
+    dispatcher.register(RMCommunicatorEventType.class, containerRequestor);
+
+    amScheduler = createAMScheduler(containerRequestor, context);
+    addIfService(amScheduler);
+    dispatcher.register(AMSchedulerEventType.class, amScheduler);
+    
     // Add the staging directory cleaner before the history server but after
     // the container allocator so the staging directory is cleaned after
     // the history has been flushed but before unregistering with the RM.
-    addService(createStagingDirCleaningService());
+    this.stagingDirCleanerService = createStagingDirCleaningService();
+    addService(stagingDirCleanerService);
+
 
     // Add the JobHistoryEventHandler last so that it is properly stopped first.
     // This will guarantee that all history-events are flushed before AM goes
@@ -335,7 +350,7 @@ public class MRAppMaster extends Composi
     // Note: Even though JobHistoryEventHandler is started last, if any
     // component creates a JobHistoryEvent in the meanwhile, it will be just be
     // queued inside the JobHistoryEventHandler 
-    addIfService(historyService);
+    addIfService(this.jobHistoryEventHandler);
 
     super.init(conf);
   } // end of init()
@@ -580,44 +595,33 @@ public class MRAppMaster extends Composi
   protected Recovery createRecoveryService(AppContext appContext) {
     return new RecoveryService(appContext, getCommitter());
   }
-  
+
   /**
    * Create the RMContainerRequestor.
-   * @param clientService the MR Client Service.
-   * @param appContext the application context.
+   * 
+   * @param clientService
+   *          the MR Client Service.
+   * @param appContext
+   *          the application context.
    * @return an instance of the RMContainerRequestor.
    */
   protected ContainerRequestor createContainerRequestor(
       ClientService clientService, AppContext appContext) {
-    ContainerRequestor containerRequestor;
-    if (job.isUber()) {
-      containerRequestor = new LocalContainerRequestor(clientService,
-          appContext);
-    } else {
-      containerRequestor = new RMContainerRequestor(clientService, appContext);
-    }
-    return containerRequestor;
+    return new ContainerRequestorRouter(clientService, appContext);
   }
 
   /**
    * Create the AM Scheduler.
    * 
-   * @param requestor The Container Requestor.
-   * @param appContext the application context.
+   * @param requestor
+   *          The Container Requestor.
+   * @param appContext
+   *          the application context.
    * @return an instance of the AMScheduler.
    */
   protected ContainerAllocator createAMScheduler(ContainerRequestor requestor,
       AppContext appContext) {
-    if (job.isUber()) {
-      return new LocalContainerAllocator(appContext, jobId, nmHost, nmPort,
-          nmHttpPort, containerID, (TaskUmbilicalProtocol) taskAttemptListener,
-          taskAttemptListener, (RMCommunicator)containerRequestor);
-    } else {
-      // TODO XXX: This is terrible. Assuming RMContainerRequestor is sent in
-      // when non-uberized. Fix RMContainerRequestor to be a proper interface, etc.
-      return new RMContainerAllocator((RMContainerRequestor) requestor,
-          appContext);
-    }
+    return new AMSchedulerRouter(requestor, appContext);
   }
 
   /** Create and initialize (but don't start) a single job. */
@@ -681,9 +685,7 @@ public class MRAppMaster extends Composi
 
   protected EventHandler<JobHistoryEvent> createJobHistoryHandler(
       AppContext context) {
-    this.jobHistoryEventHandler = new JobHistoryEventHandler2(context,
-      getStartCount());
-    return this.jobHistoryEventHandler;
+    return new JobHistoryEventHandler2(context, getStartCount());
   }
 
   protected AbstractService createStagingDirCleaningService() {
@@ -810,7 +812,117 @@ public class MRAppMaster extends Composi
   public TaskAttemptListener getTaskAttemptListener() {
     return taskAttemptListener;
   }
-  
+
+  /**
+   * By the time life-cycle of this router starts, job-init would have already
+   * happened.
+   */
+  private final class ContainerRequestorRouter extends AbstractService
+      implements ContainerRequestor {
+    private final ClientService clientService;
+    private final AppContext context;
+    private ContainerRequestor real;
+
+    public ContainerRequestorRouter(ClientService clientService,
+        AppContext appContext) {
+      super(ContainerRequestorRouter.class.getName());
+      this.clientService = clientService;
+      this.context = appContext;
+    }
+
+    @Override
+    public void start() {
+      if (job.isUber()) {
+        real = new LocalContainerRequestor(clientService,
+            context);
+      } else {
+        real = new RMContainerRequestor(clientService, context);
+      }
+      ((Service)this.real).init(getConfig());
+      ((Service)this.real).start();
+      super.start();
+    }
+    
+    @Override
+    public void stop() {
+      if (real != null) {
+        ((Service) real).stop();
+      }
+      super.stop();
+    }
+
+    @Override
+    public void handle(RMCommunicatorEvent event) {
+      real.handle(event);
+    }
+
+    @Override
+    public Resource getAvailableResources() {
+      return real.getAvailableResources();
+    }
+
+    @Override
+    public void addContainerReq(ContainerRequest req) {
+      real.addContainerReq(req);
+    }
+
+    @Override
+    public void decContainerReq(ContainerRequest req) {
+      real.decContainerReq(req);
+    }
+    
+    public void setSignalled(boolean isSignalled) {
+      ((RMCommunicator) real).setSignalled(isSignalled);
+    }
+  }
+
+  /**
+   * By the time life-cycle of this router starts, job-init would have already
+   * happened.
+   */
+  private final class AMSchedulerRouter extends AbstractService
+      implements ContainerAllocator {
+    private final ContainerRequestor requestor;
+    private final AppContext context;
+    private ContainerAllocator containerAllocator;
+
+    AMSchedulerRouter(ContainerRequestor requestor,
+        AppContext context) {
+      super(AMSchedulerRouter.class.getName());
+      this.requestor = requestor;
+      this.context = context;
+    }
+
+    @Override
+    public synchronized void start() {
+      if (job.isUber()) {
+        this.containerAllocator = new LocalContainerAllocator(this.context,
+            jobId, nmHost, nmPort, nmHttpPort, containerID,
+            (TaskUmbilicalProtocol) taskAttemptListener, taskAttemptListener,
+            (RMCommunicator) this.requestor);
+      } else {
+        this.containerAllocator = new RMContainerAllocator(this.requestor,
+            this.context);
+      }
+      ((Service)this.containerAllocator).init(getConfig());
+      ((Service)this.containerAllocator).start();
+      super.start();
+    }
+
+    @Override
+    public synchronized void stop() {
+      if (containerAllocator != null) {
+        ((Service) this.containerAllocator).stop();
+        super.stop();
+      }
+    }
+
+    @Override
+    public void handle(AMSchedulerEvent event) {
+      this.containerAllocator.handle(event);
+    }
+  }
+
   public TaskHeartbeatHandler getTaskHeartbeatHandler() {
     return taskHeartbeatHandler;
   }
@@ -974,16 +1086,6 @@ public class MRAppMaster extends Composi
       LOG.info("MRAppMaster launching normal, non-uberized, multi-container "
                + "job " + job.getID() + ".");
     }
-    // service to allocate containers from RM (if non-uber) or to fake it (uber)
-    containerRequestor = createContainerRequestor(clientService, context);
-    addIfService(containerRequestor);
-    ((Service)containerRequestor).init(getConfig());
-    dispatcher.register(RMCommunicatorEventType.class, containerRequestor);
-
-    amScheduler = createAMScheduler(containerRequestor, context);
-    addIfService(amScheduler);
-    ((Service)amScheduler).init(getConfig());
-    dispatcher.register(AMSchedulerEventType.class, amScheduler);
 
     //start all the components
     super.start();
@@ -1155,10 +1257,12 @@ public class MRAppMaster extends Composi
       // that they don't take too long in shutting down
       
       // Signal the RMCommunicator.
-      ((RMCommunicator)appMaster.containerRequestor).setSignalled(true);
+      ((ContainerRequestorRouter) appMaster.containerRequestor)
+          .setSignalled(true);
 
       if(appMaster.jobHistoryEventHandler != null) {
-        appMaster.jobHistoryEventHandler.setSignalled(true);
+        ((JobHistoryEventHandler2) appMaster.jobHistoryEventHandler)
+            .setSignalled(true);
       }
       appMaster.stop();
     }

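[Editor's note] The ContainerRequestorRouter and AMSchedulerRouter added above let dispatcher registration happen during init(), while the uber vs. non-uber choice (which depends on job-init) is deferred to start(). Below is a minimal, self-contained sketch of that delegating pattern; the interfaces are simplified stand-ins, not the real YARN Service or MapReduce types from this commit.

    // Simplified stand-in for the service lifecycle used by the routers.
    interface LifeCycle {
      void init();
      void start();
      void stop();
    }

    interface SimpleRequestor extends LifeCycle {
      void handle(String event);
    }

    // Router: registered early, but the concrete delegate is chosen only in
    // start(), once the uber/non-uber decision is known.
    final class RequestorRouter implements SimpleRequestor {
      private final boolean uber;
      private SimpleRequestor real;

      RequestorRouter(boolean uber) { this.uber = uber; }

      @Override public void init() { /* delegate not created yet */ }

      @Override public void start() {
        real = uber ? new LocalStub() : new RemoteStub();
        real.init();
        real.start();
      }

      @Override public void stop() { if (real != null) real.stop(); }

      @Override public void handle(String event) { real.handle(event); }
    }

    // Two trivial delegates standing in for the local and RM-backed requestors.
    final class LocalStub implements SimpleRequestor {
      @Override public void init() {}
      @Override public void start() {}
      @Override public void stop() {}
      @Override public void handle(String event) { System.out.println("uber: " + event); }
    }

    final class RemoteStub implements SimpleRequestor {
      @Override public void init() {}
      @Override public void start() {}
      @Override public void stop() {}
      @Override public void handle(String event) { System.out.println("non-uber: " + event); }
    }
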
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/TaskAttemptListener.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/TaskAttemptListener.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/TaskAttemptListener.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/TaskAttemptListener.java Fri Oct 19 19:01:10 2012
@@ -20,7 +20,6 @@ package org.apache.hadoop.mapreduce.v2.a
 
 import java.net.InetSocketAddress;
 
-import org.apache.hadoop.mapred.Task;
 import org.apache.hadoop.mapred.WrappedJvmID;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.yarn.api.records.ContainerId;

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/client/MRClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/client/MRClientService.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/client/MRClientService.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/client/MRClientService.java Fri Oct 19 19:01:10 2012
@@ -68,6 +68,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app2.job.event.JobEventType;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptDiagnosticsUpdateEvent;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEvent;
+import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEventFailRequest;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskEvent;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskEventType;
@@ -346,8 +347,7 @@ public class MRClientService extends Abs
       appContext.getEventHandler().handle(
           new TaskAttemptDiagnosticsUpdateEvent(taskAttemptId, message));
       appContext.getEventHandler().handle(
-          new TaskAttemptEvent(taskAttemptId, 
-              TaskAttemptEventType.TA_FAIL_REQUEST));
+          new TaskAttemptEventFailRequest(taskAttemptId, message));
       FailTaskAttemptResponse response = recordFactory.
         newRecordInstance(FailTaskAttemptResponse.class);
       return response;

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventFailRequest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventFailRequest.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventFailRequest.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventFailRequest.java Fri Oct 19 19:01:10 2012
@@ -29,8 +29,8 @@ public class TaskAttemptEventFailRequest
     this.message = message;
   }
 
+  // TODO: This is not used at the moment.
   public String getMessage() {
     return this.message;
   }
-
-}
+}
\ No newline at end of file

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventKillRequest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventKillRequest.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventKillRequest.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptEventKillRequest.java Fri Oct 19 19:01:10 2012
@@ -11,6 +11,7 @@ public class TaskAttemptEventKillRequest
     this.message = message;
   }
 
+  // TODO: This is not used at the moment.
   public String getMessage() {
     return this.message;
   }

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptScheduleEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptScheduleEvent.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptScheduleEvent.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/event/TaskAttemptScheduleEvent.java Fri Oct 19 19:01:10 2012
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
 package org.apache.hadoop.mapreduce.v2.app2.job.event;
 
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
@@ -5,14 +22,14 @@ import org.apache.hadoop.mapreduce.v2.ap
 public class TaskAttemptScheduleEvent extends TaskAttemptEvent {
 
   private final boolean rescheduled;
-  
-  public TaskAttemptScheduleEvent(TaskAttemptId id, TaskAttemptEventType type, boolean rescheduled) {
-    super(id, type);
+
+  public TaskAttemptScheduleEvent(TaskAttemptId id, boolean rescheduled) {
+    super(id, TaskAttemptEventType.TA_SCHEDULE);
     this.rescheduled = rescheduled;
   }
 
   public boolean isRescheduled() {
     return this.rescheduled;
   }
-  
+
 }

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskAttemptImpl.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskAttemptImpl.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskAttemptImpl.java Fri Oct 19 19:01:10 2012
@@ -77,8 +77,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskEventType;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskTAttemptEvent;
-import org.apache.hadoop.mapreduce.v2.app2.rm.AMSchedulerTALaunchRequestEvent;
 import org.apache.hadoop.mapreduce.v2.app2.rm.AMSchedulerEventTAEnded;
+import org.apache.hadoop.mapreduce.v2.app2.rm.AMSchedulerTALaunchRequestEvent;
 import org.apache.hadoop.mapreduce.v2.app2.speculate.SpeculatorEvent;
 import org.apache.hadoop.mapreduce.v2.app2.taskclean.TaskCleanupEvent;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
@@ -219,10 +219,10 @@ public abstract class TaskAttemptImpl im
         .addTransition(TaskAttemptStateInternal.FAIL_IN_PROGRESS, TaskAttemptStateInternal.FAIL_IN_PROGRESS, EnumSet.of(TaskAttemptEventType.TA_STARTED_REMOTELY, TaskAttemptEventType.TA_STATUS_UPDATE, TaskAttemptEventType.TA_COMMIT_PENDING, TaskAttemptEventType.TA_DONE, TaskAttemptEventType.TA_FAILED, TaskAttemptEventType.TA_TIMED_OUT, TaskAttemptEventType.TA_FAIL_REQUEST, TaskAttemptEventType.TA_KILL_REQUEST, TaskAttemptEventType.TA_NODE_FAILED, TaskAttemptEventType.TA_CONTAINER_TERMINATING))
         
         .addTransition(TaskAttemptStateInternal.KILLED, TaskAttemptStateInternal.KILLED, TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE, DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
-        .addTransition(TaskAttemptStateInternal.KILLED, TaskAttemptStateInternal.KILLED, EnumSet.of(TaskAttemptEventType.TA_STARTED_REMOTELY, TaskAttemptEventType.TA_STATUS_UPDATE, TaskAttemptEventType.TA_COMMIT_PENDING, TaskAttemptEventType.TA_DONE, TaskAttemptEventType.TA_FAILED, TaskAttemptEventType.TA_FAIL_REQUEST, TaskAttemptEventType.TA_KILL_REQUEST, TaskAttemptEventType.TA_NODE_FAILED, TaskAttemptEventType.TA_CONTAINER_TERMINATING, TaskAttemptEventType.TA_CONTAINER_TERMINATED))
+        .addTransition(TaskAttemptStateInternal.KILLED, TaskAttemptStateInternal.KILLED, EnumSet.of(TaskAttemptEventType.TA_STARTED_REMOTELY, TaskAttemptEventType.TA_STATUS_UPDATE, TaskAttemptEventType.TA_COMMIT_PENDING, TaskAttemptEventType.TA_DONE, TaskAttemptEventType.TA_FAILED, TaskAttemptEventType.TA_FAIL_REQUEST, TaskAttemptEventType.TA_KILL_REQUEST, TaskAttemptEventType.TA_NODE_FAILED, TaskAttemptEventType.TA_CONTAINER_TERMINATING, TaskAttemptEventType.TA_CONTAINER_TERMINATED, TaskAttemptEventType.TA_TOO_MANY_FETCH_FAILURES))
 
         .addTransition(TaskAttemptStateInternal.FAILED, TaskAttemptStateInternal.FAILED, TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE, DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
-        .addTransition(TaskAttemptStateInternal.FAILED, TaskAttemptStateInternal.FAILED, EnumSet.of(TaskAttemptEventType.TA_STARTED_REMOTELY, TaskAttemptEventType.TA_STATUS_UPDATE, TaskAttemptEventType.TA_COMMIT_PENDING, TaskAttemptEventType.TA_DONE, TaskAttemptEventType.TA_FAILED, TaskAttemptEventType.TA_FAIL_REQUEST, TaskAttemptEventType.TA_KILL_REQUEST, TaskAttemptEventType.TA_NODE_FAILED, TaskAttemptEventType.TA_CONTAINER_TERMINATING, TaskAttemptEventType.TA_CONTAINER_TERMINATED))
+        .addTransition(TaskAttemptStateInternal.FAILED, TaskAttemptStateInternal.FAILED, EnumSet.of(TaskAttemptEventType.TA_STARTED_REMOTELY, TaskAttemptEventType.TA_STATUS_UPDATE, TaskAttemptEventType.TA_COMMIT_PENDING, TaskAttemptEventType.TA_DONE, TaskAttemptEventType.TA_FAILED, TaskAttemptEventType.TA_FAIL_REQUEST, TaskAttemptEventType.TA_KILL_REQUEST, TaskAttemptEventType.TA_NODE_FAILED, TaskAttemptEventType.TA_CONTAINER_TERMINATING, TaskAttemptEventType.TA_CONTAINER_TERMINATED, TaskAttemptEventType.TA_TOO_MANY_FETCH_FAILURES))
         
         // TODO XXX: FailRequest / KillRequest at SUCCEEDED need to consider Map / Reduce task.
         .addTransition(TaskAttemptStateInternal.SUCCEEDED, TaskAttemptStateInternal.SUCCEEDED, TaskAttemptEventType.TA_DIAGNOSTICS_UPDATE, DIAGNOSTIC_INFORMATION_UPDATE_TRANSITION)
@@ -801,6 +801,10 @@ public abstract class TaskAttemptImpl im
     sendEvent(new TaskCleanupEvent(this.attemptId, this.committer, taContext));
   }
 
+  protected String[] resolveHosts(String[] src) {
+    return TaskAttemptImplHelpers.resolveHosts(src);
+  }
+
   protected SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> 
       createScheduleTransition() {
     return new ScheduleTaskattempt();
@@ -836,7 +840,7 @@ public abstract class TaskAttemptImpl im
         for (String host : ta.dataLocalHosts) {
           racks.add(RackResolver.resolve(host).getNetworkLocation());
         }
-        hostArray = TaskAttemptImplHelpers.resolveHosts(ta.dataLocalHosts);
+        hostArray = ta.resolveHosts(ta.dataLocalHosts);
         rackArray = racks.toArray(new String[racks.size()]);
       }
 

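[Editor's note] The new protected resolveHosts(...) wrapper in TaskAttemptImpl, which delegates to the static TaskAttemptImplHelpers.resolveHosts, gives tests a seam to override host resolution. A hypothetical illustration of that seam follows, using simplified stand-ins rather than the actual classes.

    // Production-style class: wraps the static helper in an overridable method.
    class Attempt {
      protected String[] resolveHosts(String[] src) {
        return Helpers.resolveHosts(src);       // real resolution in production
      }
      String[] hostsForScheduling(String[] dataLocalHosts) {
        return resolveHosts(dataLocalHosts);    // call through the seam
      }
    }

    // Test subclass: skips real resolution so unit tests need no DNS/rack setup.
    class AttemptForTest extends Attempt {
      @Override protected String[] resolveHosts(String[] src) {
        return src;
      }
    }

    class Helpers {
      static String[] resolveHosts(String[] src) {
        return src;                             // placeholder for the real helper
      }
    }
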
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskImpl.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskImpl.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/job/impl/TaskImpl.java Fri Oct 19 19:01:10 2012
@@ -67,7 +67,6 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app2.job.event.JobTaskAttemptCompletedEvent;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.JobTaskEvent;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEventKillRequest;
-import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptScheduleEvent;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskEvent;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskEventType;
@@ -615,7 +614,7 @@ public abstract class TaskImpl implement
     ++numberUncompletedAttempts;
     //schedule the nextAttemptNumber
     eventHandler.handle(new TaskAttemptScheduleEvent(attempt.getID(),
-        TaskAttemptEventType.TA_SCHEDULE, failedAttempts > 0));
+        failedAttempts > 0));
     
   }
 

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/local/LocalContainerRequestor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/local/LocalContainerRequestor.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/local/LocalContainerRequestor.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/local/LocalContainerRequestor.java Fri Oct 19 19:01:10 2012
@@ -31,11 +31,13 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app2.rm.ContainerRequestor;
 import org.apache.hadoop.mapreduce.v2.app2.rm.RMCommunicator;
 import org.apache.hadoop.mapreduce.v2.app2.rm.RMCommunicatorEvent;
+import org.apache.hadoop.mapreduce.v2.app2.rm.RMContainerRequestor.ContainerRequest;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
 import org.apache.hadoop.yarn.api.records.AMResponse;
 import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 
@@ -125,4 +127,19 @@ public class LocalContainerRequestor ext
       break;
     }
   }
+
+  @Override
+  public Resource getAvailableResources() {
+    throw new YarnException("Unexpected call to getAvailableResource");
+  }
+
+  @Override
+  public void addContainerReq(ContainerRequest req) {
+    throw new YarnException("Unexpected call to addContainerReq");
+  }
+
+  @Override
+  public void decContainerReq(ContainerRequest req) {
+    throw new YarnException("Unexpected call to decContainerReq");
+  }
 }

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/ContainerRequestor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/ContainerRequestor.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/ContainerRequestor.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/ContainerRequestor.java Fri Oct 19 19:01:10 2012
@@ -1,27 +1,31 @@
 /**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 package org.apache.hadoop.mapreduce.v2.app2.rm;
 
+import org.apache.hadoop.mapreduce.v2.app2.rm.RMContainerRequestor.ContainerRequest;
+import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.event.EventHandler;
 
 public interface ContainerRequestor extends EventHandler<RMCommunicatorEvent> {
-  
-  
+  public Resource getAvailableResources();
 
+  public void addContainerReq(ContainerRequest req);
+
+  public void decContainerReq(ContainerRequest req);
 }

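[Editor's note] With getAvailableResources, addContainerReq and decContainerReq promoted onto the ContainerRequestor interface, callers such as RMContainerAllocator can be written against the interface instead of the concrete RMContainerRequestor (as the allocator change below does). The fragment here is a hypothetical caller-side sketch; the class and method names are illustrative, not part of the commit.

    import org.apache.hadoop.mapreduce.v2.app2.rm.ContainerRequestor;
    import org.apache.hadoop.mapreduce.v2.app2.rm.RMContainerRequestor.ContainerRequest;

    // Hypothetical bookkeeping helper using only the interface methods.
    class RequestBookkeeping {
      static void onLaunchRequest(ContainerRequestor requestor, ContainerRequest req) {
        requestor.addContainerReq(req);          // file a request for a new attempt
      }
      static void onAssigned(ContainerRequestor requestor, ContainerRequest req) {
        requestor.decContainerReq(req);          // withdraw it once assigned
      }
      static int headroomMb(ContainerRequestor requestor) {
        return requestor.getAvailableResources().getMemory();
      }
    }
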
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerAllocator.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerAllocator.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerAllocator.java Fri Oct 19 19:01:10 2012
@@ -109,7 +109,7 @@ public class RMContainerAllocator extend
   protected final Clock clock;
   protected Job job;
   protected final JobId jobId;
-  private final RMContainerRequestor requestor;
+  private final ContainerRequestor requestor;
   @SuppressWarnings("rawtypes")
   private final EventHandler eventHandler;
   private final AMContainerMap containerMap;
@@ -180,7 +180,7 @@ public class RMContainerAllocator extend
   BlockingQueue<AMSchedulerEvent> eventQueue
     = new LinkedBlockingQueue<AMSchedulerEvent>();
 
-  public RMContainerAllocator(RMContainerRequestor requestor,
+  public RMContainerAllocator(ContainerRequestor requestor,
       AppContext appContext) {
     super("RMContainerAllocator");
     this.requestor = requestor;
@@ -188,12 +188,13 @@ public class RMContainerAllocator extend
     this.clock = appContext.getClock();
     this.eventHandler = appContext.getEventHandler();
     ApplicationId appId = appContext.getApplicationID();
-    // JobId should not be required here. 
-    // Currently used for error notification, clc construction, etc. Should not be  
+    // JobId should not be required here.
+    // Currently used for error notification, clc construction, etc. Should not
+    // be
     JobID id = TypeConverter.fromYarn(appId);
     JobId jobId = TypeConverter.toYarn(id);
     this.jobId = jobId;
-    
+
     this.containerMap = appContext.getAllContainers();
   }
 
@@ -531,10 +532,11 @@ public class RMContainerAllocator extend
       AMSchedulerTALaunchRequestEvent event, TaskType taskType,
       int prevComputedSize) {
     if (prevComputedSize == 0) {
-      int supportedMaxContainerCapability = requestor
+      int supportedMaxContainerCapability = appContext.getClusterInfo()
           .getMaxContainerCapability().getMemory();
       prevComputedSize = event.getCapability().getMemory();
-      int minSlotMemSize = requestor.getMinContainerCapability().getMemory();
+      int minSlotMemSize = appContext.getClusterInfo()
+          .getMinContainerCapability().getMemory();
       prevComputedSize = (int) Math.ceil((float) prevComputedSize
           / minSlotMemSize)
           * minSlotMemSize;

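[Editor's note] The hunk above switches the min/max container capability lookups from the requestor to ClusterInfo; the rounding itself is unchanged. A small worked example of that rounding, with illustrative values:

    public class SlotRounding {
      public static void main(String[] args) {
        int requestedMb = 1100;  // e.g. event.getCapability().getMemory()
        int minSlotMb = 512;     // e.g. getMinContainerCapability().getMemory()
        // Round the request up to the next multiple of the minimum slot size.
        int roundedMb = (int) Math.ceil((float) requestedMb / minSlotMb) * minSlotMb;
        System.out.println(roundedMb); // prints 1536
      }
    }
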
Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerRequestor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerRequestor.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerRequestor.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/RMContainerRequestor.java Fri Oct 19 19:01:10 2012
@@ -143,7 +143,8 @@ public class RMContainerRequestor extend
     super.stop();
   }
 
-  protected Resource getAvailableResources() {
+  @Override
+  public Resource getAvailableResources() {
     return availableResources;
   }
 

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/container/AMContainerHelpers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/container/AMContainerHelpers.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/container/AMContainerHelpers.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/java/org/apache/hadoop/mapreduce/v2/app2/rm/container/AMContainerHelpers.java Fri Oct 19 19:01:10 2012
@@ -60,6 +60,8 @@ import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 
+import com.google.common.annotations.VisibleForTesting;
+
 public class AMContainerHelpers {
 
   private static final Log LOG = LogFactory.getLog(AMContainerHelpers.class);
@@ -221,7 +223,8 @@ public class AMContainerHelpers {
     return container;
   }
 
-  static ContainerLaunchContext createContainerLaunchContext(
+  @VisibleForTesting
+  public static ContainerLaunchContext createContainerLaunchContext(
       Map<ApplicationAccessType, String> applicationACLs,
       ContainerId containerID, JobConf jobConf, TaskType taskType,
       Token<JobTokenIdentifier> jobToken,

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo Fri Oct 19 19:01:10 2012
@@ -1 +1 @@
-org.apache.hadoop.mapreduce.v2.app.MRClientSecurityInfo
+org.apache.hadoop.mapreduce.v2.app2.MRClientSecurityInfo

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRApp.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRApp.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRApp.java Fri Oct 19 19:01:10 2012
@@ -65,6 +65,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app2.job.event.JobEventType;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.JobFinishEvent;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEvent;
+import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEventFailRequest;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEventKillRequest;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptRemoteStartEvent;
@@ -516,8 +517,8 @@ public class MRApp extends MRAppMaster {
   }
 
   // appAcls and attemptToContainerIdMap shared between various mocks.
-  private Map<ApplicationAccessType, String> appAcls = new HashMap<ApplicationAccessType, String>();
-  private Map<TaskAttemptId, ContainerId> attemptToContainerIdMap = new HashMap<TaskAttemptId, ContainerId>();
+  protected Map<ApplicationAccessType, String> appAcls = new HashMap<ApplicationAccessType, String>();
+  protected Map<TaskAttemptId, ContainerId> attemptToContainerIdMap = new HashMap<TaskAttemptId, ContainerId>();
   
   protected class MockContainerLauncher implements ContainerLauncher {
 
@@ -620,7 +621,8 @@ public class MRApp extends MRAppMaster {
     return new MRAppAMScheduler();
   }
 
-  protected class MRAppAMScheduler extends AbstractService implements ContainerAllocator{
+  protected class MRAppAMScheduler extends AbstractService implements
+      ContainerAllocator {
     private int containerCount;
     
     MRAppAMScheduler() {
@@ -845,8 +847,7 @@ public class MRApp extends MRAppMaster {
                   "Kill requested"));
     } else if (finalState == TaskAttemptState.FAILED) {
       getContext().getEventHandler().handle(
-          new TaskAttemptEvent(taskAttemptId,
-              TaskAttemptEventType.TA_FAIL_REQUEST));
+          new TaskAttemptEventFailRequest(taskAttemptId, null));
     }
   }
 }

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRAppBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRAppBenchmark.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRAppBenchmark.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/MRAppBenchmark.java Fri Oct 19 19:01:10 2012
@@ -24,14 +24,26 @@ import java.util.concurrent.BlockingQueu
 import java.util.concurrent.LinkedBlockingQueue;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TypeConverter;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.app2.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app2.job.Job;
-import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptContainerAssignedEvent;
+import org.apache.hadoop.mapreduce.v2.app2.rm.AMSchedulerEvent;
+import org.apache.hadoop.mapreduce.v2.app2.rm.AMSchedulerEventTAEnded;
+import org.apache.hadoop.mapreduce.v2.app2.rm.AMSchedulerTALaunchRequestEvent;
 import org.apache.hadoop.mapreduce.v2.app2.rm.ContainerAllocator;
-import org.apache.hadoop.mapreduce.v2.app2.rm.ContainerAllocatorEvent;
+import org.apache.hadoop.mapreduce.v2.app2.rm.ContainerRequestor;
 import org.apache.hadoop.mapreduce.v2.app2.rm.RMContainerAllocator;
+import org.apache.hadoop.mapreduce.v2.app2.rm.RMContainerRequestor;
+import org.apache.hadoop.mapreduce.v2.app2.rm.container.AMContainerAssignTAEvent;
+import org.apache.hadoop.mapreduce.v2.app2.rm.container.AMContainerEvent;
+import org.apache.hadoop.mapreduce.v2.app2.rm.container.AMContainerEventType;
+import org.apache.hadoop.mapreduce.v2.app2.rm.container.AMContainerLaunchRequestEvent;
+import org.apache.hadoop.mapreduce.v2.app2.rm.container.AMContainerState;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.AMRMProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
@@ -46,8 +58,6 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
-import org.apache.hadoop.yarn.factories.RecordFactory;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.service.AbstractService;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;
@@ -58,8 +68,6 @@ import org.junit.Test;
 
 public class MRAppBenchmark {
 
-  private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
-
   /**
    * Runs memory and time benchmark with Mock MRApp.
    */
@@ -108,8 +116,8 @@ public class MRAppBenchmark {
     }
     
     @Override
-    protected ContainerAllocator createContainerAllocator(
-        ClientService clientService, AppContext context) {
+    protected ContainerAllocator createAMScheduler(ContainerRequestor requestor,
+        AppContext appContext) {
       return new ThrottledContainerAllocator();
     }
     
@@ -117,13 +125,13 @@ public class MRAppBenchmark {
         implements ContainerAllocator {
       private int containerCount;
       private Thread thread;
-      private BlockingQueue<ContainerAllocatorEvent> eventQueue =
-        new LinkedBlockingQueue<ContainerAllocatorEvent>();
+      private BlockingQueue<AMSchedulerEvent> eventQueue =
+        new LinkedBlockingQueue<AMSchedulerEvent>();
       public ThrottledContainerAllocator() {
         super("ThrottledContainerAllocator");
       }
       @Override
-      public void handle(ContainerAllocatorEvent event) {
+      public void handle(AMSchedulerEvent event) {
         try {
           eventQueue.put(event);
         } catch (InterruptedException e) {
@@ -133,34 +141,72 @@ public class MRAppBenchmark {
       @Override
       public void start() {
         thread = new Thread(new Runnable() {
+          @SuppressWarnings("unchecked")
           @Override
           public void run() {
-            ContainerAllocatorEvent event = null;
+            AMSchedulerEvent event = null;
             while (!Thread.currentThread().isInterrupted()) {
               try {
                 if (concurrentRunningTasks < maxConcurrentRunningTasks) {
                   event = eventQueue.take();
-                  ContainerId cId = 
-                      recordFactory.newRecordInstance(ContainerId.class);
-                  cId.setApplicationAttemptId(
-                      getContext().getApplicationAttemptId());
-                  cId.setId(containerCount++);
-                  //System.out.println("Allocating " + containerCount);
-                  
-                  Container container = 
-                      recordFactory.newRecordInstance(Container.class);
-                  container.setId(cId);
-                  NodeId nodeId = recordFactory.newRecordInstance(NodeId.class);
-                  nodeId.setHost("dummy");
-                  nodeId.setPort(1234);
-                  container.setNodeId(nodeId);
-                  container.setContainerToken(null);
-                  container.setNodeHttpAddress("localhost:8042");
-                  getContext().getEventHandler()
-                      .handle(
-                      new TaskAttemptContainerAssignedEvent(event
-                          .getAttemptID(), container, null));
-                  concurrentRunningTasks++;
+                  switch(event.getType()) {
+                  case S_TA_LAUNCH_REQUEST:
+                    AMSchedulerTALaunchRequestEvent lEvent = (AMSchedulerTALaunchRequestEvent)event;
+                    ContainerId cId = Records.newRecord(ContainerId.class);
+                    cId.setApplicationAttemptId(getContext().getApplicationAttemptId());
+                    cId.setId(containerCount++);
+                    NodeId nodeId = BuilderUtils.newNodeId(NM_HOST, NM_PORT);
+                    Container container = BuilderUtils.newContainer(cId, nodeId,
+                        NM_HOST + ":" + NM_HTTP_PORT, null, null, null);
+                    
+                    getContext().getAllContainers().addContainerIfNew(container);
+                    getContext().getAllNodes().nodeSeen(nodeId);
+                    
+                    JobID id = TypeConverter.fromYarn(getContext().getApplicationID());
+                    JobId jobId = TypeConverter.toYarn(id);
+                    
+                    attemptToContainerIdMap.put(lEvent.getAttemptID(), cId);
+                    if (getContext().getAllContainers().get(cId).getState() == AMContainerState.ALLOCATED) {
+                    
+                      AMContainerLaunchRequestEvent lrEvent = new AMContainerLaunchRequestEvent(
+                          cId, jobId, lEvent.getAttemptID().getTaskId().getTaskType(),
+                          lEvent.getJobToken(), lEvent.getCredentials(), false,
+                          new JobConf(getContext().getJob(jobId).getConf()));
+                      getContext().getEventHandler().handle(lrEvent);
+                    }
+                    
+                    getContext().getEventHandler().handle(
+                        new AMContainerAssignTAEvent(cId, lEvent.getAttemptID(), lEvent
+                            .getRemoteTask()));
+                    concurrentRunningTasks++;
+                    break;
+                    
+                  case S_TA_ENDED:
+                    // Send out a Container_stop_request.
+                    AMSchedulerEventTAEnded sEvent = (AMSchedulerEventTAEnded) event;
+                    switch (sEvent.getState()) {
+                    case FAILED:
+                    case KILLED:
+                      getContext().getEventHandler().handle(
+                          new AMContainerEvent(attemptToContainerIdMap.remove(sEvent
+                              .getAttemptID()), AMContainerEventType.C_STOP_REQUEST));
+                      break;
+                    case SUCCEEDED:
+                      // No re-use in MRApp. Stop the container.
+                      getContext().getEventHandler().handle(
+                          new AMContainerEvent(attemptToContainerIdMap.remove(sEvent
+                              .getAttemptID()), AMContainerEventType.C_STOP_REQUEST));
+                      break;
+                    default:
+                      throw new YarnException("Unexpected state: " + sEvent.getState());
+                    }
+                  case S_CONTAINERS_ALLOCATED:
+                    break;
+                  case S_CONTAINER_COMPLETED:
+                    break;
+                  default:
+                      break;
+                  }
                 } else {
                   Thread.sleep(1000);
                 }
@@ -192,9 +238,16 @@ public class MRAppBenchmark {
     run(new MRApp(maps, reduces, true, this.getClass().getName(), true) {
 
       @Override
-      protected ContainerAllocator createContainerAllocator(
-          ClientService clientService, AppContext context) {
-        return new RMContainerAllocator(clientService, context) {
+      protected ContainerAllocator createAMScheduler(
+          ContainerRequestor requestor, AppContext appContext) {
+        return new RMContainerAllocator((RMContainerRequestor) requestor,
+            appContext);
+      }
+
+      @Override
+      protected ContainerRequestor createContainerRequestor(
+          ClientService clientService, AppContext appContext) {
+        return new RMContainerRequestor(clientService, appContext) {
           @Override
           protected AMRMProtocol createSchedulerProxy() {
             return new AMRMProtocol() {

Modified: hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/TestRecovery.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/TestRecovery.java?rev=1400232&r1=1400231&r2=1400232&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/TestRecovery.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app2/src/test/java/org/apache/hadoop/mapreduce/v2/app2/TestRecovery.java Fri Oct 19 19:01:10 2012
@@ -51,6 +51,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app2.job.Task;
 import org.apache.hadoop.mapreduce.v2.app2.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEvent;
+import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEventFailRequest;
 import org.apache.hadoop.mapreduce.v2.app2.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app2.launcher.ContainerLauncher;
 import org.apache.hadoop.mapreduce.v2.app2.rm.NMCommunicatorEvent;
@@ -123,10 +124,8 @@ public class TestRecovery {
 
     /////////// Play some games with the TaskAttempts of the first task //////
     //send the fail signal to the 1st map task attempt
-    app.getContext().getEventHandler().handle(
-        new TaskAttemptEvent(
-            task1Attempt1.getID(),
-            TaskAttemptEventType.TA_FAIL_REQUEST));
+    app.getContext().getEventHandler()
+        .handle(new TaskAttemptEventFailRequest(task1Attempt1.getID(), null));
     
     app.waitForState(task1Attempt1, TaskAttemptState.FAILED);
 


