hadoop-mapreduce-commits mailing list archives

From: a..@apache.org
Subject: svn commit: r1227775 - in /hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java...
Date: Thu, 05 Jan 2012 19:21:05 GMT
Author: atm
Date: Thu Jan  5 19:21:01 2012
New Revision: 1227775

URL: http://svn.apache.org/viewvc?rev=1227775&view=rev
Log:
Merge trunk into HA branch.

Added:
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestMapReduceChildJVM.java
      - copied unchanged from r1227765, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestMapReduceChildJVM.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/
      - copied from r1227765, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java
      - copied unchanged from r1227765, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/.gitignore   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/conf/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/TaskAttemptListener.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerRemoteLaunchEvent.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/lib/TestZKClient.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/pom.xml
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/c++/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/block_forensics/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build-contrib.xml   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build.xml   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/data_join/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/eclipse-plugin/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/index/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/vaidya/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/examples/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/FileBench.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/TestSequenceFileMergeProgress.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/webapps/job/   (props changed)

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-mapreduce-project:1152502-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project:1152502-1227765
 /hadoop/core/branches/branch-0.19/mapred:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/.gitignore
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/.gitignore:1161333-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/.gitignore:1161333-1227765
 /hadoop/core/branches/branch-0.19/mapred/.gitignore:713112
 /hadoop/core/trunk/.gitignore:784664-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt Thu Jan  5 19:21:01 2012
@@ -180,6 +180,8 @@ Release 0.23.1 - Unreleased
 
     MAPREDUCE-3610. Remove use of the 'dfs.block.size' config for default block size fetching. Use FS#getDefaultBlocksize instead. (Sho Shimauchi via harsh)
 
+    MAPREDUCE-3478. Cannot build against ZooKeeper 3.4.0. (Tom White via mahadev)
+
   OPTIMIZATIONS
 
     MAPREDUCE-3567. Extraneous JobConf objects in AM heap. (Vinod Kumar
@@ -191,6 +193,9 @@ Release 0.23.1 - Unreleased
 
     MAPREDUCE-3568. Optimized Job's progress calculations in MR AM. (vinodkv)
 
+    MAPREDUCE-3569. TaskAttemptListener holds a global lock for all
+    task-updates. (Vinod Kumar Vavilapalli via sseth)
+
   BUG FIXES
 
     MAPREDUCE-3221. Reenabled the previously ignored test in TestSubmitJob
@@ -395,6 +400,20 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-1744. DistributedCache creates its own FileSytem instance when 
     adding a file/archive to the path. (Dick King via tucu)
 
+    MAPREDUCE-3529. TokenCache does not cache viewfs credentials correctly
+    (sseth)
+
+    MAPREDUCE-3595. Add missing TestCounters#testCounterValue test from branch
+    1 to 0.23 (Tom White via sseth)
+
+    MAPREDUCE-3566. Fixed MR AM to construct CLC only once across all tasks.
+    (vinodkv via acmurthy) 
+
+    MAPREDUCE-3572. Moved AM event dispatcher to a separate thread for
+    performance reasons. (vinodkv via acmurthy) 
+
+    MAPREDUCE-3615. Fix some ant test failures. (Thomas Graves via sseth)
+
 Release 0.23.0 - 2011-11-01 
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:1161333-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:1161333-1227765
 /hadoop/core/branches/branch-0.19/mapred/CHANGES.txt:713112
 /hadoop/mapreduce/branches/HDFS-641/CHANGES.txt:817878-835964

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/conf:1152502-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/conf:1152502-1227765
 /hadoop/core/branches/branch-0.19/mapred/conf:713112
 /hadoop/core/trunk/conf:784664-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java Thu Jan  5 19:21:01 2012
@@ -33,6 +33,7 @@ import org.apache.hadoop.yarn.api.Applic
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.util.Apps;
 
+@SuppressWarnings("deprecation")
 public class MapReduceChildJVM {
 
   private static String getTaskLogFile(LogName filter) {
@@ -46,7 +47,7 @@ public class MapReduceChildJVM {
           jobConf.get(JobConf.MAPRED_TASK_ENV));
     }
     return jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV,
-        jobConf.get(jobConf.MAPRED_TASK_ENV));
+        jobConf.get(JobConf.MAPRED_TASK_ENV));
   }
 
   private static String getChildLogLevel(JobConf conf, boolean isMap) {
@@ -68,29 +69,9 @@ public class MapReduceChildJVM {
 
     JobConf conf = task.conf;
 
-    // Shell
-    environment.put(
-        Environment.SHELL.name(), 
-        conf.get(
-            MRJobConfig.MAPRED_ADMIN_USER_SHELL, 
-            MRJobConfig.DEFAULT_SHELL)
-            );
-    
-    // Add pwd to LD_LIBRARY_PATH, add this before adding anything else
-    Apps.addToEnvironment(
-        environment, 
-        Environment.LD_LIBRARY_PATH.name(), 
-        Environment.PWD.$());
-
-    // Add the env variables passed by the user & admin
+    // Add the env variables passed by the user
     String mapredChildEnv = getChildEnv(conf, task.isMapTask());
     Apps.setEnvFromInputString(environment, mapredChildEnv);
-    Apps.setEnvFromInputString(
-        environment, 
-        conf.get(
-            MRJobConfig.MAPRED_ADMIN_USER_ENV, 
-            MRJobConfig.DEFAULT_MAPRED_ADMIN_USER_ENV)
-        );
 
     // Set logging level in the environment.
     // This is so that, if the child forks another "bin/hadoop" (common in

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java Thu Jan  5 19:21:01 2012
@@ -19,14 +19,12 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -64,21 +62,22 @@ import org.apache.hadoop.yarn.service.Co
  * This class HAS to be in this package to access package private 
  * methods/classes.
  */
+@SuppressWarnings({"unchecked" , "deprecation"})
 public class TaskAttemptListenerImpl extends CompositeService 
     implements TaskUmbilicalProtocol, TaskAttemptListener {
 
+  private static final JvmTask TASK_FOR_INVALID_JVM = new JvmTask(null, true);
+
   private static final Log LOG = LogFactory.getLog(TaskAttemptListenerImpl.class);
 
   private AppContext context;
   private Server server;
   protected TaskHeartbeatHandler taskHeartbeatHandler;
   private InetSocketAddress address;
-  private Map<WrappedJvmID, org.apache.hadoop.mapred.Task> jvmIDToActiveAttemptMap = 
-    Collections.synchronizedMap(new HashMap<WrappedJvmID, 
-        org.apache.hadoop.mapred.Task>());
+  private ConcurrentMap<WrappedJvmID, org.apache.hadoop.mapred.Task>
+    jvmIDToActiveAttemptMap
+      = new ConcurrentHashMap<WrappedJvmID, org.apache.hadoop.mapred.Task>();
   private JobTokenSecretManager jobTokenSecretManager = null;
-  private Set<WrappedJvmID> pendingJvms =
-    Collections.synchronizedSet(new HashSet<WrappedJvmID>());
   
   public TaskAttemptListenerImpl(AppContext context,
       JobTokenSecretManager jobTokenSecretManager) {
@@ -123,10 +122,9 @@ public class TaskAttemptListenerImpl ext
 
       server.start();
       InetSocketAddress listenerAddress = server.getListenerAddress();
-      this.address =
-          NetUtils.createSocketAddr(listenerAddress.getAddress()
-              .getLocalHost().getCanonicalHostName()
-              + ":" + listenerAddress.getPort());
+      listenerAddress.getAddress();
+      this.address = NetUtils.createSocketAddr(InetAddress.getLocalHost()
+        .getCanonicalHostName() + ":" + listenerAddress.getPort());
     } catch (IOException e) {
       throw new YarnException(e);
     }
@@ -408,57 +406,59 @@ public class TaskAttemptListenerImpl ext
 
     WrappedJvmID wJvmID = new WrappedJvmID(jvmId.getJobId(), jvmId.isMap,
         jvmId.getId());
-    synchronized(this) {
-      if(pendingJvms.contains(wJvmID)) {
-        org.apache.hadoop.mapred.Task task = jvmIDToActiveAttemptMap.get(wJvmID);
-        if (task != null) { //there may be lag in the attempt getting added here
-         LOG.info("JVM with ID: " + jvmId + " given task: " + task.getTaskID());
-          jvmTask = new JvmTask(task, false);
-
-          //remove the task as it is no more needed and free up the memory
-          //Also we have already told the JVM to process a task, so it is no
-          //longer pending, and further request should ask it to exit.
-          pendingJvms.remove(wJvmID);
-          jvmIDToActiveAttemptMap.remove(wJvmID);
-        }
-      } else {
-        LOG.info("JVM with ID: " + jvmId + " is invalid and will be killed.");
-        jvmTask = new JvmTask(null, true);
-      }
+
+    // Try to look up the task. We remove it directly as we don't give
+    // multiple tasks to a JVM
+    org.apache.hadoop.mapred.Task task = jvmIDToActiveAttemptMap
+        .remove(wJvmID);
+    if (task != null) {
+      LOG.info("JVM with ID: " + jvmId + " given task: " + task.getTaskID());
+      jvmTask = new JvmTask(task, false);
+
+      // remove the task as it is no more needed and free up the memory
+      // Also we have already told the JVM to process a task, so it is no
+      // longer pending, and further request should ask it to exit.
+    } else {
+      LOG.info("JVM with ID: " + jvmId + " is invalid and will be killed.");
+      jvmTask = TASK_FOR_INVALID_JVM;
     }
     return jvmTask;
   }
   
   @Override
-  public synchronized void registerPendingTask(WrappedJvmID jvmID) {
-    //Save this JVM away as one that has not been handled yet
-    pendingJvms.add(jvmID);
+  public void registerPendingTask(
+      org.apache.hadoop.mapred.Task task, WrappedJvmID jvmID) {
+    // Create the mapping so that it is easy to look up
+    // when the jvm comes back to ask for Task.
+
+    // A JVM not present in this map is an illegal task/JVM.
+    jvmIDToActiveAttemptMap.put(jvmID, task);
   }
 
   @Override
   public void registerLaunchedTask(
-      org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID,
-      org.apache.hadoop.mapred.Task task, WrappedJvmID jvmID) {
-    synchronized(this) {
-      //create the mapping so that it is easy to look up
-      //when it comes back to ask for Task.
-      jvmIDToActiveAttemptMap.put(jvmID, task);
-      //This should not need to happen here, but just to be on the safe side
-      if(!pendingJvms.add(jvmID)) {
-        LOG.warn(jvmID+" launched without first being registered");
-      }
-    }
-    //register this attempt
+      org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID) {
+
+    // The task is launched. Register this for expiry-tracking.
+
+    // Timing can cause this to happen after the real JVM launches and gets a
+    // task which is still fine as we will only be tracking for expiry a little
+    // late than usual.
     taskHeartbeatHandler.register(attemptID);
   }
 
   @Override
-  public void unregister(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID,
+  public void unregister(
+      org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID,
       WrappedJvmID jvmID) {
-    //remove the mapping if not already removed
+
+    // Unregistration also comes from the same TaskAttempt which does the
+    // registration. Events are ordered at TaskAttempt, so unregistration will
+    // always come after registration.
+
+    // remove the mapping if not already removed
     jvmIDToActiveAttemptMap.remove(jvmID);
-    //remove the pending if not already removed
-    pendingJvms.remove(jvmID);
+
     //unregister this attempt
     taskHeartbeatHandler.unregister(attemptID);
   }
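
The hunk above (part of MAPREDUCE-3569, listed in the CHANGES.txt entry earlier in this commit) drops the listener-wide lock by collapsing the synchronized map plus pending-JVM set into a single ConcurrentHashMap whose atomic remove() doubles as the one-shot "claim" of a task. A minimal sketch of that pattern, using hypothetical JvmKey and PendingTaskRegistry stand-ins rather than the real WrappedJvmID and TaskAttemptListenerImpl types:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Hypothetical stand-in for WrappedJvmID, for illustration only.
class JvmKey {
    private final int id;
    JvmKey(int id) { this.id = id; }
    @Override public boolean equals(Object o) { return o instanceof JvmKey && ((JvmKey) o).id == id; }
    @Override public int hashCode() { return id; }
}

class PendingTaskRegistry {
    // One map replaces the old synchronized map + pending set:
    // presence of a key means "registered and not yet handed out".
    private final ConcurrentMap<JvmKey, String> jvmToTask =
        new ConcurrentHashMap<JvmKey, String>();

    void registerPendingTask(JvmKey jvm, String task) {
        jvmToTask.put(jvm, task);
    }

    // remove() is atomic, so exactly one caller can claim the task and
    // every later caller sees null (i.e. "invalid JVM, tell it to exit").
    String claimTask(JvmKey jvm) {
        return jvmToTask.remove(jvm);
    }

    public static void main(String[] args) {
        PendingTaskRegistry r = new PendingTaskRegistry();
        JvmKey jvm = new JvmKey(1);
        r.registerPendingTask(jvm, "attempt_0001_m_000000_0");
        System.out.println(r.claimTask(jvm)); // task handed out once
        System.out.println(r.claimTask(jvm)); // null: second ask is told to die
    }
}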

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/TaskAttemptListener.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/TaskAttemptListener.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/TaskAttemptListener.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/TaskAttemptListener.java Thu Jan  5 19:21:01 2012
@@ -32,20 +32,21 @@ public interface TaskAttemptListener {
   InetSocketAddress getAddress();
 
   /**
-   * register a JVM with the listener.  This should be called as soon as a 
+   * Register a JVM with the listener.  This should be called as soon as a 
    * JVM ID is assigned to a task attempt, before it has been launched.
+   * @param task the task itself for this JVM.
    * @param jvmID The ID of the JVM .
    */
-  void registerPendingTask(WrappedJvmID jvmID);
+  void registerPendingTask(Task task, WrappedJvmID jvmID);
   
   /**
-   * Register the task and task attempt with the JVM.  This should be called
-   * when the JVM has been launched.
-   * @param attemptID the id of the attempt for this JVM.
-   * @param task the task itself for this JVM.
-   * @param jvmID the id of the JVM handling the task.
+   * Register task attempt. This should be called when the JVM has been
+   * launched.
+   * 
+   * @param attemptID
+   *          the id of the attempt for this JVM.
    */
-  void registerLaunchedTask(TaskAttemptId attemptID, Task task, WrappedJvmID jvmID);
+  void registerLaunchedTask(TaskAttemptId attemptID);
 
   /**
    * Unregister the JVM and the attempt associated with it.  This should be 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java Thu Jan  5 19:21:01 2012
@@ -27,6 +27,7 @@ import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReadWriteLock;
@@ -109,6 +110,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.URL;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -154,6 +156,8 @@ public abstract class TaskAttemptImpl im
   private Token<JobTokenIdentifier> jobToken;
   private static AtomicBoolean initialClasspathFlag = new AtomicBoolean();
   private static String initialClasspath = null;
+  private static Object commonContainerSpecLock = new Object();
+  private static ContainerLaunchContext commonContainerSpec = null;
   private static final Object classpathLock = new Object();
   private long launchTime;
   private long finishTime;
@@ -497,29 +501,27 @@ public abstract class TaskAttemptImpl im
 
   /**
    * Create a {@link LocalResource} record with all the given parameters.
-   * TODO: This should pave way for Builder pattern.
    */
-  private static LocalResource createLocalResource(FileSystem fc,
-      RecordFactory recordFactory, Path file, LocalResourceType type,
-      LocalResourceVisibility visibility) throws IOException {
+  private static LocalResource createLocalResource(FileSystem fc, Path file,
+      LocalResourceType type, LocalResourceVisibility visibility)
+      throws IOException {
     FileStatus fstat = fc.getFileStatus(file);
-    LocalResource resource =
-        recordFactory.newRecordInstance(LocalResource.class);
-    resource.setResource(ConverterUtils.getYarnUrlFromPath(fc.resolvePath(fstat
-        .getPath())));
-    resource.setType(type);
-    resource.setVisibility(visibility);
-    resource.setSize(fstat.getLen());
-    resource.setTimestamp(fstat.getModificationTime());
-    return resource;
+    URL resourceURL = ConverterUtils.getYarnUrlFromPath(fc.resolvePath(fstat
+        .getPath()));
+    long resourceSize = fstat.getLen();
+    long resourceModificationTime = fstat.getModificationTime();
+
+    return BuilderUtils.newLocalResource(resourceURL, type, visibility,
+        resourceSize, resourceModificationTime);
   }
 
   /**
    * Lock this on initialClasspath so that there is only one fork in the AM for
-   * getting the initial class-path. TODO: This should go away once we construct
-   * a parent CLC and use it for all the containers.
+   * getting the initial class-path. TODO: We already construct
+   * a parent CLC and use it for all the containers, so this should go away
+   * once the mr-generated-classpath stuff is gone.
    */
-  private String getInitialClasspath() throws IOException {
+  private static String getInitialClasspath() throws IOException {
     synchronized (classpathLock) {
       if (initialClasspathFlag.get()) {
         return initialClasspath;
@@ -534,11 +536,14 @@ public abstract class TaskAttemptImpl im
 
 
   /**
-   * Create the {@link ContainerLaunchContext} for this attempt.
+   * Create the common {@link ContainerLaunchContext} for all attempts.
    * @param applicationACLs 
    */
-  private ContainerLaunchContext createContainerLaunchContext(
-      Map<ApplicationAccessType, String> applicationACLs) {
+  private static ContainerLaunchContext createCommonContainerLaunchContext(
+      Map<ApplicationAccessType, String> applicationACLs, Configuration conf,
+      Token<JobTokenIdentifier> jobToken,
+      final org.apache.hadoop.mapred.JobID oldJobId,
+      Collection<Token<? extends TokenIdentifier>> fsTokens) {
 
     // Application resources
     Map<String, LocalResource> localResources = 
@@ -556,13 +561,13 @@ public abstract class TaskAttemptImpl im
       FileSystem remoteFS = FileSystem.get(conf);
 
       // //////////// Set up JobJar to be localized properly on the remote NM.
-      if (conf.get(MRJobConfig.JAR) != null) {
-        Path remoteJobJar = (new Path(remoteTask.getConf().get(
-              MRJobConfig.JAR))).makeQualified(remoteFS.getUri(), 
-                                               remoteFS.getWorkingDirectory());
+      String jobJar = conf.get(MRJobConfig.JAR);
+      if (jobJar != null) {
+        Path remoteJobJar = (new Path(jobJar)).makeQualified(remoteFS
+            .getUri(), remoteFS.getWorkingDirectory());
         localResources.put(
             MRJobConfig.JOB_JAR,
-            createLocalResource(remoteFS, recordFactory, remoteJobJar,
+            createLocalResource(remoteFS, remoteJobJar,
                 LocalResourceType.FILE, LocalResourceVisibility.APPLICATION));
         LOG.info("The job-jar file on the remote FS is "
             + remoteJobJar.toUri().toASCIIString());
@@ -584,7 +589,7 @@ public abstract class TaskAttemptImpl im
           new Path(remoteJobSubmitDir, MRJobConfig.JOB_CONF_FILE);
       localResources.put(
           MRJobConfig.JOB_CONF_FILE,
-          createLocalResource(remoteFS, recordFactory, remoteJobConfPath,
+          createLocalResource(remoteFS, remoteJobConfPath,
               LocalResourceType.FILE, LocalResourceVisibility.APPLICATION));
       LOG.info("The job-conf file on the remote FS is "
           + remoteJobConfPath.toUri().toASCIIString());
@@ -630,19 +635,81 @@ public abstract class TaskAttemptImpl im
       throw new YarnException(e);
     }
 
-    // Setup environment
-    MapReduceChildJVM.setVMEnv(environment, remoteTask);
+    // Shell
+    environment.put(
+        Environment.SHELL.name(), 
+        conf.get(
+            MRJobConfig.MAPRED_ADMIN_USER_SHELL, 
+            MRJobConfig.DEFAULT_SHELL)
+            );
+
+    // Add pwd to LD_LIBRARY_PATH, add this before adding anything else
+    Apps.addToEnvironment(
+        environment, 
+        Environment.LD_LIBRARY_PATH.name(), 
+        Environment.PWD.$());
+
+    // Add the env variables passed by the admin
+    Apps.setEnvFromInputString(
+        environment, 
+        conf.get(
+            MRJobConfig.MAPRED_ADMIN_USER_ENV, 
+            MRJobConfig.DEFAULT_MAPRED_ADMIN_USER_ENV)
+        );
+
+    // Construct the actual Container
+    // The null fields are per-container and will be constructed for each
+    // container separately.
+    ContainerLaunchContext container = BuilderUtils
+        .newContainerLaunchContext(null, conf
+            .get(MRJobConfig.USER_NAME), null, localResources,
+            environment, null, serviceData, tokens, applicationACLs);
+
+    return container;
+  }
+
+  static ContainerLaunchContext createContainerLaunchContext(
+      Map<ApplicationAccessType, String> applicationACLs,
+      ContainerId containerID, Configuration conf,
+      Token<JobTokenIdentifier> jobToken, Task remoteTask,
+      final org.apache.hadoop.mapred.JobID oldJobId,
+      Resource assignedCapability, WrappedJvmID jvmID,
+      TaskAttemptListener taskAttemptListener,
+      Collection<Token<? extends TokenIdentifier>> fsTokens) {
+
+    synchronized (commonContainerSpecLock) {
+      if (commonContainerSpec == null) {
+        commonContainerSpec = createCommonContainerLaunchContext(
+            applicationACLs, conf, jobToken, oldJobId, fsTokens);
+      }
+    }
+
+    // Fill in the fields needed per-container that are missing in the common
+    // spec.
+
+    // Setup environment by cloning from common env.
+    Map<String, String> env = commonContainerSpec.getEnvironment();
+    Map<String, String> myEnv = new HashMap<String, String>(env.size());
+    myEnv.putAll(env);
+    MapReduceChildJVM.setVMEnv(myEnv, remoteTask);
 
     // Set up the launch command
     List<String> commands = MapReduceChildJVM.getVMCommand(
-        taskAttemptListener.getAddress(), remoteTask,
-        jvmID);
-    
+        taskAttemptListener.getAddress(), remoteTask, jvmID);
+
+    // Duplicate the ByteBuffers for access by multiple containers.
+    Map<String, ByteBuffer> myServiceData = new HashMap<String, ByteBuffer>();
+    for (Entry<String, ByteBuffer> entry : commonContainerSpec
+                .getServiceData().entrySet()) {
+      myServiceData.put(entry.getKey(), entry.getValue().duplicate());
+    }
+
     // Construct the actual Container
-    ContainerLaunchContext container = BuilderUtils
-        .newContainerLaunchContext(containerID, conf
-            .get(MRJobConfig.USER_NAME), assignedCapability, localResources,
-            environment, commands, serviceData, tokens, applicationACLs);
+    ContainerLaunchContext container = BuilderUtils.newContainerLaunchContext(
+        containerID, commonContainerSpec.getUser(), assignedCapability,
+        commonContainerSpec.getLocalResources(), myEnv, commands,
+        myServiceData, commonContainerSpec.getContainerTokens().duplicate(),
+        applicationACLs);
 
     return container;
   }
@@ -1022,7 +1089,7 @@ public abstract class TaskAttemptImpl im
 
   private static class ContainerAssignedTransition implements
       SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
-    @SuppressWarnings({ "unchecked", "deprecation" })
+    @SuppressWarnings({ "unchecked" })
     @Override
     public void transition(final TaskAttemptImpl taskAttempt, 
         TaskAttemptEvent event) {
@@ -1042,24 +1109,21 @@ public abstract class TaskAttemptImpl im
       taskAttempt.jvmID = new WrappedJvmID(
           taskAttempt.remoteTask.getTaskID().getJobID(), 
           taskAttempt.remoteTask.isMapTask(), taskAttempt.containerID.getId());
-      taskAttempt.taskAttemptListener.registerPendingTask(taskAttempt.jvmID);
+      taskAttempt.taskAttemptListener.registerPendingTask(
+          taskAttempt.remoteTask, taskAttempt.jvmID);
       
       //launch the container
       //create the container object to be launched for a given Task attempt
-      taskAttempt.eventHandler.handle(
-          new ContainerRemoteLaunchEvent(taskAttempt.attemptId, 
-              taskAttempt.containerID, 
-              taskAttempt.containerMgrAddress, taskAttempt.containerToken) {
-        @Override
-        public ContainerLaunchContext getContainer() {
-          return taskAttempt.createContainerLaunchContext(cEvent
-              .getApplicationACLs());
-        }
-        @Override
-        public Task getRemoteTask() {  // classic mapred Task, not YARN version
-          return taskAttempt.remoteTask;
-        }
-      });
+      ContainerLaunchContext launchContext = createContainerLaunchContext(
+          cEvent.getApplicationACLs(), taskAttempt.containerID,
+          taskAttempt.conf, taskAttempt.jobToken, taskAttempt.remoteTask,
+          taskAttempt.oldJobId, taskAttempt.assignedCapability,
+          taskAttempt.jvmID, taskAttempt.taskAttemptListener,
+          taskAttempt.fsTokens);
+      taskAttempt.eventHandler.handle(new ContainerRemoteLaunchEvent(
+          taskAttempt.attemptId, taskAttempt.containerID,
+          taskAttempt.containerMgrAddress, taskAttempt.containerToken,
+          launchContext, taskAttempt.remoteTask));
 
       // send event to speculator that our container needs are satisfied
       taskAttempt.eventHandler.handle
@@ -1135,10 +1199,9 @@ public abstract class TaskAttemptImpl im
       taskAttempt.launchTime = taskAttempt.clock.getTime();
       taskAttempt.shufflePort = event.getShufflePort();
 
-      // register it to TaskAttemptListener so that it start listening
-      // for it
-      taskAttempt.taskAttemptListener.registerLaunchedTask(
-          taskAttempt.attemptId, taskAttempt.remoteTask, taskAttempt.jvmID);
+      // register it to TaskAttemptListener so that it can start monitoring it.
+      taskAttempt.taskAttemptListener
+        .registerLaunchedTask(taskAttempt.attemptId);
       //TODO Resolve to host / IP in case of a local address.
       InetSocketAddress nodeHttpInetAddr =
           NetUtils.createSocketAddr(taskAttempt.nodeHttpAddress); // TODO:
@@ -1197,7 +1260,6 @@ public abstract class TaskAttemptImpl im
     @Override
     public void transition(TaskAttemptImpl taskAttempt, 
         TaskAttemptEvent event) {
-      @SuppressWarnings("deprecation")
       TaskAttemptContext taskContext =
         new TaskAttemptContextImpl(taskAttempt.conf,
             TypeConverter.fromYarn(taskAttempt.attemptId));
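
The TaskAttemptImpl hunks above (MAPREDUCE-3566 in this commit's CHANGES.txt) build one common ContainerLaunchContext lazily under a lock and then derive each container's spec from it, copying the environment map and calling ByteBuffer.duplicate() on the service data so containers don't share mutable buffer positions. A rough sketch of that build-once, clone-per-use idea, with hypothetical CommonSpec and ContainerSpecFactory names in place of the real YARN records:

import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

// Illustrative stand-in for the record built by createCommonContainerLaunchContext().
class CommonSpec {
    final Map<String, String> env = new HashMap<String, String>();
    final Map<String, ByteBuffer> serviceData = new HashMap<String, ByteBuffer>();
}

class ContainerSpecFactory {
    private static final Object LOCK = new Object();
    private static CommonSpec commonSpec; // built once, shared by all containers

    static CommonSpec perContainerSpec(String taskId) {
        synchronized (LOCK) {
            if (commonSpec == null) {             // expensive setup happens only once
                commonSpec = new CommonSpec();
                commonSpec.env.put("SHELL", "/bin/bash");
                commonSpec.serviceData.put("shuffle", ByteBuffer.wrap(new byte[]{1, 2, 3}));
            }
        }
        CommonSpec mine = new CommonSpec();
        mine.env.putAll(commonSpec.env);          // clone env so per-task edits don't leak
        mine.env.put("TASK_ID", taskId);
        for (Map.Entry<String, ByteBuffer> e : commonSpec.serviceData.entrySet()) {
            // duplicate() gives each container its own position/limit over the same bytes
            mine.serviceData.put(e.getKey(), e.getValue().duplicate());
        }
        return mine;
    }

    public static void main(String[] args) {
        CommonSpec a = perContainerSpec("attempt_0001_m_000000_0");
        CommonSpec b = perContainerSpec("attempt_0001_r_000000_0");
        System.out.println(a.env.get("TASK_ID") + " / " + b.env.get("TASK_ID"));
    }
}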

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerRemoteLaunchEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerRemoteLaunchEvent.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerRemoteLaunchEvent.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerRemoteLaunchEvent.java Thu Jan  5 19:21:01 2012
@@ -24,17 +24,31 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.ContainerToken;
 
-public abstract class ContainerRemoteLaunchEvent extends ContainerLauncherEvent {
+public class ContainerRemoteLaunchEvent extends ContainerLauncherEvent {
+
+  private final ContainerLaunchContext container;
+  private final Task task;
 
   public ContainerRemoteLaunchEvent(TaskAttemptId taskAttemptID,
       ContainerId containerID, String containerMgrAddress,
-      ContainerToken containerToken) {
-    super(taskAttemptID, containerID, containerMgrAddress,
-        containerToken,
+      ContainerToken containerToken,
+      ContainerLaunchContext containerLaunchContext, Task remoteTask) {
+    super(taskAttemptID, containerID, containerMgrAddress, containerToken,
         ContainerLauncher.EventType.CONTAINER_REMOTE_LAUNCH);
+    this.container = containerLaunchContext;
+    this.task = remoteTask;
   }
-  public abstract ContainerLaunchContext getContainer();
 
-  public abstract Task getRemoteTask();
+  public ContainerLaunchContext getContainer() {
+    return this.container;
+  }
 
+  public Task getRemoteTask() {
+    return this.task;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return super.equals(obj);
+  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java Thu Jan  5 19:21:01 2012
@@ -104,10 +104,9 @@ public abstract class RMCommunicator ext
   @Override
   public void start() {
     scheduler= createSchedulerProxy();
-    //LOG.info("Scheduler is " + scheduler);
     register();
     startAllocatorThread();
-    JobID id = TypeConverter.fromYarn(context.getApplicationID());
+    JobID id = TypeConverter.fromYarn(this.applicationId);
     JobId jobId = TypeConverter.toYarn(id);
     job = context.getJob(jobId);
     super.start();

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java Thu Jan  5 19:21:01 2012
@@ -30,18 +30,17 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobCounter;
-import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
 import org.apache.hadoop.mapreduce.jobhistory.NormalizedResourceEvent;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
@@ -69,7 +68,7 @@ import org.apache.hadoop.yarn.util.RackR
 public class RMContainerAllocator extends RMContainerRequestor
     implements ContainerAllocator {
 
-  private static final Log LOG = LogFactory.getLog(RMContainerAllocator.class);
+  static final Log LOG = LogFactory.getLog(RMContainerAllocator.class);
   
   public static final 
   float DEFAULT_COMPLETED_MAPS_PERCENT_FOR_REDUCE_SLOWSTART = 0.05f;
@@ -77,7 +76,10 @@ public class RMContainerAllocator extend
   private static final Priority PRIORITY_FAST_FAIL_MAP;
   private static final Priority PRIORITY_REDUCE;
   private static final Priority PRIORITY_MAP;
-  
+
+  private Thread eventHandlingThread;
+  private volatile boolean stopEventHandling;
+
   static {
     PRIORITY_FAST_FAIL_MAP = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Priority.class);
     PRIORITY_FAST_FAIL_MAP.setPriority(5);
@@ -130,7 +132,10 @@ public class RMContainerAllocator extend
   private float reduceSlowStart = 0;
   private long retryInterval;
   private long retrystartTime;
-  
+
+  BlockingQueue<ContainerAllocatorEvent> eventQueue
+    = new LinkedBlockingQueue<ContainerAllocatorEvent>();
+
   public RMContainerAllocator(ClientService clientService, AppContext context) {
     super(clientService, context);
   }
@@ -156,6 +161,40 @@ public class RMContainerAllocator extend
   }
 
   @Override
+  public void start() {
+    this.eventHandlingThread = new Thread() {
+      @SuppressWarnings("unchecked")
+      @Override
+      public void run() {
+
+        ContainerAllocatorEvent event;
+
+        while (!stopEventHandling && !Thread.currentThread().isInterrupted()) {
+          try {
+            event = RMContainerAllocator.this.eventQueue.take();
+          } catch (InterruptedException e) {
+            LOG.error("Returning, interrupted : " + e);
+            return;
+          }
+
+          try {
+            handleEvent(event);
+          } catch (Throwable t) {
+            LOG.error("Error in handling event type " + event.getType()
+                + " to the ContainreAllocator", t);
+            // Kill the AM
+            eventHandler.handle(new JobEvent(getJob().getID(),
+              JobEventType.INTERNAL_ERROR));
+            return;
+          }
+        }
+      }
+    };
+    this.eventHandlingThread.start();
+    super.start();
+  }
+
+  @Override
   protected synchronized void heartbeat() throws Exception {
     LOG.info("Before Scheduling: " + getStat());
     List<Container> allocatedContainers = getResources();
@@ -181,6 +220,8 @@ public class RMContainerAllocator extend
 
   @Override
   public void stop() {
+    this.stopEventHandling = true;
+    eventHandlingThread.interrupt();
     super.stop();
     LOG.info("Final Stats: " + getStat());
   }
@@ -192,10 +233,27 @@ public class RMContainerAllocator extend
   public void setIsReduceStarted(boolean reduceStarted) {
     this.reduceStarted = reduceStarted; 
   }
-  
-  @SuppressWarnings("unchecked")
+
   @Override
-  public synchronized void handle(ContainerAllocatorEvent event) {
+  public void handle(ContainerAllocatorEvent event) {
+    int qSize = eventQueue.size();
+    if (qSize != 0 && qSize % 1000 == 0) {
+      LOG.info("Size of event-queue in RMContainerAllocator is " + qSize);
+    }
+    int remCapacity = eventQueue.remainingCapacity();
+    if (remCapacity < 1000) {
+      LOG.warn("Very low remaining capacity in the event-queue "
+          + "of RMContainerAllocator: " + remCapacity);
+    }
+    try {
+      eventQueue.put(event);
+    } catch (InterruptedException e) {
+      throw new YarnException(e);
+    }
+  }
+
+  @SuppressWarnings({ "unchecked" })
+  protected synchronized void handleEvent(ContainerAllocatorEvent event) {
     LOG.info("Processing the event " + event.toString());
     recalculateReduceSchedule = true;
     if (event.getType() == ContainerAllocator.EventType.CONTAINER_REQ) {
@@ -206,9 +264,7 @@ public class RMContainerAllocator extend
           int minSlotMemSize = getMinContainerCapability().getMemory();
           mapResourceReqt = (int) Math.ceil((float) mapResourceReqt/minSlotMemSize)
               * minSlotMemSize;
-          JobID id = TypeConverter.fromYarn(applicationId);
-          JobId jobId = TypeConverter.toYarn(id);
-          eventHandler.handle(new JobHistoryEvent(jobId, 
+          eventHandler.handle(new JobHistoryEvent(getJob().getID(), 
               new NormalizedResourceEvent(org.apache.hadoop.mapreduce.TaskType.MAP,
               mapResourceReqt)));
           LOG.info("mapResourceReqt:"+mapResourceReqt);
@@ -232,9 +288,7 @@ public class RMContainerAllocator extend
           //round off on slotsize
           reduceResourceReqt = (int) Math.ceil((float) 
               reduceResourceReqt/minSlotMemSize) * minSlotMemSize;
-          JobID id = TypeConverter.fromYarn(applicationId);
-          JobId jobId = TypeConverter.toYarn(id);
-          eventHandler.handle(new JobHistoryEvent(jobId, 
+          eventHandler.handle(new JobHistoryEvent(getJob().getID(), 
               new NormalizedResourceEvent(
                   org.apache.hadoop.mapreduce.TaskType.REDUCE,
               reduceResourceReqt)));
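
The RMContainerAllocator hunks above (MAPREDUCE-3572) split the old synchronized handle() into a handle() that only enqueues onto a LinkedBlockingQueue and a handleEvent() drained by a dedicated dispatcher thread. A stripped-down sketch of that producer/consumer split, using a hypothetical AsyncAllocator with String events instead of ContainerAllocatorEvent:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Sketch of the handle()/handleEvent() split: callers enqueue and return
// quickly; a single dispatcher thread does the (serialized) processing.
class AsyncAllocator {
    private final BlockingQueue<String> eventQueue = new LinkedBlockingQueue<String>();
    private volatile boolean stopped;
    private Thread eventHandlingThread;

    void start() {
        eventHandlingThread = new Thread() {
            @Override public void run() {
                while (!stopped && !Thread.currentThread().isInterrupted()) {
                    String event;
                    try {
                        event = eventQueue.take();   // block until work arrives
                    } catch (InterruptedException e) {
                        return;                      // interrupted during stop()
                    }
                    handleEvent(event);
                }
            }
        };
        eventHandlingThread.start();
    }

    void handle(String event) {                      // cheap for callers
        try {
            eventQueue.put(event);
        } catch (InterruptedException e) {
            throw new IllegalStateException(e);
        }
    }

    synchronized void handleEvent(String event) {    // processing stays serialized
        System.out.println("processing " + event);
    }

    void stop() throws InterruptedException {
        stopped = true;
        eventHandlingThread.interrupt();
        eventHandlingThread.join();
    }

    public static void main(String[] args) throws InterruptedException {
        AsyncAllocator a = new AsyncAllocator();
        a.start();
        a.handle("CONTAINER_REQ");
        Thread.sleep(100);                           // let the dispatcher drain the queue
        a.stop();
    }
}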

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java Thu Jan  5 19:21:01 2012
@@ -17,8 +17,11 @@
 */
 package org.apache.hadoop.mapred;
 
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
 
 import java.io.IOException;
 
@@ -68,33 +71,47 @@ public class TestTaskAttemptListenerImpl
     JVMId id = new JVMId("foo",1, true, 1);
     WrappedJvmID wid = new WrappedJvmID(id.getJobId(), id.isMap, id.getId());
 
+    // Verify ask before registration.
     //The JVM ID has not been registered yet so we should kill it.
     JvmContext context = new JvmContext();
     context.jvmId = id; 
     JvmTask result = listener.getTask(context);
     assertNotNull(result);
     assertTrue(result.shouldDie);
-    
-    //Now register the JVM, and see
-    listener.registerPendingTask(wid);
-    result = listener.getTask(context);
-    assertNull(result);
-    
+
+    // Verify ask after registration but before launch
     TaskAttemptId attemptID = mock(TaskAttemptId.class);
     Task task = mock(Task.class);
     //Now put a task with the ID
-    listener.registerLaunchedTask(attemptID, task, wid);
+    listener.registerPendingTask(task, wid);
+    result = listener.getTask(context);
+    assertNotNull(result);
+    assertFalse(result.shouldDie);
+    // Unregister for more testing.
+    listener.unregister(attemptID, wid);
+
+    // Verify ask after registration and launch
+    //Now put a task with the ID
+    listener.registerPendingTask(task, wid);
+    listener.registerLaunchedTask(attemptID);
     verify(hbHandler).register(attemptID);
     result = listener.getTask(context);
     assertNotNull(result);
     assertFalse(result.shouldDie);
-    
+    // Don't unregister yet for more testing.
+
     //Verify that if we call it again a second time we are told to die.
     result = listener.getTask(context);
     assertNotNull(result);
     assertTrue(result.shouldDie);
-    
+
     listener.unregister(attemptID, wid);
+
+    // Verify after unregistration.
+    result = listener.getTask(context);
+    assertNotNull(result);
+    assertTrue(result.shouldDie);
+
     listener.stop();
   }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java Thu Jan  5 19:21:01 2012
@@ -19,6 +19,7 @@
 package org.apache.hadoop.mapreduce.v2.app;
 
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.EnumSet;
@@ -65,7 +66,9 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
 import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleaner;
 import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanupEvent;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.Clock;
@@ -88,6 +91,7 @@ import org.apache.hadoop.yarn.util.Build
  * Mock MRAppMaster. Doesn't start RPC servers.
  * No threads are started except of the event Dispatcher thread.
  */
+@SuppressWarnings("unchecked")
 public class MRApp extends MRAppMaster {
   private static final Log LOG = LogFactory.getLog(MRApp.class);
 
@@ -173,7 +177,8 @@ public class MRApp extends MRAppMaster {
   }
 
   public Job submit(Configuration conf) throws Exception {
-    String user = conf.get(MRJobConfig.USER_NAME, "mapred");
+    String user = conf.get(MRJobConfig.USER_NAME, UserGroupInformation
+      .getCurrentUser().getShortUserName());
     conf.set(MRJobConfig.USER_NAME, user);
     conf.set(MRJobConfig.MR_AM_STAGING_DIR, testAbsPath.toString());
     conf.setBoolean(MRJobConfig.MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR, true);
@@ -187,6 +192,14 @@ public class MRApp extends MRAppMaster {
     start();
     DefaultMetricsSystem.shutdown();
     Job job = getContext().getAllJobs().values().iterator().next();
+
+    // Write job.xml
+    String jobFile = MRApps.getJobFile(conf, user,
+      TypeConverter.fromYarn(job.getID()));
+    LOG.info("Writing job conf to " + jobFile);
+    new File(jobFile).getParentFile().mkdirs();
+    conf.writeXml(new FileOutputStream(jobFile));
+
     return job;
   }
 
@@ -308,16 +321,16 @@ public class MRApp extends MRAppMaster {
     return new TaskAttemptListener(){
       @Override
       public InetSocketAddress getAddress() {
-        return null;
+        return NetUtils.createSocketAddr("localhost:54321");
       }
       @Override
-      public void registerLaunchedTask(TaskAttemptId attemptID, 
-          org.apache.hadoop.mapred.Task task, WrappedJvmID jvmID) {}
+      public void registerLaunchedTask(TaskAttemptId attemptID) {}
       @Override
       public void unregister(TaskAttemptId attemptID, WrappedJvmID jvmID) {
       }
       @Override
-      public void registerPendingTask(WrappedJvmID jvmID) {
+      public void registerPendingTask(org.apache.hadoop.mapred.Task task,
+          WrappedJvmID jvmID) {
       }
     };
   }
@@ -337,12 +350,14 @@ public class MRApp extends MRAppMaster {
     return new MockContainerLauncher();
   }
 
-  class MockContainerLauncher implements ContainerLauncher {
+  protected class MockContainerLauncher implements ContainerLauncher {
 
     //We are running locally so set the shuffle port to -1 
     int shufflePort = -1;
 
-    @SuppressWarnings("unchecked")
+    public MockContainerLauncher() {
+    }
+
     @Override
     public void handle(ContainerLauncherEvent event) {
       switch (event.getType()) {
@@ -474,6 +489,7 @@ public class MRApp extends MRAppMaster {
     }
     @Override
     protected void setup(JobImpl job) throws IOException {
+      super.setup(job);
       job.conf.setInt(MRJobConfig.NUM_REDUCES, reduces);
       job.remoteJobConfFile = new Path("test");
     }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java Thu Jan  5 19:21:01 2012
@@ -40,6 +40,7 @@ import org.junit.Test;
 /**
  * Tests the state machine of MR App.
  */
+@SuppressWarnings("unchecked")
 public class TestMRApp {
 
   @Test

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java Thu Jan  5 19:21:01 2012
@@ -1186,12 +1186,12 @@ public class TestRMContainerAllocator {
 
     public void sendRequests(List<ContainerRequestEvent> reqs) {
       for (ContainerRequestEvent req : reqs) {
-        super.handle(req);
+        super.handleEvent(req);
       }
     }
 
     public void sendFailure(ContainerFailedEvent f) {
-      super.handle(f);
+      super.handleEvent(f);
     }
     
     // API to be used by tests

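The test now drives the allocator through super.handleEvent rather than super.handle, i.e. it bypasses the asynchronous dispatch path and processes each event synchronously. A minimal sketch of that test pattern with hypothetical class and method names (not the actual RMContainerAllocator API):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Production-style handler: handle() only enqueues; a background thread
// would normally drain the queue and invoke handleEvent().
class AsyncAllocator {
  private final BlockingQueue<String> queue = new LinkedBlockingQueue<String>();

  public void handle(String event) {
    queue.add(event);                            // asynchronous path
  }

  protected void handleEvent(String event) {
    System.out.println("processed " + event);    // synchronous processing
  }
}

// Test subclass: call the protected synchronous method directly so the
// test does not depend on dispatcher threads or timing.
class TestAllocator extends AsyncAllocator {
  public void sendRequest(String event) {
    super.handleEvent(event);
  }

  public static void main(String[] args) {
    new TestAllocator().sendRequest("CONTAINER_REQ");
  }
}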
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java Thu Jan  5 19:21:01 2012
@@ -43,6 +43,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.junit.Test;
 
+@SuppressWarnings("unchecked")
 public class TestTaskAttempt{
 
   @Test

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java Thu Jan  5 19:21:01 2012
@@ -103,7 +103,7 @@ public class TaskAttemptUnsuccessfulComp
        (TaskAttemptID id, TaskType taskType,
         String status, long finishTime, 
         String hostname, String error) {
-    this(id, taskType, status, finishTime, hostname, -1, null, error, null);
+    this(id, taskType, status, finishTime, hostname, -1, "", error, null);
   }
 
   TaskAttemptUnsuccessfulCompletionEvent() {}

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/TokenCache.java Thu Jan  5 19:21:01 2012
@@ -19,9 +19,7 @@
 package org.apache.hadoop.mapreduce.security;
 
 import java.io.IOException;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -35,9 +33,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Master;
 import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -90,7 +86,7 @@ public class TokenCache {
       obtainTokensForNamenodesInternal(fs, credentials, conf);
     }
   }
-  
+
   /**
    * get delegation token for a specific FS
    * @param fs
@@ -99,6 +95,7 @@ public class TokenCache {
    * @param conf
    * @throws IOException
    */
+  @SuppressWarnings("deprecation")
   static void obtainTokensForNamenodesInternal(FileSystem fs, 
       Credentials credentials, Configuration conf) throws IOException {
     String delegTokenRenewer = Master.getMasterPrincipal(conf);
@@ -131,7 +128,8 @@ public class TokenCache {
           return;
         }
       }
-      List<Token<?>> tokens = fs.getDelegationTokens(delegTokenRenewer);
+      List<Token<?>> tokens =
+          fs.getDelegationTokens(delegTokenRenewer, credentials);
       if (tokens != null) {
         for (Token<?> token : tokens) {
           credentials.addToken(token.getService(), token);
@@ -141,13 +139,13 @@ public class TokenCache {
       }
       //Call getDelegationToken as well for now - for FS implementations
       // which may not have implmented getDelegationTokens (hftp)
-      Token<?> token = fs.getDelegationToken(delegTokenRenewer);
-      if (token != null) {
-        Text fsNameText = new Text(fsName);
-        token.setService(fsNameText);
-        credentials.addToken(fsNameText, token);
-        LOG.info("Got dt for " + fs.getUri() + ";uri="+ fsName + 
-            ";t.service="+token.getService());
+      if (tokens == null || tokens.size() == 0) {
+        Token<?> token = fs.getDelegationToken(delegTokenRenewer);
+        if (token != null) {
+          credentials.addToken(token.getService(), token);
+          LOG.info("Got dt for " + fs.getUri() + ";uri=" + fsName
+              + ";t.service=" + token.getService());
+        }
       }
     }
   }

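The TokenCache change switches to the bulk FileSystem.getDelegationTokens(renewer, credentials) call and only falls back to the older single-token getDelegationToken(renewer) when the bulk call returns nothing. A condensed sketch of that fallback, assuming only the FileSystem, Credentials and Token calls that appear in the hunk above (APIs as of the Hadoop version this commit targets):

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class DelegationTokenSketch {
  static void obtainTokens(FileSystem fs, Credentials credentials,
      String renewer) throws IOException {
    // Preferred path: ask the FileSystem for all of its delegation tokens.
    List<Token<?>> tokens = fs.getDelegationTokens(renewer, credentials);
    if (tokens != null) {
      for (Token<?> token : tokens) {
        credentials.addToken(token.getService(), token);
      }
    }
    // Fallback for FileSystems (hftp at the time) that only implement
    // the older single-token call.
    if (tokens == null || tokens.isEmpty()) {
      Token<?> token = fs.getDelegationToken(renewer);
      if (token != null) {
        credentials.addToken(token.getService(), token);
      }
    }
  }
}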
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/java/mapred-default.xml:713112
 /hadoop/core/trunk/src/mapred/mapred-default.xml:776175-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java Thu Jan  5 19:21:01 2012
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertEqu
 
 import java.io.IOException;
 import java.text.ParseException;
+import java.util.Random;
 
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapreduce.FileSystemCounter;
@@ -98,6 +99,37 @@ public class TestCounters {
     }
   }
   
+  /**
+   * Verify counter value works
+   */
+  @SuppressWarnings("deprecation")
+  @Test
+  public void testCounterValue() {
+    Counters counters = new Counters();
+    final int NUMBER_TESTS = 100;
+    final int NUMBER_INC = 10;
+    final Random rand = new Random();
+    for (int i = 0; i < NUMBER_TESTS; i++) {
+      long initValue = rand.nextInt();
+      long expectedValue = initValue;
+      Counter counter = counters.findCounter("foo", "bar");
+      counter.setValue(initValue);
+      assertEquals("Counter value is not initialized correctly",
+                   expectedValue, counter.getValue());
+      for (int j = 0; j < NUMBER_INC; j++) {
+        int incValue = rand.nextInt();
+        counter.increment(incValue);
+        expectedValue += incValue;
+        assertEquals("Counter value is not incremented correctly",
+                     expectedValue, counter.getValue());
+      }
+      expectedValue = rand.nextInt();
+      counter.setValue(expectedValue);
+      assertEquals("Counter value is not set correctly",
+                   expectedValue, counter.getValue());
+    }
+  }
+  
   @SuppressWarnings("deprecation")
   @Test
   public void testLegacyNames() {

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/lib/TestZKClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/lib/TestZKClient.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/lib/TestZKClient.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/lib/TestZKClient.java Thu Jan  5 19:21:01 2012
@@ -29,7 +29,7 @@ import java.net.Socket;
 import junit.framework.Assert;
 
 import org.apache.hadoop.yarn.lib.ZKClient;
-import org.apache.zookeeper.server.NIOServerCnxn;
+import org.apache.zookeeper.server.NIOServerCnxnFactory;
 import org.apache.zookeeper.server.ZKDatabase;
 import org.apache.zookeeper.server.ZooKeeperServer;
 import org.apache.zookeeper.server.persistence.FileTxnLog;
@@ -45,7 +45,8 @@ public class TestZKClient  {
 
   protected String hostPort = "127.0.0.1:2000";
   protected int maxCnxns = 0;
-  protected NIOServerCnxn.Factory factory = null;
+  protected NIOServerCnxnFactory factory = null;
+  protected ZooKeeperServer zks;
   protected File tmpDir = null;
 
   public static String send4LetterWord(String host, int port, String cmd)
@@ -144,10 +145,11 @@ public class TestZKClient  {
       BASETEST.mkdirs();
     }
     File dataDir = createTmpDir(BASETEST);
-    ZooKeeperServer zks = new ZooKeeperServer(dataDir, dataDir, 3000);
+    zks = new ZooKeeperServer(dataDir, dataDir, 3000);
     final int PORT = Integer.parseInt(hostPort.split(":")[1]);
     if (factory == null) {
-      factory = new NIOServerCnxn.Factory(new InetSocketAddress(PORT),maxCnxns);
+      factory = new NIOServerCnxnFactory();
+      factory.configure(new InetSocketAddress(PORT), maxCnxns);
     }
     factory.startup(zks);
     Assert.assertTrue("waiting for server up",
@@ -158,8 +160,8 @@ public class TestZKClient  {
   
   @After
   public void tearDown() throws IOException, InterruptedException {
-    if (factory != null) {
-      ZKDatabase zkDb = factory.getZooKeeperServer().getZKDatabase();
+    if (zks != null) {
+      ZKDatabase zkDb = zks.getZKDatabase();
       factory.shutdown();
       try {
         zkDb.close();

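The TestZKClient change tracks the ZooKeeper 3.4 server API: NIOServerCnxn.Factory is replaced by NIOServerCnxnFactory with a separate configure() step, and the test keeps its own reference to the ZooKeeperServer instead of fetching it back from the factory at teardown. A minimal sketch of starting and stopping an embedded server against the 3.4 API; the data directory and port are placeholders:

import java.io.File;
import java.net.InetSocketAddress;
import org.apache.zookeeper.server.NIOServerCnxnFactory;
import org.apache.zookeeper.server.ZooKeeperServer;

public class EmbeddedZkSketch {
  public static void main(String[] args) throws Exception {
    File dataDir = new File("/tmp/zk-test-data");   // placeholder directory
    dataDir.mkdirs();

    // 3.4 API: snapshot dir, txn log dir, tick time in ms.
    ZooKeeperServer zks = new ZooKeeperServer(dataDir, dataDir, 3000);

    // 3.4 API: construct, then configure with bind address and max connections.
    NIOServerCnxnFactory factory = new NIOServerCnxnFactory();
    factory.configure(new InetSocketAddress(2000), 0);
    factory.startup(zks);

    // ... exercise client code against 127.0.0.1:2000 ...

    // Keep a handle on the ZooKeeperServer: shutting the factory down no
    // longer hands it back, so close its database explicitly.
    factory.shutdown();
    zks.getZKDatabase().close();
  }
}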
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/pom.xml?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/pom.xml Thu Jan  5 19:21:01 2012
@@ -310,7 +310,7 @@
       <dependency>
         <groupId>org.apache.zookeeper</groupId>
         <artifactId>zookeeper</artifactId>
-        <version>3.3.1</version>
+        <version>3.4.2</version>
         <exclusions>
           <exclusion>
             <!-- otherwise seems to drag in junit 3.8.1 via jline -->

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/c++/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/c++:1159757-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/c++:1159757-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/c++:713112
 /hadoop/core/trunk/src/c++:776175-784663

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib:1152502-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib:1152502-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/contrib:713112
 /hadoop/core/trunk/src/contrib:784664-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/block_forensics/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/block_forensics:1152502-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/block_forensics:1152502-1227765
 /hadoop/core/branches/branch-0.19/hdfs/src/contrib/block_forensics:713112
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/block_forensics:713112
 /hadoop/core/trunk/src/contrib/block_forensics:784664-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build-contrib.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build-contrib.xml:1161333-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build-contrib.xml:1161333-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/build-contrib.xml:713112
 /hadoop/core/trunk/src/contrib/build-contrib.xml:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build.xml:1161333-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build.xml:1161333-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/build.xml:713112
 /hadoop/core/trunk/src/contrib/build.xml:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/data_join/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/data_join:1159757-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/data_join:1159757-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/data_join:713112
 /hadoop/core/trunk/src/contrib/data_join:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/eclipse-plugin/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/eclipse-plugin:1159757-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/eclipse-plugin:1159757-1227765
 /hadoop/core/branches/branch-0.19/core/src/contrib/eclipse-plugin:713112
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/eclipse-plugin:713112
 /hadoop/core/trunk/src/contrib/eclipse-plugin:776175-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/index/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/index:1159757-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/index:1159757-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/index:713112
 /hadoop/core/trunk/src/contrib/index:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/vaidya/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/vaidya:1159757-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/vaidya:1159757-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/vaidya:713112
 /hadoop/core/trunk/src/contrib/vaidya:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/examples/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/examples:1152502-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/examples:1152502-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/examples:713112
 /hadoop/core/trunk/src/examples:776175-784663

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/java:1152502-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/java:1152502-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/java:713112
 /hadoop/core/trunk/src/mapred:776175-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=1227775&r1=1227774&r2=1227775&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/org/apache/hadoop/mapred/JobInProgress.java Thu Jan  5 19:21:01 2012
@@ -3210,7 +3210,7 @@ public class JobInProgress {
             (taskid, 
              taskType, taskStatus.getRunState().toString(),
              finishTime, 
-             taskTrackerHostName, -1, null, diagInfo,
+             taskTrackerHostName, -1, "", diagInfo,
              splits.burst());
     jobHistory.logEvent(tue, taskid.getJobID());
         

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred:1152502-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred:1152502-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred:713112
 /hadoop/core/trunk/src/test/mapred:776175-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs:1159757-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs:1159757-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/fs:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/fs:776175-785643
 /hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/fs:817878-835934

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs:1152502-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs:1152502-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/hdfs:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/hdfs:776175-785643
 /hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/hdfs:817878-835934

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/FileBench.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/FileBench.java:1161333-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/FileBench.java:1161333-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/io/FileBench.java:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/io/FileBench.java:776175-785643
 /hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/io/FileBench.java:817878-835934

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/TestSequenceFileMergeProgress.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/TestSequenceFileMergeProgress.java:1161333-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/TestSequenceFileMergeProgress.java:1161333-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/io/TestSequenceFileMergeProgress.java:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/io/TestSequenceFileMergeProgress.java:776175-785643
 /hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/io/TestSequenceFileMergeProgress.java:817878-835934

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc:1159757-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc:1159757-1227765
 /hadoop/core/branches/branch-0.19/hdfs/src/test/hdfs-with-mr/org/apache/hadoop/ipc:713112
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/ipc:713112
 /hadoop/core/trunk/src/test/hdfs-with-mr/org/apache/hadoop/ipc:776175-784663

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java:1161333-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java:1161333-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java:776175-785643
 /hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java:817878-835934

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java:1161333-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java:1161333-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java:776175-785643
 /hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java:817878-835934

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/webapps/job/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Jan  5 19:21:01 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/webapps/job:1152502-1227258
+/hadoop/common/trunk/hadoop-mapreduce-project/src/webapps/job:1152502-1227765
 /hadoop/core/branches/branch-0.19/mapred/src/webapps/job:713112
 /hadoop/core/trunk/src/webapps/job:776175-785643


