hadoop-mapreduce-commits mailing list archives

From a..@apache.org
Subject svn commit: r1196458 [2/19] - in /hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ assembly/ bin/ conf/ dev-support/ hadoop-mapreduce-client/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/ hadoop-mapreduce-client/hadoop-mapreduce-cli...
Date Wed, 02 Nov 2011 05:35:03 GMT

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Nov  2 05:34:31 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-mapreduce-project:1152502-1179483
+/hadoop/common/trunk/hadoop-mapreduce-project:1152502-1196451
 /hadoop/core/branches/branch-0.19/mapred:713112
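These svn:mergeinfo ranges are Subversion's merge-tracking metadata: bumping the upper bound from r1179483 to r1196451 records that trunk changes through r1196451 have now been merged into the HDFS-1623 branch. A sketch of the kind of command sequence that produces such an update, assuming a working copy of the branch:

  cd hadoop-mapreduce-project
  # sync-merge everything from trunk not yet recorded in svn:mergeinfo
  svn merge http://svn.apache.org/repos/asf/hadoop/common/trunk/hadoop-mapreduce-project .
  svn commit -m "Merge trunk into HDFS-1623 branch"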

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/.gitignore
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Nov  2 05:34:31 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/.gitignore:1161333-1179483
+/hadoop/common/trunk/hadoop-mapreduce-project/.gitignore:1161333-1196451
 /hadoop/core/branches/branch-0.19/mapred/.gitignore:713112
 /hadoop/core/trunk/.gitignore:784664-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt Wed Nov  2 05:34:31 2011
@@ -10,6 +10,8 @@ Trunk (unreleased changes)
     (Plamen Jeliazkov via shv)
 
   IMPROVEMENTS
+    MAPREDUCE-3008. Improvements to cumulative CPU emulation for short running 
+                    tasks in Gridmix. (amarrk)
 
     MAPREDUCE-2887 due to HADOOP-7524 Change RPC to allow multiple protocols
                   including multiple versions of the same protocol (sanjay Radia)
@@ -20,6 +22,12 @@ Trunk (unreleased changes)
     MAPREDUCE-2836. Provide option to fail jobs when submitted to non-existent
     fair scheduler pools. (Ahmed Radwan via todd)
 
+    MAPREDUCE-3171. normalize nodemanager native code compilation with common/hdfs 
+    native. (tucu)
+
+    MAPREDUCE-3149. Add a test to verify that TokenCache handles file system 
+    uri with no authority. (John George via jitendra)
+
   BUG FIXES
 
     MAPREDUCE-2950. [Gridmix] TestUserResolve fails in trunk. 
@@ -32,9 +40,17 @@ Trunk (unreleased changes)
     findBugs, correct links to findBugs artifacts and no links to the
     artifacts when there are no warnings. (Tom White via vinodkv).
 
-    MAPREDUCE-3081. Fix vaidya startup script. (gkesavan via suhas).
+    MAPREDUCE-3183. hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml 
+    missing license header. (Hitesh Shah via tucu).
+
+    MAPREDUCE-3003. Publish MR JARs to Maven snapshot repository. (tucu)
+
+    MAPREDUCE-3204. mvn site:site fails on MapReduce. (tucu)
+
+    MAPREDUCE-3014. Rename and invert logic of '-cbuild' profile to 'native' and off 
+    by default. (tucu)
 
-Release 0.23.0 - Unreleased
+Release 0.23.0 - 2011-11-01 
 
   INCOMPATIBLE CHANGES
 
@@ -75,12 +91,26 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-2037. Capture intermediate progress, CPU and memory usage for
     tasks. (Dick King via acmurthy) 
 
+    MAPREDUCE-279. MapReduce 2.0. Merging MR-279 branch into trunk. Contributed by
+    Arun C Murthy, Christopher Douglas, Devaraj Das, Greg Roelofs, Jeffrey
+    Naisbitt, Josh Wills, Jonathan Eagles, Krishna Ramachandran, Luke Lu, Mahadev
+    Konar, Robert Evans, Sharad Agarwal, Siddharth Seth, Thomas Graves, and Vinod
+    Kumar Vavilapalli.
+
     MAPREDUCE-2930. Added the ability to be able to generate graphs from the
     state-machine definitions. (Binglin Chang via vinodkv)
 
     MAPREDUCE-2719. Add a simple, DistributedShell, application to illustrate
     alternate frameworks on YARN. (Hitesh Shah via acmurthy)
 
+    MAPREDUCE-3104. Implemented Application-acls. (vinodkv)
+
+    MAPREDUCE-2708. Designed and implemented MR Application Master recovery to
+    make MR AMs resume their progress after restart. (Sharad Agarwal via vinodkv)
+
+    MAPREDUCE-2858. Added a WebApp Proxy for applications. (Robert Evans via
+    acmurthy) 
+
   IMPROVEMENTS
 
     MAPREDUCE-2187. Reporter sends progress during sort/merge. (Anupam Seth via
@@ -318,6 +348,9 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-2726. Added job-file to the AM and JobHistoryServer web
     interfaces. (Jeffrey Naisbitt via vinodkv)
 
+    MAPREDUCE-2880. Improve classpath-construction for mapreduce AM and
+    containers. (Arun C Murthy via vinodkv)
+
     MAPREDUCE-3055. Simplified ApplicationAttemptId passing to
     ApplicationMaster via environment variable. (vinodkv)
 
@@ -338,9 +371,6 @@ Release 0.23.0 - Unreleased
 
     MAPREDUCE-3134. Added documentation for the CapacityScheduler. (acmurthy) 
 
-    MAPREDUCE-3138. Add a utility to help applications bridge changes in 
-    Context Objects APIs due to MAPREDUCE-954. (omalley via acmurthy)
-
     MAPREDUCE-3013. Removed YarnConfiguration.YARN_SECURITY_INFO and its usage
     as it doesn't affect security any more. (vinodkv)
 
@@ -351,9 +381,68 @@ Release 0.23.0 - Unreleased
     the outputs of tasks from a crashed job so as to support MR Application
     Master recovery. (Sharad Agarwal and Arun C Murthy via vinodkv)
 
-    MAPREDUCE-2738. Added the missing cluster level statisticss on the RM web
+    MAPREDUCE-2738. Added the missing cluster level statistics on the RM web
     UI. (Robert Joseph Evans via vinodkv)
 
+    MAPREDUCE-2988. Reenabled TestLinuxContainerExecutor reflecting the
+    current NodeManager code. (Robert Joseph Evans via vinodkv) 
+
+    MAPREDUCE-3161. Improved some javadocs and fixed some typos in
+    YARN. (Todd Lipcon via vinodkv)
+
+    MAPREDUCE-3148. Ported MAPREDUCE-2702 to old mapred api for aiding task
+    recovery. (acmurthy) 
+
+    MAPREDUCE-3133. Running a set of methods in a Single Test Class.
+    (Jonathan Eagles via mahadev)
+
+    MAPREDUCE-3059. QueueMetrics do not have metrics for aggregate 
+    containers-allocated and aggregate containers-released.
+    (Devaraj K via mahadev)
+   
+    MAPREDUCE-3187. Add names for various unnamed threads in MR2.
+    (Todd Lipcon and Siddharth Seth via mahadev)
+
+    MAPREDUCE-3136. Added documentation for setting up Hadoop clusters in both
+    non-secure and secure mode for both HDFS & YARN. (acmurthy)
+
+    MAPREDUCE-3068. Added a whitelist of environment variables for containers
+    from the NodeManager and set MALLOC_ARENA_MAX for all daemons and
+    containers. (Chris Riccomini via acmurthy)
+
+    MAPREDUCE-3144. Augmented JobHistory with the information needed for
+    serving aggregated logs. (Siddharth Seth via vinodkv)
+  
+    MAPREDUCE-3163. JobClient spews errors when killing MR2 job.
+    (mahadev)
+
+    MAPREDUCE-3239. Use new createSocketAddr API in MRv2 to give better 
+    error messages on misconfig (Todd Lipcon via mahadev)
+
+    MAPREDUCE-2747. Cleaned up LinuxContainerExecutor binary sources and changed
+    the configuration to use yarn names. (Robert Joseph Evans via vinodkv)
+
+    MAPREDUCE-3205. Fix memory specifications to be physical rather than
+    virtual, allowing for a ratio between the two to be configurable. (todd
+    via acmurthy) 
+
+    MAPREDUCE-2986. Fixed MiniYARNCluster to support multiple NodeManagers.
+    (Anupam Seth via vinodkv)
+
+    MAPREDUCE-2736. Remove unused contrib components dependent on MR1. (eli)
+
+    MAPREDUCE-2989. Modified JobHistory to link to task and AM logs from the
+    JobHistoryServer. (Siddharth Seth via vinodkv)
+
+    MAPREDUCE-3146. Added a MR specific command line to dump logs for a
+    given TaskAttemptID. (Siddharth Seth via vinodkv)
+
+    MAPREDUCE-3275. Added documentation for AM WebApp Proxy. (Robert Evans via
+    acmurthy)
+
+    MAPREDUCE-3322. Added a better index.html and a brief overview of YARN
+    architecture. (acmurthy) 
+
   OPTIMIZATIONS
 
     MAPREDUCE-2026. Make JobTracker.getJobCounters() and
@@ -365,8 +454,8 @@ Release 0.23.0 - Unreleased
 
     MAPREDUCE-901. Efficient framework counters. (llu via acmurthy)
 
-    MAPREDUCE-2880. Improve classpath-construction for mapreduce AM and
-    containers. (Arun C Murthy via vinodkv)
+    MAPREDUCE-2629. Work around a JVM class loading quirk which prevents
+    JIT compilation of inner class methods in ReduceContextImpl.
 
   BUG FIXES
 
@@ -1529,6 +1618,300 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-2913. Fixed TestMRJobs.testFailingMapper to assert the correct
     TaskCompletionEventStatus. (Jonathan Eagles via vinodkv)
 
+    MAPREDUCE-2794. [MR-279] Incorrect metrics value for AvailableGB per 
+    queue per user. (John George via mahadev)
+
+    MAPREDUCE-2783. Fixing RM web-UI to show no tracking-URL when AM
+    crashes. (Eric Payne via vinodkv)
+
+    MAPREDUCE-3141. Fix the broken MRAppMaster to work over YARN in security
+    mode. (vinodkv)
+
+    MAPREDUCE-2751. Modified NodeManager to stop leaving around local files
+    after application finishes. (Siddharth Seth via vinodkv)
+
+    MAPREDUCE-3033. Ensure Master interface pays attention to classic v/s yarn
+    frameworks. (Hitesh Shah via acmurthy)
+
+    MAPREDUCE-2802. Ensure JobHistory filenames have jobId. (Jonathan Eagles
+    via acmurthy) 
+
+    MAPREDUCE-2876. Use a different config for ContainerAllocationExpirer.
+    (Anupam Seth via acmurthy) 
+
+    MAPREDUCE-3153. Fix TestFileOutputCommitter which was broken by
+    MAPREDUCE-2702. (mahadev via acmurthy) 
+
+    MAPREDUCE-3123. Fix NM to quote symlink names to escape special
+    characters. (Hitesh Shah via acmurthy) 
+
+    MAPREDUCE-3154. Fix JobSubmitter to check for output specs before copying
+    job submission files to fail fast. (Abhijit Suresh Shingate via acmurthy) 
+
+    MAPREDUCE-3158. Fix test failures in MRv1 due to default framework being
+    set to yarn. (Hitesh Shah via acmurthy)
+
+    MAPREDUCE-3167. container-executor is not being packaged with the assembly
+    target. (mahadev)
+
+    MAPREDUCE-3020. Fixed TaskAttemptImpl to log the correct node-address for
+    a finished Reduce task. (Chackaravarthy via vinodkv)
+
+    MAPREDUCE-2668. Fixed AuxServices to send a signal on application-finish
+    to all the services. (Thomas Graves via vinodkv)
+
+    MAPREDUCE-3126. Fixed a corner case in CapacityScheduler where headroom
+    wasn't updated on changes to cluster size. (acmurthy) 
+
+    MAPREDUCE-3140. Fixed the invalid JobHistory URL for failed
+    applications. (Subroto Sanyal via vinodkv)
+
+    MAPREDUCE-3125. Modified TaskImpl to consider only non-failed, non-killed
+    task-attempts for obtaining task's progress. (Hitesh Shah via vinodkv)
+
+    MAPREDUCE-2666. Retrieve shuffle port number from JobHistory on MR AM
+    restart. (Jonathan Eagles via acmurthy) 
+
+    MAPREDUCE-2789. Complete schedulingInfo on CLI. (Eric Payne via acmurthy) 
+
+    MAPREDUCE-3170. Fixed job output commit for deep hierarchies. (Hitesh Shah
+    via acmurthy)
+
+    MAPREDUCE-3124. Fixed location of native libs i.e. libhadoop.so for
+    containers. (John George via acmurthy) 
+
+    MAPREDUCE-3057. Job History Server goes OutOfMemory with 1200 Jobs 
+    and Heap Size set to 10 GB. (Eric Payne via mahadev)
+
+    MAPREDUCE-2840. mr279 TestUberAM.testSleepJob test fails. (jonathan eagles
+    via mahadev)
+
+    MAPREDUCE-3190. Ensure bin/yarn fails early with a clear error message
+    when HADOOP_COMMON_HOME or HADOOP_HDFS_HOME are not set. (todd & acmurthy 
+    via acmurthy)
+
+    MAPREDUCE-3189. Add link decoration back to MR2's CSS. (Todd Lipcon via
+    mahadev)
+    
+    MAPREDUCE-3127. Changed default value of yarn.resourcemanager.acl.enable
+    to true and added some more documentation. (acmurthy) 
+
+    MAPREDUCE-3032. Fixed TaskAttemptImpl so that JobHistory can have error
+    information about failed tasks. (Devaraj K via vinodkv)
+
+    MAPREDUCE-3196. TestLinuxContainerExecutorWithMocks fails on Mac OSX.
+    (Arun Murthy via mahadev)
+
+    MAPREDUCE-3197. TestMRClientService failing on building clean checkout of 
+    branch 0.23 (mahadev)
+
+    MAPREDUCE-2762. Cleanup MR staging directory on completion. (mahadev via
+    acmurthy) 
+
+    MAPREDUCE-3165. Ensure logging options are set correctly for MR AM and
+    tasks. (todd via acmurthy) 
+
+    MAPREDUCE-3203. Fix some javac warnings in MRAppMaster. (mahadev)
+
+    MAPREDUCE-3199. Fixed pom files to include correct log4j configuration for
+    tests. (vinodkv)
+
+    MAPREDUCE-3162. Separated application-init and container-init event types
+    in NodeManager's Application state machine. (Todd Lipcon via vinodkv)
+
+    MAPREDUCE-3176. Fixed ant mapreduce tests that are timing out because
+    of wrong framework name. (Hitesh Shah via vinodkv)
+
+    MAPREDUCE-3181. Fixed MapReduce runtime to load yarn-default.xml and
+    yarn-site.xml. (acmurthy) 
+
+    MAPREDUCE-2788. Normalize resource requests in FifoScheduler
+    appropriately. (Ahmed Radwan via acmurthy) 
+
+    MAPREDUCE-2693. Fix NPE in job-blacklisting. (Hitesh Shah via acmurthy) 
+
+    MAPREDUCE-3208. Fix NPE in task/container log appenders. (liangzhwa via
+    acmurthy) 
+
+    MAPREDUCE-3212. Fix usage/help message for bin/yarn. (Bhallamudi Venkata 
+    Siva Kamesh via acmurthy) 
+
+    MAPREDUCE-3179. Ensure failed tests exit with right error code. (Jonathan
+    Eagles via acmurthy)
+
+    MAPREDUCE-3188. Ensure correct shutdown in services. (todd via acmurthy) 
+
+    MAPREDUCE-3226. Fix shutdown of fetcher threads. (vinodkv via acmurthy) 
+
+    MAPREDUCE-3070. Fix NodeManager to use ephemeral ports by default.
+    (Devaraj K via acmurthy) 
+
+    MAPREDUCE-3242. Trunk compilation broken with bad interaction from 
+    MAPREDUCE-3070 and MAPREDUCE-3239. (mahadev)
+
+    MAPREDUCE-3058. Fixed MR YarnChild to report failure when task throws an
+    error and thus prevent a hanging task and job. (vinodkv)
+
+    MAPREDUCE-3087. Fixed the mapreduce classpath to correctly include the
+    generated-classpath file needed for tests. (Ravi Prakash via vinodkv)
+
+    MAPREDUCE-3233. Fixed a bug in MR Job so as to be able to restart the
+    application on AM crash. (Mahadev Konar via vinodkv)
+
+    MAPREDUCE-3028. Added job-end notification support. (Ravi Prakash via
+    acmurthy) 
+
+    MAPREDUCE-3249. Ensure shuffle-port is correctly used during MR AM recovery. 
+    (vinodkv via acmurthy) 
+
+    MAPREDUCE-3252. Fix map tasks to not rewrite data an extra time when
+    map output fits in spill buffer. (todd)
+
+    MAPREDUCE-3159. Ensure DefaultContainerExecutor doesn't delete application
+    directories during app-init. (todd via acmurthy)
+
+    MAPREDUCE-3248. Fixed log4j properties. (vinodkv via acmurthy) 
+
+    MAPREDUCE-2746. Yarn servers can't communicate with each other with 
+    hadoop.security.authorization set to true (acmurthy via mahadev)
+
+    MAPREDUCE-2821. Added missing fields (resourcePerMap & resourcePerReduce)
+    to JobSummary logs. (mahadev via acmurthy)
+
+    MAPREDUCE-3253. Fixed ContextFactory to clone JobContext correctly.
+    (acmurthy) 
+
+    MAPREDUCE-3263. Fixed the MAPREDUCE-3028 commit which broke MR1. (Hitesh
+    Shah via acmurthy) 
+
+    MAPREDUCE-3269. Fixed log4j properties to correctly set logging options
+    for JobHistoryServer vis-a-vis JobSummary logs. (mahadev via acmurthy) 
+
+    MAPREDUCE-2977. Fix ResourceManager to renew HDFS delegation tokens for
+    applications. (acmurthy) 
+
+    MAPREDUCE-3250. When AM restarts, client keeps reconnecting to the new AM 
+    and prints a lot of logs. (vinodkv via mahadev)
+
+    MAPREDUCE-3254. Fixed streaming to set the job.jar by using the right
+    JobConf ctor. (acmurthy) 
+
+    MAPREDUCE-3264. mapreduce.job.user.name needs to be set automatically.
+    (acmurthy via mahadev)
+
+    MAPREDUCE-3175. Add authorization to admin web-pages such as /stacks, /jmx
+    etc. (Jonathan Eagles via acmurthy)
+
+    MAPREDUCE-3257. Added authorization checks for the protocol between
+    ResourceManager and ApplicationMaster. (vinodkv via acmurthy) 
+
+    MAPREDUCE-3259. Added java.library.path of NodeManager to
+    ContainerLocalizer in LinuxContainerExecutor. (Kihwal Lee via acmurthy) 
+
+    MAPREDUCE-3279. Fixed TestJobHistoryParsing which assumed user name to be
+    mapred all the time. (Siddharth Seth via acmurthy)
+
+    MAPREDUCE-3240. Fixed NodeManager to be able to forcefully cleanup its
+    containers (process-trees) irrespective of whether the container succeeded,
+    or killed. (Hitesh Shah via vinodkv)
+
+    MAPREDUCE-3281. Fixed a bug in TestLinuxContainerExecutorWithMocks. (vinodkv)
+
+    MAPREDUCE-3228. Fixed MR AM to timeout RPCs to bad NodeManagers. (vinodkv
+    via acmurthy)
+
+    MAPREDUCE-3284. Moved JobQueueClient to hadoop-mapreduce-client-core.
+    (acmurthy) 
+
+    MAPREDUCE-3282. bin/mapred job -list throws exception. (acmurthy via 
+    mahadev)
+
+    MAPREDUCE-3186. User jobs hang if the ResourceManager process
+    goes down and comes up while a job is executing. 
+    (Eric Payne via mahadev)
+
+    MAPREDUCE-3209. Jenkins reports 160 FindBugs warnings (mahadev)
+
+    MAPREDUCE-3258. Fixed AM & JobHistory web-ui to display counters properly.
+    (Siddharth Seth via acmurthy)
+
+    MAPREDUCE-3290. Fixed a NPE in ClientRMService. (acmurthy) 
+
+    MAPREDUCE-3185. RM Web UI does not sort the columns in some cases.
+    (Jonathan Eagles via mahadev)
+
+    MAPREDUCE-3292. In secure mode job submission fails with Provider 
+    org.apache.hadoop.mapreduce.security.token.JobTokenIndentifier$Renewer 
+    not found. (mahadev)
+
+    MAPREDUCE-3296. Fixed the remaining nine FindBugs warnings. (vinodkv)
+
+    MAPREDUCE-2775. Fixed ResourceManager and NodeManager to force a
+    decommissioned node to shutdown. (Devaraj K via vinodkv)
+
+    MAPREDUCE-3304. Fixed intermittent test failure due to a race in
+    TestRMContainerAllocator#testBlackListedNodes. (Ravi Prakash via acmurthy) 
+
+    MAPREDUCE-3306. Fixed a bug in NodeManager ApplicationImpl that was causing
+    NodeManager to crash. (vinodkv)
+
+    MAPREDUCE-3256. Added authorization checks for the protocol between
+    NodeManager and ApplicationMaster. (vinodkv via acmurthy) 
+
+    MAPREDUCE-3274. Fixed a race condition in MRAppMaster that was causing a
+    task-scheduling deadlock. (Robert Joseph Evans via vinodkv)
+
+    MAPREDUCE-3313. Fixed initialization of ClusterMetrics which was failing
+    TestResourceTrackerService sometimes. (Hitesh Shah via vinodkv)
+
+    MAPREDUCE-2766. Fixed NM to set secure permissions for files and directories
+    in distributed-cache. (Hitesh Shah via vinodkv)
+
+    MAPREDUCE-2696. Fixed NodeManager to cleanup logs in a thread when logs'
+    aggregation is not enabled. (Siddharth Seth via vinodkv)
+
+    MAPREDUCE-3262. Fixed Container's state-machine in NodeManager to handle
+    a couple of events in failure states correctly. (Hitesh Shah and Siddharth
+    Seth via vinodkv)
+
+    MAPREDUCE-3035. Fixed MR JobHistory to ensure rack information is present.
+    (chakravarthy via acmurthy)
+
+    MAPREDUCE-3321. Disabled a few MR tests for 0.23. (Hitesh Shah via
+    acmurthy) 
+
+    MAPREDUCE-3220. Fixed TestCombineOutputCollector. (Devaraj K via acmurthy) 
+
+    MAPREDUCE-3103. Implement Job ACLs for MRAppMaster. 
+    (mahadev)
+
+    MAPREDUCE-3241. [Rumen] Fix Rumen to ignore the AMStartedEvent. (amarrk)
+
+    MAPREDUCE-3166. [Rumen] Make Rumen use job history api instead of relying
+    on current history file name format. (Ravi Gummadi)
+
+    MAPREDUCE-3157. [Rumen] Fix TraceBuilder to handle 0.20 history file
+    names also. (Ravi Gummadi)
+
+    MAPREDUCE-3081. Fix vaidya startup script. (gkesavan via suhas).
+
+    MAPREDUCE-2764. Fix renewal of dfs delegation tokens. (Owen via jitendra)
+
+    MAPREDUCE-3192. Fix Javadoc warning in JobClient.java and Cluster.java.
+    (jitendra)
+
+    MAPREDUCE-3237. Move LocalJobRunner to hadoop-mapreduce-client-core.
+    (tomwhite via acmurthy) 
+
+    MAPREDUCE-3316. Rebooted link is not working properly. 
+    (Bhallamudi Venkata Siva Kamesh via mahadev)
+
+    MAPREDUCE-3317. Rumen TraceBuilder is emitting null as hostname.
+    (Ravi Gummadi via mahadev)
+
+    MAPREDUCE-3332. contrib/raid compile breaks due to changes in hdfs/protocol/datatransfer/
+    Sender#writeBlock related to checksum handling (Hitesh Shah via mahadev)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -1745,6 +2128,9 @@ Release 0.22.0 - Unreleased
     MAPREDUCE-2505. Explain how to use ACLs in the fair scheduler.
     (matei via eli)
 
+    MAPREDUCE-3138. Add a utility to help applications bridge changes in 
+    Context Objects APIs due to MAPREDUCE-954. (omalley via acmurthy)
+
   OPTIMIZATIONS
 
     MAPREDUCE-1354. Enhancements to JobTracker for better performance and

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Nov  2 05:34:31 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:1161333-1179483
+/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:1161333-1196451
 /hadoop/core/branches/branch-0.19/mapred/CHANGES.txt:713112
 /hadoop/mapreduce/branches/HDFS-641/CHANGES.txt:817878-835964

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/INSTALL
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/INSTALL?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/INSTALL (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/INSTALL Wed Nov  2 05:34:31 2011
@@ -2,49 +2,31 @@ To compile  Hadoop Mapreduce next follow
 
 Step 1) Install dependencies for yarn
 
-See http://svn.apache.org/repos/asf/hadoop/common/trunk/hadoop-mapreduce/hadoop-yarn/README
+See http://svn.apache.org/repos/asf/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/README
 Make sure the protobuf library is in your library path, or set: export LD_LIBRARY_PATH=/usr/local/lib
 
 Step 2) Checkout
 
 svn checkout http://svn.apache.org/repos/asf/hadoop/common/trunk
 
-Step 3) Build common
+Step 3) Build
 
-Go to common directory - choose your regular common build command
-Example: mvn clean install package -Pbintar -DskipTests
+Go to the common directory and choose your regular build command. For example:
 
-Step 4) Build HDFS 
-
-Go to hdfs directory
-ant veryclean mvn-install -Dresolvers=internal 
-
-Step 5) Build yarn and mapreduce
-
-Go to mapreduce directory
 export MAVEN_OPTS=-Xmx512m
+mvn clean package -Pdist -Dtar -DskipTests -Pnative
 
-mvn clean install assembly:assembly -DskipTests
-
-Copy in build.properties if appropriate - make sure eclipse.home not set
-ant veryclean tar -Dresolvers=internal 
+You can omit -Pnative if you don't want to build native packages.
 
-You will see a tarball in
-ls target/hadoop-mapreduce-0.24.0-SNAPSHOT-all.tar.gz  
+Step 4) Untar the tarball from hadoop-dist/target/ into a clean and different
+directory, say YARN_HOME.
 
-Step 6) Untar the tarball in a clean and different directory.
-say YARN_HOME. 
-
-Make sure you aren't picking up avro-1.3.2.jar, remove:
-  $HADOOP_COMMON_HOME/share/hadoop/common/lib/avro-1.3.2.jar
-  $YARN_HOME/lib/avro-1.3.2.jar
-
-Step 7)
-Install hdfs/common and start hdfs
+Step 5)
+Start hdfs
 
 To run Hadoop Mapreduce next applications: 
 
-Step 8) export the following variables to where you have things installed:
+Step 6) Export the following variables, pointing to where you have things installed:
 You probably want to export these in hadoop-env.sh and yarn-env.sh also.
 
 export HADOOP_MAPRED_HOME=<mapred loc>
@@ -54,7 +36,7 @@ export YARN_HOME=directory where you unt
 export HADOOP_CONF_DIR=<conf loc>
 export YARN_CONF_DIR=$HADOOP_CONF_DIR
 
-Step 9) Setup config: for running mapreduce applications, which now are in user land, you need to setup nodemanager with the following configuration in your yarn-site.xml before you start the nodemanager.
+Step 7) Set up config: to run mapreduce applications, which now live in user land, you need to configure the nodemanager with the following in your yarn-site.xml before you start the nodemanager.
     <property>
       <name>yarn.nodemanager.aux-services</name>
       <value>mapreduce.shuffle</value>
@@ -65,31 +47,21 @@ Step 9) Setup config: for running mapred
       <value>org.apache.hadoop.mapred.ShuffleHandler</value>
     </property>
 
-Step 10) Modify mapred-site.xml to use yarn framework
+Step 8) Modify mapred-site.xml to use yarn framework
     <property>    
      <name>mapreduce.framework.name</name>
       <value>yarn</value>  
     </property>
 
-Step 11) Create the following symlinks in $HADOOP_COMMON_HOME/share/hadoop/common/lib
-
-ln -s $YARN_HOME/modules/hadoop-mapreduce-client-app-0.24.0-SNAPSHOT.jar .	
-ln -s $YARN_HOME/modules/hadoop-yarn-api-0.24.0-SNAPSHOT.jar .
-ln -s $YARN_HOME/modules/hadoop-mapreduce-client-common-0.24.0-SNAPSHOT.jar .	
-ln -s $YARN_HOME/modules/hadoop-yarn-common-0.24.0-SNAPSHOT.jar .
-ln -s $YARN_HOME/modules/hadoop-mapreduce-client-core-0.24.0-SNAPSHOT.jar .	
-ln -s $YARN_HOME/modules/hadoop-yarn-server-common-0.24.0-SNAPSHOT.jar .
-ln -s $YARN_HOME/modules/hadoop-mapreduce-client-jobclient-0.24.0-SNAPSHOT.jar .
-
-Step 12) cd $YARN_HOME
+Step 9) cd $YARN_HOME
 
-Step 13) bin/yarn-daemon.sh start resourcemanager
+Step 10) bin/yarn-daemon.sh start resourcemanager
 
-Step 14) bin/yarn-daemon.sh start nodemanager
+Step 11) bin/yarn-daemon.sh start nodemanager
 
-Step 15) bin/yarn-daemon.sh start historyserver
+Step 12) bin/yarn-daemon.sh start historyserver
 
-Step 16) You are all set, an example on how to run a mapreduce job is:
+Step 13) You are all set. An example of how to run a mapreduce job:
 cd $HADOOP_MAPRED_HOME
 ant examples -Dresolvers=internal 
 $HADOOP_COMMON_HOME/bin/hadoop jar $HADOOP_MAPRED_HOME/build/hadoop-mapreduce-examples-0.24.0-SNAPSHOT.jar randomwriter -Dmapreduce.job.user.name=$USER -Dmapreduce.clientfactory.class.name=org.apache.hadoop.mapred.YarnClientFactory -Dmapreduce.randomwriter.bytespermap=10000 -Ddfs.blocksize=536870912 -Ddfs.block.size=536870912 -libjars $YARN_HOME/modules/hadoop-mapreduce-client-jobclient-0.24.0-SNAPSHOT.jar output 
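For orientation, the revised INSTALL steps above condense to roughly the following, assuming a fresh checkout; the tarball glob and YARN_HOME location are illustrative, not fixed names:

  svn checkout http://svn.apache.org/repos/asf/hadoop/common/trunk hadoop-trunk
  cd hadoop-trunk
  export MAVEN_OPTS=-Xmx512m
  mvn clean package -Pdist -Dtar -DskipTests   # add -Pnative for native packages
  mkdir -p /tmp/yarn
  tar xzf hadoop-dist/target/hadoop-*.tar.gz -C /tmp/yarn   # illustrative tarball name
  export YARN_HOME=/tmp/yarn/<untarred dir>    # then set the Step 6 variables
  cd $YARN_HOME
  bin/yarn-daemon.sh start resourcemanager
  bin/yarn-daemon.sh start nodemanager
  bin/yarn-daemon.sh start historyserver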

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/assembly/all.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/assembly/all.xml?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/assembly/all.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/assembly/all.xml Wed Nov  2 05:34:31 2011
@@ -1,101 +0,0 @@
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
-  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
-  <id>all</id>
-  <formats>
-    <format>tar.gz</format>
-  </formats>
-  <includeBaseDirectory>true</includeBaseDirectory>
-  <!-- TODO: this layout is wrong. We need module specific bin files in module specific dirs -->
-  <fileSets>
-    <fileSet>
-      <directory>hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/target/classes/bin</directory>
-      <outputDirectory>bin</outputDirectory>
-      <includes>
-        <include>container-executor</include>
-      </includes>
-      <fileMode>0755</fileMode>
-    </fileSet>
-    <fileSet>
-      <directory>hadoop-yarn/bin</directory>
-      <outputDirectory>bin</outputDirectory>
-      <includes>
-        <include>*</include>
-      </includes>
-      <fileMode>0755</fileMode>
-    </fileSet>
-    <fileSet>
-      <directory>bin</directory>
-      <outputDirectory>bin</outputDirectory>
-      <includes>
-        <include>*</include>
-      </includes>
-      <fileMode>0755</fileMode>
-    </fileSet>
-    <fileSet>
-      <directory>hadoop-yarn/conf</directory>
-      <outputDirectory>conf</outputDirectory>
-      <includes>
-        <include>**/*</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <outputDirectory>sources</outputDirectory>
-      <excludes>
-        <exclude>**/*.jar</exclude>
-        <exclude>**/target/**</exclude>
-        <!-- scripts to include later for setting fileMode -->
-        <exclude>**/bin/*</exclude>
-        <exclude>**/scripts/*</exclude>
-        <!-- images that we don't need (and cause problems for our tools) -->
-        <exclude>**/dt-*/images/**</exclude>
-        <!-- until the code that does this is fixed -->
-        <exclude>**/file:/**</exclude>
-        <exclude>**/SecurityAuth.audit*</exclude>
-      </excludes>
-      <includes>
-        <include>assembly/**</include>
-        <include>pom.xml</include>
-        <include>build*.xml</include>
-        <include>ivy.xml</include>
-        <include>ivy/**</include>
-        <include>INSTALL</include>
-        <include>LICENSE.txt</include>
-        <include>mr-client/**</include>
-        <include>hadoop-yarn/**</include>
-        <include>src/**</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <outputDirectory>sources</outputDirectory>
-      <includes>
-        <include>**/bin/*</include>
-        <include>**/scripts/*</include>
-      </includes>
-      <fileMode>0755</fileMode>
-    </fileSet>
-  </fileSets>
-  <moduleSets>
-    <moduleSet>
-      <excludes>
-        <exclude>org.apache.hadoop:hadoop-yarn-server-tests</exclude>
-      </excludes>
-      <binaries>
-        <outputDirectory>modules</outputDirectory>
-        <includeDependencies>false</includeDependencies>
-        <unpack>false</unpack>
-      </binaries>
-    </moduleSet>
-  </moduleSets>
-  <dependencySets>
-    <dependencySet>
-      <useProjectArtifact>false</useProjectArtifact>
-      <outputDirectory>/lib</outputDirectory>
-      <!-- Exclude hadoop artifacts. They will be found via HADOOP* env -->
-      <excludes>
-        <exclude>org.apache.hadoop:hadoop-common</exclude>
-        <exclude>org.apache.hadoop:hadoop-hdfs</exclude>
-      </excludes>
-    </dependencySet>
-  </dependencySets>
-</assembly>

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/bin/mapred
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/bin/mapred?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/bin/mapred (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/bin/mapred Wed Nov  2 05:34:31 2011
@@ -115,5 +115,12 @@ if [ "$COMMAND" = "classpath" ] ; then
   exit
 fi
 
+# Turn on the security logger for the jobtracker
+if [ "$COMMAND" = "jobtracker" ]; then
+  HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,DRFAS}"
+else
+  HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
+fi
+
 export CLASSPATH
 exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/build.xml?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/build.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/build.xml Wed Nov  2 05:34:31 2011
@@ -39,7 +39,6 @@
   <property name="examples.final.name" value="${name}-examples-${version}"/>
   <property name="tools.final.name" value="${name}-tools-${version}"/>
   <property name="year" value="2009"/>
-  <property name="package.release" value="1"/>
 
   <property name="src.dir" value="${basedir}/src"/>  	
   <property name="mapred.src.dir" value="${src.dir}/java"/> 
@@ -240,17 +239,6 @@
     <equals arg1="${repo}" arg2="staging"/>
   </condition>
 
-  <!-- packaging properties -->
-  <property name="package.prefix" value="/usr"/>
-  <property name="package.conf.dir" value="/etc/hadoop"/>
-  <property name="package.log.dir" value="/var/log/hadoop/mapred"/>
-  <property name="package.pid.dir" value="/var/run/hadoop"/>
-  <property name="package.var.dir" value="/var/lib/hadoop"/>
-  <property name="package.share.dir" value="/share/hadoop/${module}"/>
-  <!-- Use fixed path to build rpm for avoiding rpmbuild conflict with dash path names -->
-  <property name="package.buildroot" value="/tmp/hadoop_mapred_package_build_${user.name}"/>
-  <property name="package.build.dir" value="/tmp/hadoop_mapred_package_build_${user.name}/BUILD"/>
-
   <!-- the normal classpath -->
   <path id="classpath">
     <pathelement location="${build.classes}"/>
@@ -846,8 +834,7 @@
     </antcall>
   </target>
 
-  <target name="nightly" depends="test, tar">
-  </target>
+  <target name="nightly" depends="test" />
 	
   <!-- ================================================================== -->
   <!-- Run optional third-party tool targets                              --> 
@@ -880,10 +867,8 @@
    <subant target="jar">
       <property name="version" value="${version}"/>
       <property name="dist.dir" value="${dist.dir}"/>
-      <fileset file="${contrib.dir}/capacity-scheduler/build.xml"/>
       <fileset file="${contrib.dir}/streaming/build.xml"/>
       <fileset file="${contrib.dir}/gridmix/build.xml"/>
-      <fileset file="${contrib.dir}/mumak/build.xml"/>
     </subant>
  </target>
 
@@ -912,17 +897,13 @@
       <sourcePath path="${mapred.src.dir}"/>
       <sourcePath path="${examples.dir}" />
       <sourcePath path="${tools.src}" />
-      <sourcePath path="${basedir}/src/contrib/capacity-scheduler/src/java" />
       <sourcePath path="${basedir}/src/contrib/streaming/src/java" />
       <sourcePath path="${basedir}/src/contrib/gridmix/src/java" />
-      <sourcePath path="${basedir}/src/contrib/mumak/src/java" />
       <class location="${basedir}/build/${final.name}.jar" />
       <class location="${basedir}/build/${examples.final.name}.jar" />
       <class location="${basedir}/build/${tools.final.name}.jar" />
-      <class location="${basedir}/build/contrib/capacity-scheduler/hadoop-${version}-capacity-scheduler.jar" />
       <class location="${basedir}/build/contrib/streaming/hadoop-${version}-streaming.jar" />
       <class location="${basedir}/build/contrib/gridmix/hadoop-${version}-gridmix.jar" />
-      <class location="${basedir}/build/contrib/mumak/hadoop-${version}-mumak.jar" />
     </findbugs>
 
         <xslt style="${findbugs.home}/src/xsl/default.xsl"
@@ -981,7 +962,6 @@
        <packageset dir="src/contrib/data_join/src/java"/>
        <packageset dir="src/contrib/gridmix/src/java"/>
        <packageset dir="src/contrib/index/src/java"/>
-       <packageset dir="src/contrib/mumak/src/java"/>
        <packageset dir="src/contrib/streaming/src/java"/>
        <packageset dir="src/contrib/vaidya/src/java"/>
        <packageset dir="src/contrib/vertica/src/java"/>
@@ -1042,8 +1022,7 @@
     	<packageset dir="${examples.dir}"/>
 
        <!-- Don't include contrib modules that use the same packages as core
-       MapReduce. This includes capacity-scheduler, dynamic-scheduler,
-       fairscheduler, mumak. See also the javadoc-dev target. -->
+       MapReduce. See also the javadoc-dev target. -->
        <packageset dir="src/contrib/data_join/src/java"/>
        <packageset dir="src/contrib/gridmix/src/java"/>
        <packageset dir="src/contrib/index/src/java"/>
@@ -1155,7 +1134,6 @@
     <mkdir dir="${dist.dir}"/>
     <mkdir dir="${dist.dir}/lib"/>
     <mkdir dir="${dist.dir}/contrib"/>
-    <mkdir dir="${dist.dir}/bin"/>
     <mkdir dir="${dist.dir}/docs"/>
     <mkdir dir="${dist.dir}/docs/api"/>
     <mkdir dir="${dist.dir}/docs/jdiff"/>
@@ -1196,10 +1174,6 @@
       <fileset file="${build.dir}/${tools.final.name}.jar"/> 
     </copy>
     
-    <copy todir="${dist.dir}/bin">
-      <fileset dir="bin"/>
-    </copy>
-
     <copy todir="${dist.dir}/conf">
       <fileset dir="${conf.dir}" excludes="**/*.template"/>
     </copy>
@@ -1237,7 +1211,6 @@
         <fileset file="${dist.dir}/src/c++/pipes/configure"/>
     </chmod>
     <chmod perm="ugo+x" type="file" parallel="false">
-        <fileset dir="${dist.dir}/bin"/>
         <fileset dir="${dist.dir}/src/contrib/">
           <include name="*/bin/*" />
         </fileset>
@@ -1248,290 +1221,6 @@
 
   </target>
 
-  <!-- ================================================================== -->
-  <!-- Make release tarball                                               -->
-  <!-- ================================================================== -->
-  <target name="tar" depends="package" description="Make release tarball">
-    <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
-      <param.listofitems>
-        <tarfileset dir="${build.dir}" mode="664">
-          <exclude name="${final.name}/bin/*" />
-          <exclude name="${final.name}/contrib/*/bin/*" />
-          <exclude name="${final.name}/src/examples/pipes/configure"/>
-          <exclude name="${final.name}/src/c++/utils/configure"/>
-          <exclude name="${final.name}/src/c++/pipes/configure"/>
-          <include name="${final.name}/**" />
-        </tarfileset>
-        <tarfileset dir="${build.dir}" mode="755">
-          <include name="${final.name}/bin/*" />
-          <include name="${final.name}/contrib/*/bin/*" />
-          <include name="${final.name}/src/examples/pipes/configure"/>
-          <include name="${final.name}/src/c++/utils/configure"/>
-          <include name="${final.name}/src/c++/pipes/configure"/>
-        </tarfileset>
-      </param.listofitems>
-    </macro_tar>
-  </target>
-
-  <target name="bin-package" depends="compile, jar, examples, tools, jar-test, package-librecordio" 
-		description="assembles artifacts for binary target">
-    <mkdir dir="${dist.dir}"/>
-    <mkdir dir="${dist.dir}/include"/>
-    <mkdir dir="${dist.dir}/lib"/>
-    <mkdir dir="${dist.dir}/${package.share.dir}/contrib"/>
-    <mkdir dir="${dist.dir}/${package.share.dir}/lib"/>
-    <mkdir dir="${dist.dir}/${package.share.dir}/templates"/>
-    <mkdir dir="${dist.dir}/bin"/>
-    <mkdir dir="${dist.dir}/sbin"/>
-
-    <!-- enable this if there is mapred specific dependencies
-    <copy todir="${dist.dir}/${package.share.dir}/lib" includeEmptyDirs="false" flatten="true">
-      <fileset dir="${mapred.ivy.lib.dir}"/>
-    </copy> -->
-
-    <copy todir="${dist.dir}/include" includeEmptyDirs="false">
-      <fileset dir="${build.dir}/c++/${build.platform}/include" 
-               erroronmissingdir="false">
-        <include name="**"/>
-      </fileset>
-    </copy>
-
-    <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
-      <fileset dir="${build.dir}/c++/${build.platform}/lib"
-               erroronmissingdir="false">
-        <include name="**"/>
-      </fileset>
-    </copy>
-
-    <subant target="package">
-      <!--Pass down the version in case its needed again and the target
-      distribution directory so contribs know where to install to.-->
-      <property name="version" value="${version}"/>
-      <property name="dist.dir" value="${dist.dir}/${package.share.dir}"/>
-      <fileset file="${contrib.dir}/build.xml"/>
-    </subant>  	
-
-    <copy todir="${dist.dir}/${package.share.dir}"> 
-      <fileset file="${build.dir}/${final.name}*.jar"/>
-      <fileset file="${build.dir}/${test.final.name}.jar"/>
-      <fileset file="${build.dir}/${examples.final.name}.jar"/>
-      <fileset file="${build.dir}/${tools.final.name}.jar"/>
-    </copy>
-
-    <copy todir="${dist.dir}/bin">
-      <fileset dir="bin">
-        <include name="mapred"/>
-      </fileset>
-    </copy>
-
-    <copy todir="${dist.dir}/libexec">
-      <fileset dir="bin">
-        <include name="mapred-config.sh"/>
-      </fileset>
-    </copy>
-
-    <copy todir="${dist.dir}/sbin">
-      <fileset dir="bin">
-        <include name="start-*.sh"/>
-        <include name="stop-*.sh"/>
-      </fileset>
-    </copy>
-
-    <copy file="${basedir}/src/packages/update-mapred-env.sh" tofile="${dist.dir}/sbin/update-mapred-env.sh"/>
-    <copy file="${basedir}/src/packages/rpm/init.d/hadoop-jobtracker" tofile="${dist.dir}/sbin/hadoop-jobtracker.redhat"/>
-    <copy file="${basedir}/src/packages/rpm/init.d/hadoop-tasktracker" tofile="${dist.dir}/sbin/hadoop-tasktracker.redhat"/>
-    <copy file="${basedir}/src/packages/deb/init.d/hadoop-jobtracker" tofile="${dist.dir}/sbin/hadoop-jobtracker.debian"/>
-    <copy file="${basedir}/src/packages/deb/init.d/hadoop-tasktracker" tofile="${dist.dir}/sbin/hadoop-tasktracker.debian"/>
-    
-    <copy file="${basedir}/src/packages/update-mapred-env.sh" tofile="${dist.dir}/sbin/update-mapred-env.sh"/>
-      	
-    <copy todir="${dist.dir}/etc/hadoop">
-      <fileset dir="${conf.dir}" excludes="**/*.template"/>
-    </copy>
-
-    <copy todir="${dist.dir}/${package.share.dir}/templates">
-      <fileset dir="${basedir}/src/packages/templates/conf" includes="*"/>
-    </copy>
-
-    <copy todir="${dist.dir}/${package.share.dir}/webapps">
-      <fileset dir="${build.webapps}"/>
-    </copy>
-
-    <copy todir="${dist.dir}/share/doc/hadoop/${module}">
-      <fileset dir=".">
-        <include name="*.txt" />
-      </fileset>
-    </copy>
-
-    <chmod perm="ugo+x" type="file" parallel="false">
-        <fileset dir="${dist.dir}/bin"/>
-        <fileset dir="${dist.dir}/sbin"/>
-    </chmod>  	
-  </target>
-
-  <target name="binary-system" depends="bin-package, jar-system, jar-test-system"
-     description="make system test package for deployment">
-    <!--TODO!!!!! fix this shit...-->
-    <copy todir="${system-test-build-dir}/${final.name}">
-      <fileset dir="${dist.dir}">
-      </fileset>
-    </copy>
-    <copy todir="${system-test-build-dir}/${final.name}/conf">
-      <fileset dir="${test.src.dir}/system/conf/"/>
-    </copy>
-    <copy todir="${system-test-build-dir}">
-      <fileset dir="${build.dir}">
-        <include name="${test.final.name}.jar"/>
-        <include name="${examples.final.name}.jar"/>
-      </fileset>
-    </copy>
-    <copy tofile="${system-test-build-dir}/${final.name}/lib/hadoop-common-${hadoop-common.version}.jar"
-      file="${system-test-build-dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${hadoop-common.version}.jar"
-      overwrite="true"/>
-    <copy tofile="${system-test-build-dir}/${final.name}/lib/hadoop-hdfs-${version}.jar"
-      file="${system-test-build-dir}/ivy/lib/${ant.project.name}/system/hadoop-hdfs-${herriot.suffix}-${version}.jar"
-      overwrite="true"/>
-    <copy tofile="${system-test-build-dir}/${final.name}/${final.name}.jar"
-      file="${system-test-build-dir}/${instrumented.final.name}.jar" overwrite="true"/>
-    <copy tofile="${system-test-build-dir}/${final.name}/${final.name}-sources.jar"
-      file="${system-test-build-dir}/${instrumented.final.name}-sources.jar" overwrite="true"/>
-    <macro_tar 
-      param.destfile="${system-test-build-dir}/${final.name}-bin.${herriot.suffix}.tar.gz">
-        <param.listofitems>
-          <tarfileset dir="${system-test-build-dir}" mode="664">
-            <exclude name="${final.name}/bin/*" />
-            <exclude name="${final.name}/src/**" />
-            <exclude name="${final.name}/docs/**" />
-            <include name="${final.name}/**" />
-          </tarfileset>
-        </param.listofitems>
-      </macro_tar>
-  </target>
-  
-  <target name="binary" depends="bin-package" description="Make tarball without source and documentation">
-    <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
-      <param.listofitems>
-        <tarfileset dir="${build.dir}" mode="664">
-          <exclude name="${final.name}/bin/*" />
-          <exclude name="${final.name}/src/**" />
-          <exclude name="${final.name}/docs/**" />
-          <include name="${final.name}/**" />
-        </tarfileset>
-        <tarfileset dir="${build.dir}" mode="755">
-          <include name="${final.name}/bin/*" />
-        </tarfileset>
-      </param.listofitems>
-    </macro_tar>
-  </target>
-
-  <target name="rpm" depends="binary" description="Make rpm package">
-    <mkdir dir="${package.buildroot}/BUILD" />
-    <mkdir dir="${package.buildroot}/RPMS" />
-    <mkdir dir="${package.buildroot}/SRPMS" />
-    <mkdir dir="${package.buildroot}/SOURCES" />
-    <mkdir dir="${package.buildroot}/SPECS" />
-    <copy todir="${package.buildroot}/SOURCES">
-      <fileset dir="${build.dir}">
-        <include name="${final.name}-bin.tar.gz" />
-      </fileset>
-    </copy>
-    <copy file="${src.dir}/packages/rpm/spec/hadoop-mapred.spec" todir="${package.buildroot}/SPECS">
-      <filterchain>
-        <replacetokens>
-          <token key="final.name" value="${final.name}" />
-          <token key="version" value="${_version}" />
-          <token key="package.release" value="${package.release}" />
-          <token key="package.build.dir" value="${package.build.dir}" />
-          <token key="package.prefix" value="${package.prefix}" />
-          <token key="package.conf.dir" value="${package.conf.dir}" />
-          <token key="package.log.dir" value="${package.log.dir}" />
-          <token key="package.pid.dir" value="${package.pid.dir}" />
-          <token key="package.var.dir" value="${package.var.dir}" />
-        </replacetokens>
-      </filterchain>
-    </copy>
-    <rpm specFile="hadoop-mapred.spec" command="-bb --target ${os.arch}" topDir="${package.buildroot}" cleanBuildDir="true" failOnError="true"/>
-    <copy todir="${build.dir}/" flatten="true">
-      <fileset dir="${package.buildroot}/RPMS">
-        <include name="**/*.rpm" />
-      </fileset>
-    </copy>
-    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
-  </target>
-
-  <target name="deb" depends="ivy-retrieve-package, binary" description="Make deb package">
-    <taskdef name="deb"
-           classname="org.vafer.jdeb.ant.DebAntTask">
-      <classpath refid="ivy-package.classpath" />
-    </taskdef>
-
-    <mkdir dir="${package.build.dir}/hadoop.control" />
-    <mkdir dir="${package.buildroot}/${package.prefix}" />
-    <copy todir="${package.buildroot}/${package.prefix}">
-      <fileset dir="${build.dir}/${final.name}">
-        <include name="**" />
-      </fileset>
-    </copy>
-    <copy todir="${package.build.dir}/hadoop.control">
-      <fileset dir="${src.dir}/packages/deb/hadoop.control">
-        <exclude name="control" />
-      </fileset>
-    </copy>
-    <copy file="${src.dir}/packages/deb/hadoop.control/control" todir="${package.build.dir}/hadoop.control">
-      <filterchain>
-        <replacetokens>
-          <token key="final.name" value="${final.name}" />
-          <token key="version" value="${_version}" />
-          <token key="package.release" value="${package.release}" />
-          <token key="package.build.dir" value="${package.build.dir}" />
-          <token key="package.prefix" value="${package.prefix}" />
-          <token key="package.conf.dir" value="${package.conf.dir}" />
-          <token key="package.log.dir" value="${package.log.dir}" />
-          <token key="package.pid.dir" value="${package.pid.dir}" />
-        </replacetokens>
-      </filterchain>
-    </copy>
-    <deb destfile="${package.buildroot}/${name}_${_version}-${package.release}_${os.arch}.deb" control="${package.build.dir}/hadoop.control">
-      <tarfileset dir="${build.dir}/${final.name}" filemode="644" prefix="${package.prefix}">
-        <exclude name="bin/*" />
-        <exclude name="${package.share.dir}/contrib/*/bin/*" />
-        <exclude name="etc" />
-        <exclude name="etc/**" />
-        <exclude name="libexec/*" />
-        <exclude name="sbin/*" />
-        <include name="**" />
-      </tarfileset>
-      <tarfileset dir="${build.dir}/${final.name}" filemode="755" prefix="${package.prefix}">
-        <include name="bin/*" />
-        <include name="sbin/*" />
-        <exclude name="sbin/*.redhat" />
-        <exclude name="sbin/*.debian" />
-        <include name="libexec/*" />
-        <include name="${package.share.dir}/contrib/*/bin/*" />
-      </tarfileset>
-      <tarfileset dir="${src.dir}/packages" filemode="755" prefix="${package.prefix}/sbin">
-        <include name="*.sh" />
-      </tarfileset>
-      <tarfileset dir="${build.dir}/${final.name}/etc/hadoop" filemode="644" prefix="${package.conf.dir}">
-        <exclude name="configuration.xsl" />
-        <exclude name="hadoop-metrics2.properties" />
-        <exclude name="core-site.xml" />
-        <exclude name="hdfs-site.xml" />
-        <exclude name="mapred-site.xml" />
-        <include name="**" />
-      </tarfileset>
-      <tarfileset dir="${basedir}/src/packages/deb/init.d" filemode="755" prefix="/etc/init.d">
-        <include name="**" />
-      </tarfileset>
-    </deb>
-    <copy todir="${build.dir}/" flatten="true">
-      <fileset dir="${package.buildroot}">
-        <include name="**/${name}*.deb" />
-      </fileset>
-    </copy>
-    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
-  </target>
-
   <target name="ant-task-download" description="To download mvn-ant-task">
     <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
   </target>
@@ -1742,7 +1431,6 @@
         <exclude name="src/test/mapred/org/apache/hadoop/mapred/test.tgz"/>
         <exclude name="src/test/tools/data/rumen/**/*"/>
         <exclude name="src/test/mapred/org/apache/hadoop/mapred/*.txt"/>
-        <exclude name="src/contrib/mumak/src/test/data/*.json"/>
         <exclude name="src/contrib/index/sample/*.txt"/>
         <exclude name="src/test/mapred/org/apache/hadoop/cli/data60bytes"/>
         <exclude name="src/examples/org/apache/hadoop/examples/dancing/puzzle1.dta"/>
@@ -2090,32 +1778,16 @@
                 output="${build.dir.eclipse-tools-classes}" />
         <source path="${contrib.dir}/block_forensics/src/java"
                 output="${build.dir.eclipse-contrib-classes}/block_forensics/main" />
-        <source path="${contrib.dir}/capacity-scheduler/src/java"
-                output="${build.dir.eclipse-contrib-classes}/capacity-scheduler/main" />
-        <source path="${contrib.dir}/capacity-scheduler/src/test"
-                output="${build.dir.eclipse-contrib-classes}/capacity-scheduler/test" />
         <source path="${contrib.dir}/data_join/src/java"
                 output="${build.dir.eclipse-contrib-classes}/data_join/main" />
         <source path="${contrib.dir}/data_join/src/examples"
                 output="${build.dir.eclipse-contrib-classes}/data_join/examples" />
         <source path="${contrib.dir}/data_join/src/test"
                 output="${build.dir.eclipse-contrib-classes}/data_join/test" />
-        <source path="${contrib.dir}/dynamic-scheduler/src/java"
-                output="${build.dir.eclipse-contrib-classes}/dynamic-scheduler/main" />
-        <source path="${contrib.dir}/dynamic-scheduler/src/test"
-                output="${build.dir.eclipse-contrib-classes}/dynamic-scheduler/test" />
-        <source path="${contrib.dir}/fairscheduler/src/java"
-                output="${build.dir.eclipse-contrib-classes}/fairscheduler/main" />
-        <source path="${contrib.dir}/fairscheduler/src/test"
-                output="${build.dir.eclipse-contrib-classes}/fairscheduler/test" />
         <source path="${contrib.dir}/gridmix/src/java"
                 output="${build.dir.eclipse-contrib-classes}/gridmix/main" />
         <source path="${contrib.dir}/gridmix/src/test"
                 output="${build.dir.eclipse-contrib-classes}/gridmix/test" />
-        <source path="${contrib.dir}/mumak/src/java"
-                output="${build.dir.eclipse-contrib-classes}/mumak/main" />
-        <source path="${contrib.dir}/mumak/src/test"
-                output="${build.dir.eclipse-contrib-classes}/mumak/test" />
         <source path="${contrib.dir}/raid/src/java"
                 output="${build.dir.eclipse-contrib-classes}/raid/main" />
         <source path="${contrib.dir}/raid/src/test"

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Nov  2 05:34:31 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/conf:1152502-1179483
+/hadoop/common/trunk/hadoop-mapreduce-project/conf:1152502-1196451
 /hadoop/core/branches/branch-0.19/mapred/conf:713112
 /hadoop/core/trunk/conf:784664-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml Wed Nov  2 05:34:31 2011
@@ -160,7 +160,10 @@
      </Match>
      <Match>
        <Class name="org.apache.hadoop.mapred.FileOutputCommitter" />
+       <Or>
        <Method name="commitJob" />
+       <Method name="recoverTask" />
+       </Or>
        <Bug pattern="NM_WRONG_PACKAGE" />
      </Match>
      <Match>
@@ -169,6 +172,7 @@
        <Method name="abortJob" />
        <Method name="commitJob" />
        <Method name="cleanupJob" />
+       <Method name="recoverTask" />
        </Or>
        <Bug pattern="NM_WRONG_PACKAGE_INTENTIONAL" />
      </Match>
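Reconstructed from the hunks above (and reindented for readability), the first of the two FileOutputCommitter exclusions now matches either method:

  <Match>
    <Class name="org.apache.hadoop.mapred.FileOutputCommitter" />
    <Or>
      <Method name="commitJob" />
      <Method name="recoverTask" />
    </Or>
    <Bug pattern="NM_WRONG_PACKAGE" />
  </Match>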

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml Wed Nov  2 05:34:31 2011
@@ -16,17 +16,18 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>${hadoop-mapreduce.version}</version>
+    <version>0.24.0-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-app</artifactId>
+  <version>0.24.0-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-app</name>
 
   <properties>
-    <install.file>${project.artifact.file}</install.file>
     <applink.base>${project.build.directory}/${project.name}</applink.base>
-    <mr.basedir>${project.parent.parent.basedir}</mr.basedir>
+    <!-- Needed for generating FindBugs warnings using parent pom -->
+    <mr.basedir>${project.parent.basedir}/../</mr.basedir>
   </properties>
 
   <dependencies>
@@ -42,6 +43,10 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-server-common</artifactId>
       <scope>test</scope>
     </dependency>

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java Wed Nov  2 05:34:31 2011
@@ -314,6 +314,8 @@ public class LocalContainerLauncher exte
           ReduceTask reduce = (ReduceTask)task;
 
           // a.k.a. "mapreduce.jobtracker.address" in LocalJobRunner:
+          // set the framework name to "local" so the task runs locally
+          conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
           conf.set(MRConfig.MASTER_ADDRESS, "local");  // bypass shuffle
 
           reduce.run(conf, umbilical);
@@ -385,8 +387,9 @@ FIXME:  do we need to do any of this stu
 
 /* FIXME:  may not need renameMapOutputForReduce() anymore?  TEST!
 
-${local.dir}/usercache/$user/appcache/$appId/$contId/ == $cwd for tasks;
-contains task.sh script, which, when executed, creates symlinks and sets up env
+${local.dir}/usercache/$user/appcache/$appId/$contId/ == $cwd for containers;
+contains launch_container.sh script, which, when executed, creates symlinks and 
+sets up env
  "$local.dir"/usercache/$user/appcache/$appId/$contId/file.out
  "$local.dir"/usercache/$user/appcache/$appId/$contId/file.out.idx (?)
  "$local.dir"/usercache/$user/appcache/$appId/output/$taskId/ is where file.out* is moved after MapTask done

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapReduceChildJVM.java Wed Nov  2 05:34:31 2011
@@ -31,8 +31,7 @@ import org.apache.hadoop.mapreduce.MRJob
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.util.Apps;
 
 public class MapReduceChildJVM {
 
@@ -78,22 +77,24 @@ public class MapReduceChildJVM {
             );
     
     // Add pwd to LD_LIBRARY_PATH, add this before adding anything else
-    MRApps.addToEnvironment(
+    Apps.addToEnvironment(
         environment, 
         Environment.LD_LIBRARY_PATH.name(), 
         Environment.PWD.$());
 
     // Add the env variables passed by the user & admin
     String mapredChildEnv = getChildEnv(conf, task.isMapTask());
-    MRApps.setEnvFromInputString(environment, mapredChildEnv);
-    MRApps.setEnvFromInputString(
+    Apps.setEnvFromInputString(environment, mapredChildEnv);
+    Apps.setEnvFromInputString(
         environment, 
         conf.get(
             MRJobConfig.MAPRED_ADMIN_USER_ENV, 
             MRJobConfig.DEFAULT_MAPRED_ADMIN_USER_ENV)
         );
 
-    // Set logging level
+    // Set logging level in the environment.
+    // This is so that, if the child forks another "bin/hadoop" (common in
+    // streaming) it will have the correct loglevel.
     environment.put(
         "HADOOP_ROOT_LOGGER", 
         getChildLogLevel(conf, task.isMapTask()) + ",CLA"); 
@@ -110,7 +111,7 @@ public class MapReduceChildJVM {
     // properties.
     long logSize = TaskLog.getTaskLogLength(conf);
     Vector<String> logProps = new Vector<String>(4);
-    setupLog4jProperties(logProps, logSize);
+    setupLog4jProperties(task, logProps, logSize);
     Iterator<String> it = logProps.iterator();
     StringBuffer buffer = new StringBuffer();
     while (it.hasNext()) {
@@ -128,6 +129,8 @@ public class MapReduceChildJVM {
         MRJobConfig.STDERR_LOGFILE_ENV,
         getTaskLogFile(TaskLog.LogName.STDERR)
         );
+    environment.put(MRJobConfig.APPLICATION_ATTEMPT_ID_ENV, 
+        conf.get(MRJobConfig.APPLICATION_ATTEMPT_ID).toString());
   }
 
   private static String getChildJavaOpts(JobConf jobConf, boolean isMapTask) {
@@ -163,11 +166,11 @@ public class MapReduceChildJVM {
     return adminClasspath + " " + userClasspath;
   }
 
-  private static void setupLog4jProperties(Vector<String> vargs,
+  private static void setupLog4jProperties(Task task,
+      Vector<String> vargs,
       long logSize) {
-    vargs.add("-Dlog4j.configuration=container-log4j.properties");
-    vargs.add("-D" + MRJobConfig.TASK_LOG_DIR + "=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR);
-    vargs.add("-D" + MRJobConfig.TASK_LOG_SIZE + "=" + logSize);
+    String logLevel = getChildLogLevel(task.conf, task.isMapTask()); 
+    MRApps.addLog4jSystemProperties(logLevel, logSize, vargs);
   }
 
   public static List<String> getVMCommand(
@@ -222,7 +225,7 @@ public class MapReduceChildJVM {
 
     // Setup the log4j prop
     long logSize = TaskLog.getTaskLogLength(conf);
-    setupLog4jProperties(vargs, logSize);
+    setupLog4jProperties(task, vargs, logSize);
 
     if (conf.getProfileEnabled()) {
       if (conf.getProfileTaskRange(task.isMapTask()
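
setupLog4jProperties now takes the Task so the -D flags can carry the per-task
log level, and the flag assembly is delegated to MRApps.addLog4jSystemProperties.
A hedged sketch of what such a helper is expected to emit; only
"-Dlog4j.configuration=container-log4j.properties" is confirmed by the removed
lines above, while the other two property names are assumptions standing in for
the values of MRJobConfig.TASK_LOG_DIR and MRJobConfig.TASK_LOG_SIZE:

    import java.util.ArrayList;
    import java.util.List;

    public class Log4jArgsSketch {
        // Builds the log4j-related JVM arguments for the child task.
        static void addLog4jSystemProperties(String logLevel, long logSize,
                List<String> vargs) {
            vargs.add("-Dlog4j.configuration=container-log4j.properties");
            vargs.add("-Dyarn.app.container.log.dir=<LOG_DIR>");       // assumed key
            vargs.add("-Dyarn.app.container.log.filesize=" + logSize); // assumed key
            vargs.add("-Dhadoop.root.logger=" + logLevel + ",CLA");
        }

        public static void main(String[] args) {
            List<String> vargs = new ArrayList<String>();
            addLog4jSystemProperties("INFO", 10L * 1024 * 1024, vargs);
            for (String v : vargs) {
                System.out.println(v);
            }
        }
    }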

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java Wed Nov  2 05:34:31 2011
@@ -23,16 +23,18 @@ import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.ProtocolSignature;
 import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.ipc.RPC.Server;
-import org.apache.hadoop.ipc.VersionedProtocol;
+import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.mapred.SortedRanges.Range;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
@@ -48,7 +50,9 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
+import org.apache.hadoop.mapreduce.v2.app.security.authorize.MRAMPolicyProvider;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.service.CompositeService;
 
@@ -67,12 +71,14 @@ public class TaskAttemptListenerImpl ext
 
   private AppContext context;
   private Server server;
-  private TaskHeartbeatHandler taskHeartbeatHandler;
+  protected TaskHeartbeatHandler taskHeartbeatHandler;
   private InetSocketAddress address;
-  private Map<WrappedJvmID, org.apache.hadoop.mapred.Task> jvmIDToAttemptMap = 
+  private Map<WrappedJvmID, org.apache.hadoop.mapred.Task> jvmIDToActiveAttemptMap = 
     Collections.synchronizedMap(new HashMap<WrappedJvmID, 
         org.apache.hadoop.mapred.Task>());
   private JobTokenSecretManager jobTokenSecretManager = null;
+  private Set<WrappedJvmID> pendingJvms =
+    Collections.synchronizedSet(new HashSet<WrappedJvmID>());
   
   public TaskAttemptListenerImpl(AppContext context,
       JobTokenSecretManager jobTokenSecretManager) {
@@ -107,6 +113,14 @@ public class TaskAttemptListenerImpl ext
               conf.getInt(MRJobConfig.MR_AM_TASK_LISTENER_THREAD_COUNT, 
                   MRJobConfig.DEFAULT_MR_AM_TASK_LISTENER_THREAD_COUNT),
               false, conf, jobTokenSecretManager);
+      
+      // Enable service authorization?
+      if (conf.getBoolean(
+          CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, 
+          false)) {
+        refreshServiceAcls(conf, new MRAMPolicyProvider());
+      }
+
       server.start();
       InetSocketAddress listenerAddress = server.getListenerAddress();
       this.address =
@@ -118,6 +132,11 @@ public class TaskAttemptListenerImpl ext
     }
   }
 
+  void refreshServiceAcls(Configuration configuration, 
+      PolicyProvider policyProvider) {
+    this.server.refreshServiceAcl(configuration, policyProvider);
+  }
+
   @Override
   public void stop() {
     stopRpcServer();
@@ -302,8 +321,6 @@ public class TaskAttemptListenerImpl ext
     taskAttemptStatus.progress = taskStatus.getProgress();
     LOG.info("Progress of TaskAttempt " + taskAttemptID + " is : "
         + taskStatus.getProgress());
-    // Task sends the diagnostic information to the TT
-    taskAttemptStatus.diagnosticInfo = taskStatus.getDiagnosticInfo();
     // Task sends the updated state-string to the TT.
     taskAttemptStatus.stateString = taskStatus.getStateString();
     // Set the output-size when map-task finishes. Set by the task itself.
@@ -382,35 +399,55 @@ public class TaskAttemptListenerImpl ext
 
     JVMId jvmId = context.jvmId;
     LOG.info("JVM with ID : " + jvmId + " asked for a task");
-
-    // TODO: Is it an authorised container to get a task? Otherwise return null.
-
-    // TODO: Is the request for task-launch still valid?
+    
+    JvmTask jvmTask = null;
+    // TODO: Is it an authorized container to get a task? Otherwise return null.
 
     // TODO: Child.java's firstTaskID isn't really firstTaskID. Ask for update
     // to jobId and task-type.
 
     WrappedJvmID wJvmID = new WrappedJvmID(jvmId.getJobId(), jvmId.isMap,
         jvmId.getId());
-    org.apache.hadoop.mapred.Task task = jvmIDToAttemptMap.get(wJvmID);
-    if (task != null) { //there may be lag in the attempt getting added here
-      LOG.info("JVM with ID: " + jvmId + " given task: " + task.getTaskID());
-      JvmTask jvmTask = new JvmTask(task, false);
-      
-      //remove the task as it is no more needed and free up the memory
-      jvmIDToAttemptMap.remove(wJvmID);
-      
-      return jvmTask;
+    synchronized(this) {
+      if(pendingJvms.contains(wJvmID)) {
+        org.apache.hadoop.mapred.Task task = jvmIDToActiveAttemptMap.get(wJvmID);
+        if (task != null) { //there may be lag in the attempt getting added here
+          LOG.info("JVM with ID: " + jvmId + " given task: " + task.getTaskID());
+          jvmTask = new JvmTask(task, false);
+
+          //remove the task as it is no more needed and free up the memory
+          //Also we have already told the JVM to process a task, so it is no
+          //longer pending, and further requests should ask it to exit.
+          pendingJvms.remove(wJvmID);
+          jvmIDToActiveAttemptMap.remove(wJvmID);
+        }
+      } else {
+        LOG.info("JVM with ID: " + jvmId + " is invalid and will be killed.");
+        jvmTask = new JvmTask(null, true);
+      }
     }
-    return null;
+    return jvmTask;
+  }
+  
+  @Override
+  public synchronized void registerPendingTask(WrappedJvmID jvmID) {
+    //Save this JVM away as one that has not been handled yet
+    pendingJvms.add(jvmID);
   }
 
   @Override
-  public void register(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID,
+  public void registerLaunchedTask(
+      org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID,
       org.apache.hadoop.mapred.Task task, WrappedJvmID jvmID) {
-    //create the mapping so that it is easy to look up
-    //when it comes back to ask for Task.
-    jvmIDToAttemptMap.put(jvmID, task);
+    synchronized(this) {
+      //create the mapping so that it is easy to look up
+      //when it comes back to ask for Task.
+      jvmIDToActiveAttemptMap.put(jvmID, task);
+      //This should not need to happen here, but just to be on the safe side
+      if (pendingJvms.add(jvmID)) {
+        LOG.warn(jvmID + " launched without first being registered");
+      }
+    }
     //register this attempt
     taskHeartbeatHandler.register(attemptID);
   }
@@ -419,8 +456,9 @@ public class TaskAttemptListenerImpl ext
   public void unregister(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID,
       WrappedJvmID jvmID) {
     //remove the mapping if not already removed
-    jvmIDToAttemptMap.remove(jvmID);
-
+    jvmIDToActiveAttemptMap.remove(jvmID);
+    //remove the pending if not already removed
+    pendingJvms.remove(jvmID);
     //unregister this attempt
     taskHeartbeatHandler.unregister(attemptID);
   }
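
The listener changes above replace the single jvmIDToAttemptMap lookup with a
two-phase handshake: a JVM is registered as pending when its container is
launched, bound to a task once the attempt is assigned, and told to exit if it
polls without being known. A self-contained sketch of that state machine
(String stands in for the real Task/JvmTask types, and "EXIT" models
new JvmTask(null, true)):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class JvmHandshakeSketch {
        private final Map<String, String> jvmToTask = new HashMap<String, String>();
        private final Set<String> pendingJvms = new HashSet<String>();

        // Container launched; no task bound to this JVM yet.
        public synchronized void registerPendingTask(String jvmId) {
            pendingJvms.add(jvmId);
        }

        // Task attempt assigned to the JVM. Normally registerPendingTask ran
        // first, so warn if this JVM was unknown until now.
        public synchronized void registerLaunchedTask(String jvmId, String task) {
            jvmToTask.put(jvmId, task);
            if (pendingJvms.add(jvmId)) {
                System.err.println(jvmId + " launched without first being registered");
            }
        }

        // Returns the task, null if the JVM should poll again, or "EXIT" if
        // the JVM is unknown or has already been served.
        public synchronized String getTask(String jvmId) {
            if (!pendingJvms.contains(jvmId)) {
                return "EXIT";
            }
            String task = jvmToTask.get(jvmId);
            if (task != null) {
                pendingJvms.remove(jvmId);   // served once; the next poll gets EXIT
                jvmToTask.remove(jvmId);
            }
            return task;
        }

        public synchronized void unregister(String jvmId) {
            jvmToTask.remove(jvmId);
            pendingJvms.remove(jvmId);
        }
    }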

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java Wed Nov  2 05:34:31 2011
@@ -177,7 +177,7 @@ class YarnChild {
       ByteArrayOutputStream baos = new ByteArrayOutputStream();
       exception.printStackTrace(new PrintStream(baos));
       if (taskid != null) {
-        umbilical.reportDiagnosticInfo(taskid, baos.toString());
+        umbilical.fatalError(taskid, baos.toString());
       }
     } catch (Throwable throwable) {
       LOG.fatal("Error running child : "
@@ -239,6 +239,14 @@ class YarnChild {
       Token<JobTokenIdentifier> jt) throws IOException {
     final JobConf job = new JobConf(MRJobConfig.JOB_CONF_FILE);
     job.setCredentials(credentials);
+    
+    String appAttemptIdEnv = System
+        .getenv(MRJobConfig.APPLICATION_ATTEMPT_ID_ENV);
+    LOG.debug("APPLICATION_ATTEMPT_ID: " + appAttemptIdEnv);
+    // Set it in conf, so that it can be used by the OutputCommitter.
+    job.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, Integer
+        .parseInt(appAttemptIdEnv));
+
     // set tcp nodelay
     job.setBoolean("ipc.client.tcpnodelay", true);
     job.setClass(MRConfig.TASK_LOCAL_OUTPUT_CLASS,
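
YarnChild now reads the application attempt number back out of its environment
and stores it in the job configuration, so the OutputCommitter can tell recovery
attempts apart. A minimal sketch of that round trip; the literal
"APPLICATION_ATTEMPT_ID" stands in for the (unshown) value of
MRJobConfig.APPLICATION_ATTEMPT_ID_ENV:

    public class AttemptIdEnvSketch {
        public static void main(String[] args) {
            // The MR AM exports this into the child container's environment.
            String appAttemptIdEnv = System.getenv("APPLICATION_ATTEMPT_ID");
            // The real code calls Integer.parseInt unconditionally, so a
            // missing variable surfaces as a NumberFormatException.
            int attempt = appAttemptIdEnv == null ? -1
                    : Integer.parseInt(appAttemptIdEnv);
            System.out.println("attempt id for the OutputCommitter: " + attempt);
        }
    }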

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java Wed Nov  2 05:34:31 2011
@@ -38,6 +38,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.v2.api.records.Counter;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
@@ -91,7 +92,8 @@ public class JobHistoryEventHandler exte
   }
 
   /* (non-Javadoc)
-   * @see org.apache.hadoop.yarn.service.AbstractService#init(org.apache.hadoop.conf.Configuration)
+   * @see org.apache.hadoop.yarn.service.AbstractService#init(org.
+   * apache.hadoop.conf.Configuration)
    * Initializes the FileSystem and Path objects for the log and done directories.
    * Creates these directories if they do not already exist.
    */
@@ -155,14 +157,15 @@ public class JobHistoryEventHandler exte
                 + doneDirPath
                 + "] based on conf: "
                 + MRJobConfig.MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR
-                + ". Either set to true or pre-create this directory with appropriate permissions";
+                + ". Either set to true or pre-create this directory with" +
+                " appropriate permissions";
         LOG.error(message);
         throw new YarnException(message);
       }
       }
     } catch (IOException e) {
-      LOG.error("Failed checking for the existance of history intermediate done directory: ["
-          + doneDirPath + "]");
+      LOG.error("Failed checking for the existance of history intermediate " +
+      		"done directory: [" + doneDirPath + "]");
       throw new YarnException(e);
     }
 
@@ -275,7 +278,7 @@ public class JobHistoryEventHandler exte
    * @param jobId the jobId.
    * @throws IOException
    */
-  protected void setupEventWriter(JobId jobId, JobSubmittedEvent jse)
+  protected void setupEventWriter(JobId jobId)
       throws IOException {
     if (stagingDirPath == null) {
       LOG.error("Log Directory is null, returning");
@@ -285,9 +288,6 @@ public class JobHistoryEventHandler exte
     MetaInfo oldFi = fileMap.get(jobId);
     Configuration conf = getConfig();
 
-    long submitTime = oldFi == null ? jse.getSubmitTime() : oldFi
-        .getJobIndexInfo().getSubmitTime();
-    
     // TODO Ideally this should be written out to the job dir
     // (.staging/jobid/files - RecoveryService will need to be patched)
     Path historyFile = JobHistoryUtils.getStagingJobHistoryFile(
@@ -301,6 +301,8 @@ public class JobHistoryEventHandler exte
     String jobName = context.getJob(jobId).getName();
     EventWriter writer = (oldFi == null) ? null : oldFi.writer;
  
+    Path logDirConfPath =
+        JobHistoryUtils.getStagingConfFile(stagingDirPath, jobId, startCount);
     if (writer == null) {
       try {
         FSDataOutputStream out = stagingDirFS.create(historyFile, true);
@@ -312,31 +314,28 @@ public class JobHistoryEventHandler exte
             + "[" + jobName + "]");
         throw ioe;
       }
-    }
-    
-    Path logDirConfPath = null;
-    if (conf != null) {
-      // TODO Ideally this should be written out to the job dir
-      // (.staging/jobid/files - RecoveryService will need to be patched)
-      logDirConfPath = JobHistoryUtils.getStagingConfFile(stagingDirPath, jobId,
-          startCount);
-      FSDataOutputStream jobFileOut = null;
-      try {
-        if (logDirConfPath != null) {
-          jobFileOut = stagingDirFS.create(logDirConfPath, true);
-          conf.writeXml(jobFileOut);
-          jobFileOut.close();
+      
+      //Write out conf only if the writer isn't already set up.
+      if (conf != null) {
+        // TODO Ideally this should be written out to the job dir
+        // (.staging/jobid/files - RecoveryService will need to be patched)
+        FSDataOutputStream jobFileOut = null;
+        try {
+          if (logDirConfPath != null) {
+            jobFileOut = stagingDirFS.create(logDirConfPath, true);
+            conf.writeXml(jobFileOut);
+            jobFileOut.close();
+          }
+        } catch (IOException e) {
+          LOG.info("Failed to write the job configuration file", e);
+          throw e;
         }
-      } catch (IOException e) {
-        LOG.info("Failed to write the job configuration file", e);
-        throw e;
       }
     }
-    
-    MetaInfo fi = new MetaInfo(historyFile, logDirConfPath, writer, submitTime,
+
+    MetaInfo fi = new MetaInfo(historyFile, logDirConfPath, writer,
         user, jobName, jobId);
     fi.getJobSummary().setJobId(jobId);
-    fi.getJobSummary().setJobSubmitTime(submitTime);
     fileMap.put(jobId, fi);
   }
 
@@ -368,11 +367,9 @@ public class JobHistoryEventHandler exte
     synchronized (lock) {
 
       // If this is JobSubmitted Event, setup the writer
-      if (event.getHistoryEvent().getEventType() == EventType.JOB_SUBMITTED) {
+      if (event.getHistoryEvent().getEventType() == EventType.AM_STARTED) {
         try {
-          JobSubmittedEvent jobSubmittedEvent =
-              (JobSubmittedEvent) event.getHistoryEvent();
-          setupEventWriter(event.getJobID(), jobSubmittedEvent);
+          setupEventWriter(event.getJobID());
         } catch (IOException ioe) {
           LOG.error("Error JobHistoryEventHandler in handleEvent: " + event,
               ioe);
@@ -386,8 +383,11 @@ public class JobHistoryEventHandler exte
       MetaInfo mi = fileMap.get(event.getJobID());
       try {
         HistoryEvent historyEvent = event.getHistoryEvent();
-        mi.writeEvent(historyEvent);
-        processEventForJobSummary(event.getHistoryEvent(), mi.getJobSummary(), event.getJobID());
+        if (! (historyEvent instanceof NormalizedResourceEvent)) {
+          mi.writeEvent(historyEvent);
+        }
+        processEventForJobSummary(event.getHistoryEvent(), mi.getJobSummary(),
+            event.getJobID());
         LOG.info("In HistoryEventHandler "
             + event.getHistoryEvent().getEventType());
       } catch (IOException e) {
@@ -396,6 +396,12 @@ public class JobHistoryEventHandler exte
         throw new YarnException(e);
       }
 
+      if (event.getHistoryEvent().getEventType() == EventType.JOB_SUBMITTED) {
+        JobSubmittedEvent jobSubmittedEvent =
+            (JobSubmittedEvent) event.getHistoryEvent();
+        mi.getJobIndexInfo().setSubmitTime(jobSubmittedEvent.getSubmitTime());
+      }
+     
       // If this is JobFinishedEvent, close the writer and setup the job-index
       if (event.getHistoryEvent().getEventType() == EventType.JOB_FINISHED) {
         try {
@@ -415,7 +421,8 @@ public class JobHistoryEventHandler exte
       if (event.getHistoryEvent().getEventType() == EventType.JOB_FAILED
           || event.getHistoryEvent().getEventType() == EventType.JOB_KILLED) {
         try {
-          JobUnsuccessfulCompletionEvent jucEvent = (JobUnsuccessfulCompletionEvent) event
+          JobUnsuccessfulCompletionEvent jucEvent = 
+              (JobUnsuccessfulCompletionEvent) event
               .getHistoryEvent();
           mi.getJobIndexInfo().setFinishTime(jucEvent.getFinishTime());
           mi.getJobIndexInfo().setNumMaps(jucEvent.getFinishedMaps());
@@ -429,14 +436,25 @@ public class JobHistoryEventHandler exte
     }
   }
 
-  private void processEventForJobSummary(HistoryEvent event, JobSummary summary, JobId jobId) {
+  public void processEventForJobSummary(HistoryEvent event, JobSummary summary, 
+      JobId jobId) {
     // context.getJob could be used for some of this info as well.
     switch (event.getEventType()) {
     case JOB_SUBMITTED:
       JobSubmittedEvent jse = (JobSubmittedEvent) event;
       summary.setUser(jse.getUserName());
       summary.setQueue(jse.getJobQueueName());
+      summary.setJobSubmitTime(jse.getSubmitTime());
       break;
+    case NORMALIZED_RESOURCE:
+      NormalizedResourceEvent normalizedResourceEvent = 
+            (NormalizedResourceEvent) event;
+      if (normalizedResourceEvent.getTaskType() == TaskType.MAP) {
+        summary.setResourcesPerMap(normalizedResourceEvent.getMemory());
+      } else if (normalizedResourceEvent.getTaskType() == TaskType.REDUCE) {
+        summary.setResourcesPerReduce(normalizedResourceEvent.getMemory());
+      }
+      break;  
     case JOB_INITED:
       JobInitedEvent jie = (JobInitedEvent) event;
       summary.setJobLaunchTime(jie.getLaunchTime());
@@ -502,7 +520,8 @@ public class JobHistoryEventHandler exte
 
     if (!mi.isWriterActive()) {
       throw new IOException(
-          "Inactive Writer: Likely received multiple JobFinished / JobUnsuccessful events for JobId: ["
+          "Inactive Writer: Likely received multiple JobFinished / " +
+          "JobUnsuccessful events for JobId: ["
               + jobId + "]");
     }
 
@@ -588,12 +607,13 @@ public class JobHistoryEventHandler exte
     JobIndexInfo jobIndexInfo;
     JobSummary jobSummary;
 
-    MetaInfo(Path historyFile, Path conf, EventWriter writer, long submitTime,
+    MetaInfo(Path historyFile, Path conf, EventWriter writer, 
              String user, String jobName, JobId jobId) {
       this.historyFile = historyFile;
       this.confFile = conf;
       this.writer = writer;
-      this.jobIndexInfo = new JobIndexInfo(submitTime, -1, user, jobName, jobId, -1, -1, null);
+      this.jobIndexInfo = new JobIndexInfo(-1, -1, user, jobName, jobId, -1, -1,
+          null);
       this.jobSummary = new JobSummary();
     }
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobSummary.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobSummary.java?rev=1196458&r1=1196457&r2=1196458&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobSummary.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobSummary.java Wed Nov  2 05:34:31 2011
@@ -34,7 +34,8 @@ public class JobSummary {
   private int numFailedMaps;
   private int numFinishedReduces;
   private int numFailedReduces;
-  // private int numSlotsPerMap; | Doesn't make sense with potentially different
+  private int resourcesPerMap; // resources used per map/min resource
+  private int resourcesPerReduce; // resources used per reduce/min resource
   // resource models
   // private int numSlotsPerReduce; | Doesn't make sense with potentially
   // different resource models
@@ -112,14 +113,14 @@ public class JobSummary {
     this.numFailedMaps = numFailedMaps;
   }
 
-  // public int getNumSlotsPerMap() {
-  // return numSlotsPerMap;
-  // }
-  //
-  // public void setNumSlotsPerMap(int numSlotsPerMap) {
-  // this.numSlotsPerMap = numSlotsPerMap;
-  // }
-
+  public int getResourcesPerMap() {
+    return resourcesPerMap;
+  }
+  
+  public void setResourcesPerMap(int resourcesPerMap) {
+    this.resourcesPerMap = resourcesPerMap;
+  }
+  
   public int getNumFinishedReduces() {
     return numFinishedReduces;
   }
@@ -136,14 +137,14 @@ public class JobSummary {
     this.numFailedReduces = numFailedReduces;
   }
 
-  // public int getNumSlotsPerReduce() {
-  // return numSlotsPerReduce;
-  // }
-  //
-  // public void setNumSlotsPerReduce(int numSlotsPerReduce) {
-  // this.numSlotsPerReduce = numSlotsPerReduce;
-  // }
-
+  public int getResourcesPerReduce() {
+    return this.resourcesPerReduce;
+  }
+  
+  public void setResourcesPerReduce(int resourcesPerReduce) {
+    this.resourcesPerReduce = resourcesPerReduce;
+  }
+  
   public String getUser() {
     return user;
   }
@@ -184,14 +185,6 @@ public class JobSummary {
     this.reduceSlotSeconds = reduceSlotSeconds;
   }
 
-  // public int getClusterSlotCapacity() {
-  // return clusterSlotCapacity;
-  // }
-  //
-  // public void setClusterSlotCapacity(int clusterSlotCapacity) {
-  // this.clusterSlotCapacity = clusterSlotCapacity;
-  // }
-
   public String getJobSummaryString() {
     SummaryBuilder summary = new SummaryBuilder()
       .add("jobId", jobId)
@@ -200,6 +193,8 @@ public class JobSummary {
       .add("firstMapTaskLaunchTime", firstMapTaskLaunchTime)
       .add("firstReduceTaskLaunchTime", firstReduceTaskLaunchTime)
       .add("finishTime", jobFinishTime)
+      .add("resourcesPerMap", resourcesPerMap)
+      .add("resourcesPerReduce", resourcesPerReduce)
       .add("numMaps", numFinishedMaps + numFailedMaps)
       .add("numReduces", numFinishedReduces + numFailedReduces)
       .add("user", user)


