hadoop-mapreduce-commits mailing list archives

From: a..@apache.org
Subject: svn commit: r1173012 [1/3] - in /hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ hadoop-mapreduce-client/hadoop-mapreduce-clie...
Date: Tue, 20 Sep 2011 07:40:08 GMT
Author: atm
Date: Tue Sep 20 07:40:04 2011
New Revision: 1173012

URL: http://svn.apache.org/viewvc?rev=1173012&view=rev
Log:
Merge trunk into HA branch

Added:
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobConfPage.java
      - copied unchanged from r1173011, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobConfPage.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
      - copied unchanged from r1173011, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/
      - copied from r1173011, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/
      - copied from r1173011, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java
      - copied unchanged from r1173011, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
      - copied unchanged from r1173011, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java
      - copied unchanged from r1173011, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/UndeclaredPoolException.java
      - copied unchanged from r1173011, hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/UndeclaredPoolException.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairSchedulerPoolNames.java
      - copied unchanged from r1173011, hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairSchedulerPoolNames.java
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/.gitignore   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/conf/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/conf/capacity-scheduler.xml.template   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobMonitorAndPrint.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YarnClientProtocolProvider.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptIdPBImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeIdPBImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Router.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/HeaderBlock.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMApp.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttempt.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/MockRMApp.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/c++/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/block_forensics/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build-contrib.xml   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build.xml   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/capacity-scheduler/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/data_join/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/dynamic-scheduler/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/eclipse-plugin/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/fairscheduler/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/index/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/streaming/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/vaidya/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/docs/src/documentation/content/xdocs/fair_scheduler.xml
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/examples/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraChecksum.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraSort.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/examples/org/apache/hadoop/examples/terasort/TeraValidate.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/org/apache/hadoop/mapred/JobTracker.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/org/apache/hadoop/mapred/TaskScheduler.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/FileBench.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/io/TestSequenceFileMergeProgress.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestClusterStatus.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapred/TestJobCounters.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/TestMapCollection.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/security/authorize/TestServiceLevelAuthorization.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/webapps/job/   (props changed)

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 20 07:40:04 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-mapreduce-project:1152502-1171806
+/hadoop/common/trunk/hadoop-mapreduce-project:1152502-1173011
 /hadoop/core/branches/branch-0.19/mapred:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/.gitignore
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 20 07:40:04 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/.gitignore:1161333-1171806
+/hadoop/common/trunk/hadoop-mapreduce-project/.gitignore:1161333-1173011
 /hadoop/core/branches/branch-0.19/mapred/.gitignore:713112
 /hadoop/core/trunk/.gitignore:784664-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt Tue Sep 20 07:40:04 2011
@@ -17,6 +17,9 @@ Trunk (unreleased changes)
     MAPREDUCE-2934. MR portion of HADOOP-7607 - Simplify the RPC proxy cleanup
                     process (atm)
 
+    MAPREDUCE-2836. Provide option to fail jobs when submitted to non-existent
+    fair scheduler pools. (Ahmed Radwan via todd)
+
   BUG FIXES
 
     MAPREDUCE-2784. [Gridmix] Bug fixes in ExecutionSummarizer and 
@@ -298,6 +301,12 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-2672. MR-279: JobHistory Server needs Analysis this job. 
     (Robert Evans via mahadev)
 
+    MAPREDUCE-2965. Streamlined the methods hashCode(), equals(), compareTo()
+    and toString() for all IDs. (Siddharth Seth via vinodkv)
+
+    MAPREDUCE-2726. Added job-file to the AM and JobHistoryServer web
+    interfaces. (Jeffrey Naisbitt via vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-2026. Make JobTracker.getJobCounters() and
@@ -1333,6 +1342,34 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-3007. Fixed Yarn Mapreduce client to be able to connect to 
     JobHistoryServer in secure mode. (vinodkv)
 
+    MAPREDUCE-2987. Fixed display of logged user on RM Web-UI. (Thomas Graves
+    via acmurthy)
+
+    MAPREDUCE-3006. Fixed MapReduce AM to exit only after properly writing out
+    history file. (vinodkv)
+
+    MAPREDUCE-2925. Fixed Yarn+MR client code to behave saner with completed
+    jobs. (Devaraj K via vinodkv)
+
+    MAPREDUCE-3030. Fixed a bug in NodeId.equals() that was causing RM to
+    reject all NMs. (Devaraj K via vinodkv)
+
+    MAPREDUCE-3042. Fixed default ResourceTracker address. (Chris Riccomini
+    via acmurthy) 
+
+    MAPREDUCE-3038. job history server not starting because conf() missing
+    HsController (Jeffrey Naisbitt via mahadev)
+
+    MAPREDUCE-3004. Fix ReduceTask to not assume 'local' mode in YARN. (Hitesh
+    Shah via acmurthy)
+
+    MAPREDUCE-3017. The Web UI shows FINISHED for killed/successful/failed jobs.
+    (mahadev)
+
+    MAPREDUCE-3040. Fixed extra copy of Configuration in
+    YarnClientProtocolProvider and ensured MiniMRYarnCluster sets JobHistory
+    configuration for tests. (acmurthy) 
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -1346,6 +1383,9 @@ Release 0.22.0 - Unreleased
     MAPREDUCE-2994. Fixed a bug in ApplicationID parsing that affects RM
     UI. (Devaraj K via vinodkv)
 
+    MAPREDUCE-1788. o.a.h.mapreduce.Job shouldn't make a copy of the JobConf.
+    (Arun Murthy via mahadev)
+
   NEW FEATURES
 
     MAPREDUCE-1804. Stress-test tool for HDFS introduced in HDFS-708.

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 20 07:40:04 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:1161333-1171806
+/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:1161333-1173011
 /hadoop/core/branches/branch-0.19/mapred/CHANGES.txt:713112
 /hadoop/mapreduce/branches/HDFS-641/CHANGES.txt:817878-835964

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 20 07:40:04 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/conf:1152502-1171806
+/hadoop/common/trunk/hadoop-mapreduce-project/conf:1152502-1173011
 /hadoop/core/branches/branch-0.19/mapred/conf:713112
 /hadoop/core/trunk/conf:784664-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/conf/capacity-scheduler.xml.template
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 20 07:40:04 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/conf/capacity-scheduler.xml.template:1161333-1171806
+/hadoop/common/trunk/hadoop-mapreduce-project/conf/capacity-scheduler.xml.template:1161333-1173011
 /hadoop/core/branches/branch-0.19/mapred/conf/capacity-scheduler.xml.template:713112
 /hadoop/core/trunk/conf/capacity-scheduler.xml.template:776175-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java Tue Sep 20 07:40:04 2011
@@ -74,7 +74,7 @@ public class JobHistoryEventHandler exte
 
   private BlockingQueue<JobHistoryEvent> eventQueue =
     new LinkedBlockingQueue<JobHistoryEvent>();
-  private Thread eventHandlingThread;
+  protected Thread eventHandlingThread;
   private volatile boolean stopped;
   private final Object lock = new Object();
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java Tue Sep 20 07:40:04 2011
@@ -56,12 +56,14 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher;
+import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl;
 import org.apache.hadoop.mapreduce.v2.app.local.LocalContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
 import org.apache.hadoop.mapreduce.v2.app.recover.Recovery;
 import org.apache.hadoop.mapreduce.v2.app.recover.RecoveryService;
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
+import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
 import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
@@ -83,6 +85,7 @@ import org.apache.hadoop.yarn.event.Asyn
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.service.AbstractService;
 import org.apache.hadoop.yarn.service.CompositeService;
 import org.apache.hadoop.yarn.service.Service;
 
@@ -126,6 +129,7 @@ public class MRAppMaster extends Composi
   private TaskAttemptListener taskAttemptListener;
   private JobTokenSecretManager jobTokenSecretManager =
       new JobTokenSecretManager();
+  private JobEventDispatcher jobEventDispatcher;
 
   private Job job;
   
@@ -148,7 +152,7 @@ public class MRAppMaster extends Composi
 
   @Override
   public void init(final Configuration conf) {
-    context = new RunningAppContext();
+    context = new RunningAppContext(conf);
 
     // Job name is the same as the app name util we support DAG of jobs
     // for an app later
@@ -182,18 +186,17 @@ public class MRAppMaster extends Composi
     //service to log job history events
     EventHandler<JobHistoryEvent> historyService = 
         createJobHistoryHandler(context);
-    addIfService(historyService);
+    dispatcher.register(org.apache.hadoop.mapreduce.jobhistory.EventType.class,
+        historyService);
 
-    JobEventDispatcher synchronousJobEventDispatcher = new JobEventDispatcher();
+    this.jobEventDispatcher = new JobEventDispatcher();
 
     //register the event dispatchers
-    dispatcher.register(JobEventType.class, synchronousJobEventDispatcher);
+    dispatcher.register(JobEventType.class, jobEventDispatcher);
     dispatcher.register(TaskEventType.class, new TaskEventDispatcher());
     dispatcher.register(TaskAttemptEventType.class, 
         new TaskAttemptEventDispatcher());
     dispatcher.register(TaskCleaner.EventType.class, taskCleaner);
-    dispatcher.register(org.apache.hadoop.mapreduce.jobhistory.EventType.class,
-        historyService);
     
     if (conf.getBoolean(MRJobConfig.MAP_SPECULATIVE, false)
         || conf.getBoolean(MRJobConfig.REDUCE_SPECULATIVE, false)) {
@@ -203,10 +206,34 @@ public class MRAppMaster extends Composi
     }
 
     dispatcher.register(Speculator.EventType.class,
-        new SpeculatorEventDispatcher());
+        new SpeculatorEventDispatcher(conf));
 
-    Credentials fsTokens = new Credentials();
+    // service to allocate containers from RM (if non-uber) or to fake it (uber)
+    containerAllocator = createContainerAllocator(clientService, context);
+    addIfService(containerAllocator);
+    dispatcher.register(ContainerAllocator.EventType.class, containerAllocator);
+
+    // corresponding service to launch allocated containers via NodeManager
+    containerLauncher = createContainerLauncher(context);
+    addIfService(containerLauncher);
+    dispatcher.register(ContainerLauncher.EventType.class, containerLauncher);
+
+    // Add the JobHistoryEventHandler last so that it is properly stopped first.
+    // This will guarantee that all history-events are flushed before AM goes
+    // ahead with shutdown.
+    // Note: Even though JobHistoryEventHandler is started last, if any
+    // component creates a JobHistoryEvent in the meanwhile, it will be just be
+    // queued inside the JobHistoryEventHandler 
+    addIfService(historyService);
 
+    super.init(conf);
+  } // end of init()
+
+  /** Create and initialize (but don't start) a single job. */
+  protected Job createJob(Configuration conf) {
+
+    // ////////// Obtain the tokens needed by the job. //////////
+    Credentials fsTokens = new Credentials();
     UserGroupInformation currentUser = null;
 
     try {
@@ -234,66 +261,12 @@ public class MRAppMaster extends Composi
     } catch (IOException e) {
       throw new YarnException(e);
     }
-
-    super.init(conf);
-
-    //---- start of what used to be startJobs() code:
-
-    Configuration config = getConfig();
-
-    job = createJob(config, fsTokens, currentUser.getUserName());
-
-    /** create a job event for job intialization */
-    JobEvent initJobEvent = new JobEvent(job.getID(), JobEventType.JOB_INIT);
-    /** send init to the job (this does NOT trigger job execution) */
-    synchronousJobEventDispatcher.handle(initJobEvent);
-
-    // send init to speculator. This won't yest start as dispatcher isn't
-    // started yet.
-    dispatcher.getEventHandler().handle(
-        new SpeculatorEvent(job.getID(), clock.getTime()));
-
-    // JobImpl's InitTransition is done (call above is synchronous), so the
-    // "uber-decision" (MR-1220) has been made.  Query job and switch to
-    // ubermode if appropriate (by registering different container-allocator
-    // and container-launcher services/event-handlers).
-
-    if (job.isUber()) {
-      LOG.info("MRAppMaster uberizing job " + job.getID()
-               + " in local container (\"uber-AM\").");
-    } else {
-      LOG.info("MRAppMaster launching normal, non-uberized, multi-container "
-               + "job " + job.getID() + ".");
-    }
-
-    // service to allocate containers from RM (if non-uber) or to fake it (uber)
-    containerAllocator =
-        createContainerAllocator(clientService, context, job.isUber());
-    addIfService(containerAllocator);
-    dispatcher.register(ContainerAllocator.EventType.class, containerAllocator);
-    if (containerAllocator instanceof Service) {
-      ((Service) containerAllocator).init(config);
-    }
-
-    // corresponding service to launch allocated containers via NodeManager
-    containerLauncher = createContainerLauncher(context, job.isUber());
-    addIfService(containerLauncher);
-    dispatcher.register(ContainerLauncher.EventType.class, containerLauncher);
-    if (containerLauncher instanceof Service) {
-      ((Service) containerLauncher).init(config);
-    }
-
-  } // end of init()
-
-  /** Create and initialize (but don't start) a single job. 
-   * @param fsTokens */
-  protected Job createJob(Configuration conf, Credentials fsTokens, 
-      String user) {
+    // ////////// End of obtaining the tokens needed by the job. //////////
 
     // create single job
     Job newJob = new JobImpl(appID, conf, dispatcher.getEventHandler(),
         taskAttemptListener, jobTokenSecretManager, fsTokens, clock, startCount,
-        completedTasksFromPreviousRun, metrics, user);
+        completedTasksFromPreviousRun, metrics, currentUser.getUserName());
     ((RunningAppContext) context).jobs.put(newJob.getID(), newJob);
 
     dispatcher.register(JobFinishEvent.Type.class,
@@ -388,19 +361,13 @@ public class MRAppMaster extends Composi
   }
 
   protected ContainerAllocator createContainerAllocator(
-      ClientService clientService, AppContext context, boolean isLocal) {
-    //return new StaticContainerAllocator(context);
-    return isLocal
-        ? new LocalContainerAllocator(clientService, context)
-        : new RMContainerAllocator(clientService, context);
+      final ClientService clientService, final AppContext context) {
+    return new ContainerAllocatorRouter(clientService, context);
   }
 
-  protected ContainerLauncher createContainerLauncher(AppContext context,
-                                                      boolean isLocal) {
-    return isLocal
-        ? new LocalContainerLauncher(context,
-            (TaskUmbilicalProtocol) taskAttemptListener)
-        : new ContainerLauncherImpl(context);
+  protected ContainerLauncher
+      createContainerLauncher(final AppContext context) {
+    return new ContainerLauncherRouter(context);
   }
 
   //TODO:should have an interface for MRClientService
@@ -440,9 +407,96 @@ public class MRAppMaster extends Composi
     return taskAttemptListener;
   }
 
-  class RunningAppContext implements AppContext {
+  /**
+   * By the time life-cycle of this router starts, job-init would have already
+   * happened.
+   */
+  private final class ContainerAllocatorRouter extends AbstractService
+      implements ContainerAllocator {
+    private final ClientService clientService;
+    private final AppContext context;
+    private ContainerAllocator containerAllocator;
+
+    ContainerAllocatorRouter(ClientService clientService,
+        AppContext context) {
+      super(ContainerAllocatorRouter.class.getName());
+      this.clientService = clientService;
+      this.context = context;
+    }
+
+    @Override
+    public synchronized void start() {
+      if (job.isUber()) {
+        this.containerAllocator = new LocalContainerAllocator(
+            this.clientService, this.context);
+      } else {
+        this.containerAllocator = new RMContainerAllocator(
+            this.clientService, this.context);
+      }
+      ((Service)this.containerAllocator).init(getConfig());
+      ((Service)this.containerAllocator).start();
+      super.start();
+    }
+
+    @Override
+    public synchronized void stop() {
+      ((Service)this.containerAllocator).stop();
+      super.stop();
+    }
+
+    @Override
+    public void handle(ContainerAllocatorEvent event) {
+      this.containerAllocator.handle(event);
+    }
+  }
+
+  /**
+   * By the time life-cycle of this router starts, job-init would have already
+   * happened.
+   */
+  private final class ContainerLauncherRouter extends AbstractService
+      implements ContainerLauncher {
+    private final AppContext context;
+    private ContainerLauncher containerLauncher;
+
+    ContainerLauncherRouter(AppContext context) {
+      super(ContainerLauncherRouter.class.getName());
+      this.context = context;
+    }
+
+    @Override
+    public synchronized void start() {
+      if (job.isUber()) {
+        this.containerLauncher = new LocalContainerLauncher(context,
+            (TaskUmbilicalProtocol) taskAttemptListener);
+      } else {
+        this.containerLauncher = new ContainerLauncherImpl(context);
+      }
+      ((Service)this.containerLauncher).init(getConfig());
+      ((Service)this.containerLauncher).start();
+      super.start();
+    }
+
+    @Override
+    public void handle(ContainerLauncherEvent event) {
+        this.containerLauncher.handle(event);
+    }
+
+    @Override
+    public synchronized void stop() {
+      ((Service)this.containerLauncher).stop();
+      super.stop();
+    }
+  }
+
+  private class RunningAppContext implements AppContext {
 
-    private Map<JobId, Job> jobs = new ConcurrentHashMap<JobId, Job>();
+    private final Map<JobId, Job> jobs = new ConcurrentHashMap<JobId, Job>();
+    private final Configuration conf;
+
+    public RunningAppContext(Configuration config) {
+      this.conf = config;
+    }
 
     @Override
     public ApplicationAttemptId getApplicationAttemptId() {
@@ -481,7 +535,7 @@ public class MRAppMaster extends Composi
 
     @Override
     public CharSequence getUser() {
-      return getConfig().get(MRJobConfig.USER_NAME);
+      return this.conf.get(MRJobConfig.USER_NAME);
     }
 
     @Override
@@ -492,13 +546,45 @@ public class MRAppMaster extends Composi
 
   @Override
   public void start() {
+
+    ///////////////////// Create the job itself.
+    job = createJob(getConfig());
+    // End of creating the job.
+
     // metrics system init is really init & start.
     // It's more test friendly to put it here.
     DefaultMetricsSystem.initialize("MRAppMaster");
 
-    startJobs();
+    /** create a job event for job intialization */
+    JobEvent initJobEvent = new JobEvent(job.getID(), JobEventType.JOB_INIT);
+    /** send init to the job (this does NOT trigger job execution) */
+    // This is a synchronous call, not an event through dispatcher. We want
+    // job-init to be done completely here.
+    jobEventDispatcher.handle(initJobEvent);
+
+    // send init to speculator. This won't yest start as dispatcher isn't
+    // started yet.
+    dispatcher.getEventHandler().handle(
+        new SpeculatorEvent(job.getID(), clock.getTime()));
+
+    // JobImpl's InitTransition is done (call above is synchronous), so the
+    // "uber-decision" (MR-1220) has been made.  Query job and switch to
+    // ubermode if appropriate (by registering different container-allocator
+    // and container-launcher services/event-handlers).
+
+    if (job.isUber()) {
+      LOG.info("MRAppMaster uberizing job " + job.getID()
+               + " in local container (\"uber-AM\").");
+    } else {
+      LOG.info("MRAppMaster launching normal, non-uberized, multi-container "
+               + "job " + job.getID() + ".");
+    }
+
     //start all the components
     super.start();
+
+    // All components have started, start the job.
+    startJobs();
   }
 
   /**
@@ -546,10 +632,14 @@ public class MRAppMaster extends Composi
 
   private class SpeculatorEventDispatcher implements
       EventHandler<SpeculatorEvent> {
+    private final Configuration conf;
+    public SpeculatorEventDispatcher(Configuration config) {
+      this.conf = config;
+    }
     @Override
     public void handle(SpeculatorEvent event) {
-      if (getConfig().getBoolean(MRJobConfig.MAP_SPECULATIVE, false)
-          || getConfig().getBoolean(MRJobConfig.REDUCE_SPECULATIVE, false)) {
+      if (conf.getBoolean(MRJobConfig.MAP_SPECULATIVE, false)
+          || conf.getBoolean(MRJobConfig.REDUCE_SPECULATIVE, false)) {
         // Speculator IS enabled, direct the event to there.
         speculator.handle(event);
       }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java Tue Sep 20 07:40:04 2011
@@ -32,6 +32,7 @@ public class AMWebApp extends WebApp imp
     route("/", AppController.class);
     route("/app", AppController.class);
     route(pajoin("/job", JOB_ID), AppController.class, "job");
+    route(pajoin("/conf", JOB_ID), AppController.class, "conf");
     route(pajoin("/jobcounters", JOB_ID), AppController.class, "jobCounters");
     route(pajoin("/tasks", JOB_ID, TASK_TYPE), AppController.class, "tasks");
     route(pajoin("/attempts", JOB_ID, TASK_TYPE, ATTEMPT_STATE),

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java Tue Sep 20 07:40:04 2011
@@ -186,6 +186,21 @@ public class AppController extends Contr
   }
 
   /**
+   * @return the page that will be used to render the /conf page
+   */
+  protected Class<? extends View> confPage() {
+    return JobConfPage.class;
+  }
+
+  /**
+   * Render the /conf page
+   */
+  public void conf() {
+    requireJob();
+    render(confPage());
+  }
+
+  /**
    * Render a BAD_REQUEST error.
    * @param s the error message to include.
    */

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java Tue Sep 20 07:40:04 2011
@@ -52,6 +52,7 @@ public class NavBlock extends HtmlBlock 
         ul().
           li().a(url("job", jobid), "Overview")._().
           li().a(url("jobcounters", jobid), "Counters")._().
+          li().a(url("conf", jobid), "Configuration")._().
           li().a(url("tasks", jobid, "m"), "Map tasks")._().
           li().a(url("tasks", jobid, "r"), "Reduce tasks")._()._();
     }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java Tue Sep 20 07:40:04 2011
@@ -63,6 +63,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanupEvent;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -234,11 +235,16 @@ public class MRApp extends MRAppMaster {
   }
 
   @Override
-  protected Job createJob(Configuration conf, Credentials fsTokens,
-      String user) {
-    Job newJob = new TestJob(getAppID(), getDispatcher().getEventHandler(),
+  protected Job createJob(Configuration conf) {
+    UserGroupInformation currentUser = null;
+    try {
+      currentUser = UserGroupInformation.getCurrentUser();
+    } catch (IOException e) {
+      throw new YarnException(e);
+    }
+    Job newJob = new TestJob(conf, getAppID(), getDispatcher().getEventHandler(),
                              getTaskAttemptListener(), getContext().getClock(),
-                             user);
+                             currentUser.getUserName());
     ((AppContext) getContext()).getAllJobs().put(newJob.getID(), newJob);
 
     getDispatcher().register(JobFinishEvent.Type.class,
@@ -279,8 +285,7 @@ public class MRApp extends MRAppMaster {
   }
   
   @Override
-  protected ContainerLauncher createContainerLauncher(AppContext context,
-                                                      boolean isLocal) {
+  protected ContainerLauncher createContainerLauncher(AppContext context) {
     return new MockContainerLauncher();
   }
 
@@ -317,7 +322,7 @@ public class MRApp extends MRAppMaster {
 
   @Override
   protected ContainerAllocator createContainerAllocator(
-      ClientService clientService, AppContext context, boolean isLocal) {
+      ClientService clientService, AppContext context) {
     return new ContainerAllocator(){
       private int containerCount;
       @Override
@@ -369,12 +374,14 @@ public class MRApp extends MRAppMaster {
 
   class TestJob extends JobImpl {
     //override the init transition
+    private final TestInitTransition initTransition = new TestInitTransition(
+        maps, reduces);
     StateMachineFactory<JobImpl, JobState, JobEventType, JobEvent> localFactory
         = stateMachineFactory.addTransition(JobState.NEW,
             EnumSet.of(JobState.INITED, JobState.FAILED),
             JobEventType.JOB_INIT,
             // This is abusive.
-            new TestInitTransition(getConfig(), maps, reduces));
+            initTransition);
 
     private final StateMachine<JobState, JobEventType, JobEvent>
         localStateMachine;
@@ -384,10 +391,10 @@ public class MRApp extends MRAppMaster {
       return localStateMachine;
     }
 
-    public TestJob(ApplicationId appID, EventHandler eventHandler,
-        TaskAttemptListener taskAttemptListener, Clock clock, 
-        String user) {
-      super(appID, new Configuration(), eventHandler, taskAttemptListener,
+    public TestJob(Configuration conf, ApplicationId appID,
+        EventHandler eventHandler, TaskAttemptListener taskAttemptListener,
+        Clock clock, String user) {
+      super(appID, conf, eventHandler, taskAttemptListener,
           new JobTokenSecretManager(), new Credentials(), clock, getStartCount(), 
           getCompletedTaskFromPreviousRun(), metrics, user);
 
@@ -399,17 +406,14 @@ public class MRApp extends MRAppMaster {
 
   //Override InitTransition to not look for split files etc
   static class TestInitTransition extends JobImpl.InitTransition {
-    private Configuration config;
     private int maps;
     private int reduces;
-    TestInitTransition(Configuration config, int maps, int reduces) {
-      this.config = config;
+    TestInitTransition(int maps, int reduces) {
       this.maps = maps;
       this.reduces = reduces;
     }
     @Override
     protected void setup(JobImpl job) throws IOException {
-      job.conf = config;
       job.conf.setInt(MRJobConfig.NUM_REDUCES, reduces);
       job.remoteJobConfFile = new Path("test");
     }

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java Tue Sep 20 07:40:04 2011
@@ -94,7 +94,7 @@ public class MRAppBenchmark {
     
     @Override
     protected ContainerAllocator createContainerAllocator(
-        ClientService clientService, AppContext context, boolean isLocal) {
+        ClientService clientService, AppContext context) {
       return new ThrottledContainerAllocator();
     }
     

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java Tue Sep 20 07:40:04 2011
@@ -169,7 +169,7 @@ public class TestFail {
 
   @Test
   public void testTaskFailWithUnusedContainer() throws Exception {
-    MRApp app = new FailingTaskWithUnusedContainer();
+    MRApp app = new MRAppWithFailingTaskAndUnusedContainer();
     Configuration conf = new Configuration();
     int maxAttempts = 1;
     conf.setInt(MRJobConfig.MAP_MAX_ATTEMPTS, maxAttempts);
@@ -194,21 +194,21 @@ public class TestFail {
     app.waitForState(job, JobState.FAILED);
   }
 
-  static class FailingTaskWithUnusedContainer extends MRApp {
+  static class MRAppWithFailingTaskAndUnusedContainer extends MRApp {
 
-    public FailingTaskWithUnusedContainer() {
+    public MRAppWithFailingTaskAndUnusedContainer() {
       super(1, 0, false, "TaskFailWithUnsedContainer", true);
     }
 
-    protected ContainerLauncher createContainerLauncher(AppContext context,
-        boolean isLocal) {
+    @Override
+    protected ContainerLauncher createContainerLauncher(AppContext context) {
       return new ContainerLauncherImpl(context) {
         @Override
         public void handle(ContainerLauncherEvent event) {
 
           switch (event.getType()) {
           case CONTAINER_REMOTE_LAUNCH:
-            super.handle(event);
+            super.handle(event); // Unused event and container.
             break;
           case CONTAINER_REMOTE_CLEANUP:
             getContext().getEventHandler().handle(

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java Tue Sep 20 07:40:04 2011
@@ -24,10 +24,10 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
-import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -195,6 +195,7 @@ public class TestMRApp {
   public static void main(String[] args) throws Exception {
     TestMRApp t = new TestMRApp();
     t.testMapReduce();
+    t.testZeroMapReduces();
     t.testCommitPending();
     t.testCompletedMapsForReduceSlowstart();
     t.testJobError();

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java Tue Sep 20 07:40:04 2011
@@ -158,6 +158,11 @@ public class TestAMWebApp {
     return params;
   }
 
+  @Test public void testConfView() {
+    WebAppTests.testPage(JobConfPage.class, AppContext.class,
+                         new TestAppContext());
+  }
+
   public static void main(String[] args) {
     WebApps.$for("yarn", AppContext.class, new TestAppContext(0, 8, 88, 4)).
         at(58888).inDevMode().start(new AMWebApp()).joinThread();

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobId.java Tue Sep 20 07:40:04 2011
@@ -18,13 +18,95 @@
 
 package org.apache.hadoop.mapreduce.v2.api.records;
 
+import java.text.NumberFormat;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 
-public interface JobId {
+/**
+ * <p><code>JobId</code> represents the <em>globally unique</em> 
+ * identifier for a MapReduce job.</p>
+ * 
+ * <p>The globally unique nature of the identifier is achieved by using the 
+ * <em>cluster timestamp</em> from the associated ApplicationId. i.e. 
+ * start-time of the <code>ResourceManager</code> along with a monotonically
+ * increasing counter for the jobId.</p>
+ */
+public abstract class JobId implements Comparable<JobId> {
+
+  /**
+   * Get the associated <em>ApplicationId</em> which represents the 
+   * start time of the <code>ResourceManager</code> and is used to generate 
+   * the globally unique <code>JobId</code>.
+   * @return associated <code>ApplicationId</code>
+   */
   public abstract ApplicationId getAppId();
+  
+  /**
+   * Get the short integer identifier of the <code>JobId</code>
+   * which is unique for all applications started by a particular instance
+   * of the <code>ResourceManager</code>.
+   * @return short integer identifier of the <code>JobId</code>
+   */
   public abstract int getId();
   
   public abstract void setAppId(ApplicationId appId);
   public abstract void setId(int id);
 
-}
+
+  protected static final String JOB = "job";
+  protected static final char SEPARATOR = '_';
+  static final ThreadLocal<NumberFormat> jobIdFormat =
+      new ThreadLocal<NumberFormat>() {
+        @Override
+        public NumberFormat initialValue() {
+          NumberFormat fmt = NumberFormat.getInstance();
+          fmt.setGroupingUsed(false);
+          fmt.setMinimumIntegerDigits(4);
+          return fmt;
+        }
+      };
+
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder(JOB);
+    builder.append(SEPARATOR);
+    builder.append(getAppId().getClusterTimestamp());
+    builder.append(SEPARATOR);
+    builder.append(jobIdFormat.get().format(getId()));
+    return builder.toString();
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getAppId().hashCode();
+    result = prime * result + getId();
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    JobId other = (JobId) obj;
+    if (!this.getAppId().equals(other.getAppId()))
+      return false;
+    if (this.getId() != other.getId())
+      return false;
+    return true;
+  }
+
+  @Override
+  public int compareTo(JobId other) {
+    int appIdComp = this.getAppId().compareTo(other.getAppId());
+    if (appIdComp == 0) {
+      return this.getId() - other.getId();
+    } else {
+      return appIdComp;
+    }
+  }
+}
\ No newline at end of file
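
Given the jobIdFormat defined above (no digit grouping, minimum of four integer digits), toString() renders the familiar job_<clusterTimestamp>_<id> form. A minimal sketch using the PB-backed implementations from this commit; the ApplicationIdPBImpl constructor and setters are assumptions here, not part of this diff:

    import org.apache.hadoop.mapreduce.v2.api.records.JobId;
    import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl;
    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;

    public class JobIdToStringSketch {
      public static void main(String[] args) {
        ApplicationId appId = new ApplicationIdPBImpl();  // constructor/setters assumed
        appId.setClusterTimestamp(1316504404000L);        // ResourceManager start time
        appId.setId(7);                                   // application counter

        JobId jobId = new JobIdPBImpl();
        jobId.setAppId(appId);
        jobId.setId(3);

        // toString() combines the cluster timestamp with the JobId's own
        // counter, padded to four digits: prints "job_1316504404000_0003"
        System.out.println(jobId);
      }
    }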

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java Tue Sep 20 07:40:04 2011
@@ -18,10 +18,87 @@
 
 package org.apache.hadoop.mapreduce.v2.api.records;
 
-public interface TaskAttemptId {
+/**
+ * <p>
+ * <code>TaskAttemptId</code> represents the unique identifier for a task
+ * attempt. Each task attempt is one particular instance of a Map or Reduce Task
+ * identified by its TaskId.
+ * </p>
+ * 
+ * <p>
+ * TaskAttemptId consists of 2 parts. First part is the <code>TaskId</code>,
+ * that this <code>TaskAttemptId</code> belongs to. Second part is the task
+ * attempt number.
+ * </p>
+ */
+public abstract class TaskAttemptId implements Comparable<TaskAttemptId> {
+  /**
+   * @return the associated TaskId.
+   */
   public abstract TaskId getTaskId();
+
+  /**
+   * @return the attempt id.
+   */
   public abstract int getId();
-  
+
   public abstract void setTaskId(TaskId taskId);
+
   public abstract void setId(int id);
-}
+
+  protected static final String TASKATTEMPT = "attempt";
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getId();
+    result =
+        prime * result + ((getTaskId() == null) ? 0 : getTaskId().hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    TaskAttemptId other = (TaskAttemptId) obj;
+    if (getId() != other.getId())
+      return false;
+    if (!getTaskId().equals(other.getTaskId()))
+      return false;
+    return true;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder(TASKATTEMPT);
+    TaskId taskId = getTaskId();
+    builder.append("_").append(
+        taskId.getJobId().getAppId().getClusterTimestamp());
+    builder.append("_").append(
+        JobId.jobIdFormat.get().format(
+            getTaskId().getJobId().getAppId().getId()));
+    builder.append("_");
+    builder.append(taskId.getTaskType() == TaskType.MAP ? "m" : "r");
+    builder.append("_")
+        .append(TaskId.taskIdFormat.get().format(taskId.getId()));
+    builder.append("_");
+    builder.append(getId());
+    return builder.toString();
+  }
+
+  @Override
+  public int compareTo(TaskAttemptId other) {
+    int taskIdComp = this.getTaskId().compareTo(other.getTaskId());
+    if (taskIdComp == 0) {
+      return this.getId() - other.getId();
+    } else {
+      return taskIdComp;
+    }
+  }
+}
\ No newline at end of file
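
Note that toString() here pads the application id (taskId.getJobId().getAppId().getId()) with jobIdFormat rather than the JobId counter itself; TypeConverter keeps the two equal in practice, but it matters when assembling ids by hand. A short continuation of the JobId sketch above (clusterTimestamp 1316504404000, application id 7, job counter 3), again using the PB implementations from this commit:

    TaskId taskId = new TaskIdPBImpl();
    taskId.setJobId(jobId);           // jobId as built in the earlier sketch
    taskId.setTaskType(TaskType.MAP);
    taskId.setId(5);

    TaskAttemptId attemptId = new TaskAttemptIdPBImpl();
    attemptId.setTaskId(taskId);
    attemptId.setId(0);

    // The job number comes from the ApplicationId (0007, four digits), the
    // task number is padded to six digits, and map tasks render as "m":
    // prints "attempt_1316504404000_0007_m_000005_0"
    System.out.println(attemptId);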

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskId.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskId.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskId.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskId.java Tue Sep 20 07:40:04 2011
@@ -18,12 +18,109 @@
 
 package org.apache.hadoop.mapreduce.v2.api.records;
 
-public interface TaskId {
+import java.text.NumberFormat;
+
+/**
+ * <p>
+ * <code>TaskId</code> represents the unique identifier for a Map or Reduce
+ * Task.
+ * </p>
+ * 
+ * <p>
+ * TaskId consists of 3 parts. First part is <code>JobId</code>, that this Task
+ * belongs to. Second part of the TaskId is either 'm' or 'r' representing
+ * whether the task is a map task or a reduce task. And the third part is the
+ * task number.
+ * </p>
+ */
+public abstract class TaskId implements Comparable<TaskId> {
+
+  /**
+   * @return the associated <code>JobId</code>
+   */
   public abstract JobId getJobId();
-  public abstract  TaskType getTaskType();
+
+  /**
+   * @return the type of the task - MAP/REDUCE
+   */
+  public abstract TaskType getTaskType();
+
+  /**
+   * @return the task number.
+   */
   public abstract int getId();
-  
+
   public abstract void setJobId(JobId jobId);
+
   public abstract void setTaskType(TaskType taskType);
+
   public abstract void setId(int id);
-}
+
+  protected static final String TASK = "task";
+
+  static final ThreadLocal<NumberFormat> taskIdFormat =
+      new ThreadLocal<NumberFormat>() {
+        @Override
+        public NumberFormat initialValue() {
+          NumberFormat fmt = NumberFormat.getInstance();
+          fmt.setGroupingUsed(false);
+          fmt.setMinimumIntegerDigits(6);
+          return fmt;
+        }
+      };
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getId();
+    result = prime * result + getJobId().hashCode();
+    result = prime * result + getTaskType().hashCode();
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    TaskId other = (TaskId) obj;
+    if (getId() != other.getId())
+      return false;
+    if (!getJobId().equals(other.getJobId()))
+      return false;
+    if (getTaskType() != other.getTaskType())
+      return false;
+    return true;
+  }
+      
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder(TASK);
+    JobId jobId = getJobId();
+    builder.append("_").append(jobId.getAppId().getClusterTimestamp());
+    builder.append("_").append(
+        JobId.jobIdFormat.get().format(jobId.getAppId().getId()));
+    builder.append("_");
+    builder.append(getTaskType() == TaskType.MAP ? "m" : "r").append("_");
+    builder.append(taskIdFormat.get().format(getId()));
+    return builder.toString();
+  }
+
+  @Override
+  public int compareTo(TaskId other) {
+    int jobIdComp = this.getJobId().compareTo(other.getJobId());
+    if (jobIdComp == 0) {
+      if (this.getTaskType() == other.getTaskType()) {
+        return this.getId() - other.getId();
+      } else {
+        return this.getTaskType().compareTo(other.getTaskType());
+      }
+    } else {
+      return jobIdComp;
+    }
+  }
+}
\ No newline at end of file
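
Because compareTo orders by JobId first, then TaskType, then the task number, sorting a job's TaskIds groups every map task ahead of every reduce task (assuming MAP precedes REDUCE in the TaskType enum), each group in numeric order. A small continuation of the sketches above:

    // Reduce task 0 of the same hypothetical job.
    // Uses java.util.{List, ArrayList, Arrays, Collections}.
    TaskId reduce0 = new TaskIdPBImpl();
    reduce0.setJobId(jobId);
    reduce0.setTaskType(TaskType.REDUCE);
    reduce0.setId(0);

    List<TaskId> tasks = new ArrayList<TaskId>(Arrays.asList(reduce0, taskId));
    Collections.sort(tasks);
    // sorted order:
    //   task_1316504404000_0007_m_000005, task_1316504404000_0007_r_000000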

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java Tue Sep 20 07:40:04 2011
@@ -18,35 +18,21 @@
 
 package org.apache.hadoop.mapreduce.v2.api.records.impl.pb;
 
-import java.text.NumberFormat;
-
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ProtoBase;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
     
-public class JobIdPBImpl extends ProtoBase<JobIdProto> implements JobId {
+public class JobIdPBImpl extends JobId {
 
-  protected static final String JOB = "job";
-  protected static final char SEPARATOR = '_';
-  protected static final NumberFormat idFormat = NumberFormat.getInstance();
-  static {
-    idFormat.setGroupingUsed(false);
-    idFormat.setMinimumIntegerDigits(4);
-  }
-  
-  
   JobIdProto proto = JobIdProto.getDefaultInstance();
   JobIdProto.Builder builder = null;
   boolean viaProto = false;
   
   private ApplicationId applicationId = null;
-//  boolean hasLocalAppId = false;
-  
-  
+
   public JobIdPBImpl() {
     builder = JobIdProto.newBuilder();
   }
@@ -56,17 +42,17 @@ public class JobIdPBImpl extends ProtoBa
     viaProto = true;
   }
 
-  @Override
   public synchronized JobIdProto getProto() {
-  
-      mergeLocalToProto();
+    mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
     viaProto = true;
     return proto;
   }
 
   private synchronized void mergeLocalToBuilder() {
-    if (this.applicationId != null && !((ApplicationIdPBImpl)this.applicationId).getProto().equals(builder.getAppId()))   {
+    if (this.applicationId != null
+        && !((ApplicationIdPBImpl) this.applicationId).getProto().equals(
+            builder.getAppId())) {
       builder.setAppId(convertToProtoFormat(this.applicationId));
     }
   }
@@ -107,7 +93,6 @@ public class JobIdPBImpl extends ProtoBa
       builder.clearAppId();
     }
     this.applicationId = appId;
-//    builder.setAppId(convertToProtoFormat(appId));
   }
   @Override
   public synchronized int getId() {
@@ -121,21 +106,12 @@ public class JobIdPBImpl extends ProtoBa
     builder.setId((id));
   }
 
-  private synchronized ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) {
+  private ApplicationIdPBImpl convertFromProtoFormat(
+      ApplicationIdProto p) {
     return new ApplicationIdPBImpl(p);
   }
 
-  private synchronized ApplicationIdProto convertToProtoFormat(ApplicationId t) {
-    return ((ApplicationIdPBImpl)t).getProto();
-  }
-  
-  @Override
-  public String toString() {
-    StringBuilder builder = new StringBuilder(JOB);
-    builder.append(SEPARATOR);
-    builder.append(getAppId().getClusterTimestamp());
-    builder.append(SEPARATOR);
-    builder.append(idFormat.format(getId()));
-    return builder.toString();
+  private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
+    return ((ApplicationIdPBImpl) t).getProto();
   }
-}  
+}
\ No newline at end of file
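
Dropping the shared static NumberFormat from the PB implementation is more than a cleanup: java.text.NumberFormat is not safe for concurrent use, so a static instance formatted from several threads can produce corrupted ids. The abstract records above replace it with a ThreadLocal so each thread gets its own formatter. The general pattern, as a stand-alone sketch (not part of this commit):

    import java.text.NumberFormat;

    final class PaddedFormat {
      static final ThreadLocal<NumberFormat> FOUR_DIGITS =
          new ThreadLocal<NumberFormat>() {
            @Override
            protected NumberFormat initialValue() {
              NumberFormat fmt = NumberFormat.getInstance();
              fmt.setGroupingUsed(false);     // no "1,234" style grouping
              fmt.setMinimumIntegerDigits(4); // pad with leading zeros
              return fmt;
            }
          };
    }

    // Each calling thread sees its own NumberFormat instance:
    String padded = PaddedFormat.FOUR_DIGITS.get().format(42);  // "0042"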

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java Tue Sep 20 07:40:04 2011
@@ -18,36 +18,19 @@
 
 package org.apache.hadoop.mapreduce.v2.api.records.impl.pb;
 
-
-import java.text.NumberFormat;
-
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto;
 import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder;
 import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto;
-import org.apache.hadoop.yarn.api.records.ProtoBase;
-
-
     
-public class TaskAttemptIdPBImpl extends ProtoBase<TaskAttemptIdProto> implements TaskAttemptId {
+public class TaskAttemptIdPBImpl extends TaskAttemptId {
   TaskAttemptIdProto proto = TaskAttemptIdProto.getDefaultInstance();
   TaskAttemptIdProto.Builder builder = null;
   boolean viaProto = false;
   
   private TaskId taskId = null;
-  protected static final NumberFormat idFormat = NumberFormat.getInstance();
-  static {
-    idFormat.setGroupingUsed(false);
-    idFormat.setMinimumIntegerDigits(6);
-  }
   
-  protected static final NumberFormat jobidFormat = NumberFormat.getInstance();
-  static {
-    jobidFormat.setGroupingUsed(false);
-    jobidFormat.setMinimumIntegerDigits(4);
-  }
   
   
   public TaskAttemptIdPBImpl() {
@@ -59,20 +42,21 @@ public class TaskAttemptIdPBImpl extends
     viaProto = true;
   }
   
-  public TaskAttemptIdProto getProto() {
+  public synchronized TaskAttemptIdProto getProto() {
       mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
     viaProto = true;
     return proto;
   }
 
-  private void mergeLocalToBuilder() {
-    if (this.taskId != null && !((TaskIdPBImpl)this.taskId).getProto().equals(builder.getTaskId())) {
+  private synchronized void mergeLocalToBuilder() {
+    if (this.taskId != null
+        && !((TaskIdPBImpl) this.taskId).getProto().equals(builder.getTaskId())) {
       builder.setTaskId(convertToProtoFormat(this.taskId));
     }
   }
 
-  private void mergeLocalToProto() {
+  private synchronized void mergeLocalToProto() {
     if (viaProto) 
       maybeInitBuilder();
     mergeLocalToBuilder();
@@ -80,7 +64,7 @@ public class TaskAttemptIdPBImpl extends
     viaProto = true;
   }
 
-  private void maybeInitBuilder() {
+  private synchronized void maybeInitBuilder() {
     if (viaProto || builder == null) {
       builder = TaskAttemptIdProto.newBuilder(proto);
     }
@@ -89,18 +73,18 @@ public class TaskAttemptIdPBImpl extends
     
   
   @Override
-  public int getId() {
+  public synchronized int getId() {
     TaskAttemptIdProtoOrBuilder p = viaProto ? proto : builder;
     return (p.getId());
   }
 
   @Override
-  public void setId(int id) {
+  public synchronized void setId(int id) {
     maybeInitBuilder();
     builder.setId((id));
   }
   @Override
-  public TaskId getTaskId() {
+  public synchronized TaskId getTaskId() {
     TaskAttemptIdProtoOrBuilder p = viaProto ? proto : builder;
     if (this.taskId != null) {
       return this.taskId;
@@ -113,7 +97,7 @@ public class TaskAttemptIdPBImpl extends
   }
 
   @Override
-  public void setTaskId(TaskId taskId) {
+  public synchronized void setTaskId(TaskId taskId) {
     maybeInitBuilder();
     if (taskId == null)
       builder.clearTaskId();
@@ -127,16 +111,4 @@ public class TaskAttemptIdPBImpl extends
   private TaskIdProto convertToProtoFormat(TaskId t) {
     return ((TaskIdPBImpl)t).getProto();
   }
-  
-  @Override
-  public String toString() {
-    String identifier = (getTaskId() == null) ? "none":
-      getTaskId().getJobId().getAppId().getClusterTimestamp() + "_" +
-      jobidFormat.format(getTaskId().getJobId().getAppId().getId()) + "_" +
-      ((getTaskId().getTaskType() == TaskType.MAP) ? "m" : "r") + "_" +
-      idFormat.format(getTaskId().getId()) + "_" +
-       getId();
-      
-    return "attempt_" + identifier;
-  }
-}  
+}
\ No newline at end of file
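
The synchronized keywords added here matter because these PB-backed records flip between an immutable proto and a mutable builder (the viaProto flag): getProto() merges local state and rebuilds, while setters may re-initialize the builder from the proto. Serializing all of those accessors on the instance lock keeps that swap consistent when one thread is still mutating an id while another serializes it. A rough sketch of the situation the locking covers (the setup values are hypothetical; uses java.util.concurrent):

    final TaskAttemptIdPBImpl attempt = new TaskAttemptIdPBImpl();
    attempt.setTaskId(taskId);   // taskId as in the earlier sketches

    ExecutorService pool = Executors.newFixedThreadPool(2);
    pool.submit(new Runnable() {
      public void run() { attempt.setId(1); }     // mutates the builder
    });
    pool.submit(new Runnable() {
      public void run() { attempt.getProto(); }   // merges and rebuilds the proto
    });
    pool.shutdown();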

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java Tue Sep 20 07:40:04 2011
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.mapreduce.v2.api.records.impl.pb;
 
-
-import java.text.NumberFormat;
-
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
@@ -29,30 +26,14 @@ import org.apache.hadoop.mapreduce.v2.pr
 import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder;
 import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto;
 import org.apache.hadoop.mapreduce.v2.util.MRProtoUtils;
-import org.apache.hadoop.yarn.api.records.ProtoBase;
-
 
-    
-public class TaskIdPBImpl extends ProtoBase<TaskIdProto> implements TaskId {
+public class TaskIdPBImpl extends TaskId {
   TaskIdProto proto = TaskIdProto.getDefaultInstance();
   TaskIdProto.Builder builder = null;
   boolean viaProto = false;
-  protected static final NumberFormat idFormat = NumberFormat.getInstance();
-  static {
-    idFormat.setGroupingUsed(false);
-    idFormat.setMinimumIntegerDigits(6);
-  }
-  
-  protected static final NumberFormat jobidFormat = NumberFormat.getInstance();
-  static {
-    jobidFormat.setGroupingUsed(false);
-    jobidFormat.setMinimumIntegerDigits(4);
-  }
-  
-  
-  private JobId jobId = null;
-  
-  
+
+  private JobId jobId = null;  
+
   public TaskIdPBImpl() {
     builder = TaskIdProto.newBuilder(proto);
   }
@@ -61,7 +42,7 @@ public class TaskIdPBImpl extends ProtoB
     this.proto = proto;
     viaProto = true;
   }
-  
+
   public synchronized TaskIdProto getProto() {
       mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
@@ -70,7 +51,8 @@ public class TaskIdPBImpl extends ProtoB
   }
 
   private synchronized void mergeLocalToBuilder() {
-    if (this.jobId != null && !((JobIdPBImpl)this.jobId).getProto().equals(builder.getJobId()) ) {
+    if (this.jobId != null
+        && !((JobIdPBImpl) this.jobId).getProto().equals(builder.getJobId())) {
       builder.setJobId(convertToProtoFormat(this.jobId));
     }
   }
@@ -89,8 +71,7 @@ public class TaskIdPBImpl extends ProtoB
     }
     viaProto = false;
   }
-    
-  
+
   @Override
   public synchronized int getId() {
     TaskIdProtoOrBuilder p = viaProto ? proto : builder;
@@ -102,6 +83,7 @@ public class TaskIdPBImpl extends ProtoB
     maybeInitBuilder();
     builder.setId((id));
   }
+
   @Override
   public synchronized JobId getJobId() {
     TaskIdProtoOrBuilder p = viaProto ? proto : builder;
@@ -122,6 +104,7 @@ public class TaskIdPBImpl extends ProtoB
       builder.clearJobId();
     this.jobId = jobId;
   }
+
   @Override
   public synchronized TaskType getTaskType() {
     TaskIdProtoOrBuilder p = viaProto ? proto : builder;
@@ -141,29 +124,19 @@ public class TaskIdPBImpl extends ProtoB
     builder.setTaskType(convertToProtoFormat(taskType));
   }
 
-  private synchronized JobIdPBImpl convertFromProtoFormat(JobIdProto p) {
+  private JobIdPBImpl convertFromProtoFormat(JobIdProto p) {
     return new JobIdPBImpl(p);
   }
 
-  private synchronized JobIdProto convertToProtoFormat(JobId t) {
+  private JobIdProto convertToProtoFormat(JobId t) {
     return ((JobIdPBImpl)t).getProto();
   }
 
-  private synchronized TaskTypeProto convertToProtoFormat(TaskType e) {
+  private TaskTypeProto convertToProtoFormat(TaskType e) {
     return MRProtoUtils.convertToProtoFormat(e);
   }
 
-  private synchronized TaskType convertFromProtoFormat(TaskTypeProto e) {
+  private TaskType convertFromProtoFormat(TaskTypeProto e) {
     return MRProtoUtils.convertFromProtoFormat(e);
   }
-
-  
-  @Override
-  public synchronized String toString() {
-    String jobIdentifier =  (jobId == null) ? "none":
-      jobId.getAppId().getClusterTimestamp() + "_" + 
-      jobidFormat.format(jobId.getAppId().getId()) + "_" + 
-      ((getTaskType() == TaskType.MAP) ? "m":"r") + "_" + idFormat.format(getId());
-    return "task_" + jobIdentifier;
-  }
-}  
+}
\ No newline at end of file

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java Tue Sep 20 07:40:04 2011
@@ -535,7 +535,7 @@ public class JobClient extends CLI {
     try {
       conf.setBooleanIfUnset("mapred.mapper.new-api", false);
       conf.setBooleanIfUnset("mapred.reducer.new-api", false);
-      Job job = Job.getInstance(cluster, conf);
+      Job job = Job.getInstance(conf);
       job.submit();
       return new NetworkedJob(job);
     } catch (InterruptedException ie) {
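
The submitter now builds the Job from the configuration alone; Job.getInstance(Configuration) defers the cluster connection until submit()/waitForCompletion(), so callers no longer have to thread a Cluster handle through. A minimal sketch of the same pattern from user code (MyDriver is a placeholder class name):

    Configuration conf = new Configuration();
    conf.setBooleanIfUnset("mapred.mapper.new-api", false);
    conf.setBooleanIfUnset("mapred.reducer.new-api", false);

    Job job = Job.getInstance(conf);   // no Cluster argument needed
    job.setJarByClass(MyDriver.class); // placeholder driver class
    job.submit();                      // connects to the cluster lazily, here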

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ReduceTask.java Tue Sep 20 07:40:04 2011
@@ -340,7 +340,14 @@ public class ReduceTask extends Task {
     // Initialize the codec
     codec = initCodec();
     RawKeyValueIterator rIter = null;
-    boolean isLocal = "local".equals(job.get(MRConfig.MASTER_ADDRESS, "local"));
+    
+    boolean isLocal = false; 
+    // local iff framework == classic && master address == local
+    String framework = job.get(MRConfig.FRAMEWORK_NAME, MRConfig.CLASSIC_FRAMEWORK_NAME);
+    if (framework.equals(MRConfig.CLASSIC_FRAMEWORK_NAME)) {
+    	isLocal = "local".equals(job.get(MRConfig.MASTER_ADDRESS, "local"));        	
+    }
+    
     if (!isLocal) {
       Class combinerClass = conf.getCombinerClass();
       CombineOutputCollector combineCollector = 
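
The previous check treated any job without a configured master address as local, which sent jobs running under YARN down the local-combiner path as well. The new logic only considers the reduce local when the classic framework is in use and its master address is "local". Restated as a small stand-alone predicate (isLocalRunner is a hypothetical helper name, not part of this commit):

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.MRConfig;

    final class LocalRunnerCheck {
      // local iff framework == classic AND the classic master address == "local"
      static boolean isLocalRunner(JobConf job) {
        String framework =
            job.get(MRConfig.FRAMEWORK_NAME, MRConfig.CLASSIC_FRAMEWORK_NAME);
        return MRConfig.CLASSIC_FRAMEWORK_NAME.equals(framework)
            && "local".equals(job.get(MRConfig.MASTER_ADDRESS, "local"));
      }
    }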

Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java?rev=1173012&r1=1173011&r2=1173012&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java Tue Sep 20 07:40:04 2011
@@ -112,7 +112,7 @@ public class Cluster {
   private Job[] getJobs(JobStatus[] stats) throws IOException {
     List<Job> jobs = new ArrayList<Job>();
     for (JobStatus stat : stats) {
-      jobs.add(new Job(this, stat, new JobConf(stat.getJobFile())));
+      jobs.add(Job.getInstance(this, stat, new JobConf(stat.getJobFile())));
     }
     return jobs.toArray(new Job[0]);
   }
@@ -152,7 +152,7 @@ public class Cluster {
   public Job getJob(JobID jobId) throws IOException, InterruptedException {
     JobStatus status = client.getJobStatus(jobId);
     if (status != null) {
-      return new Job(this, status, new JobConf(status.getJobFile()));
+      return Job.getInstance(this, status, new JobConf(status.getJobFile()));
     }
     return null;
   }


