hadoop-mapreduce-commits mailing list archives

From: sha...@apache.org
Subject: svn commit: r816664 [9/9] - in /hadoop/mapreduce/trunk: ./ conf/ src/benchmarks/gridmix/ src/benchmarks/gridmix/pipesort/ src/benchmarks/gridmix2/ src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/ src/c++/pipes/impl/ src/c++/task-controller...
Date: Fri, 18 Sep 2009 15:10:02 GMT
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupAndCleanupFailure.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupAndCleanupFailure.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupAndCleanupFailure.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupAndCleanupFailure.java Fri Sep 18 15:09:48 2009
@@ -27,6 +27,8 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 
 /**
  * Tests various failures in setup/cleanup of job, like 
@@ -229,10 +231,9 @@
       dfs = new MiniDFSCluster(conf, 4, true, null);
       fileSys = dfs.getFileSystem();
       JobConf jtConf = new JobConf();
-      jtConf.setInt("mapred.tasktracker.map.tasks.maximum", 1);
-      jtConf.setInt("mapred.tasktracker.reduce.tasks.maximum", 1);
-      jtConf.setLong("mapred.tasktracker.expiry.interval", 10 * 1000);
-      jtConf.setInt("mapred.reduce.copy.backoff", 4);
+      jtConf.setInt(TTConfig.TT_MAP_SLOTS, 1);
+      jtConf.setInt(TTConfig.TT_REDUCE_SLOTS, 1);
+      jtConf.setLong(JTConfig.JT_TRACKER_EXPIRY_INTERVAL, 10 * 1000);
       mr = new MiniMRCluster(taskTrackers, fileSys.getUri().toString(), 1,
                              null, null, jtConf);
       // test setup/cleanup throwing exceptions
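
This substitution pattern repeats throughout the commit: raw key strings become constants on TTConfig/JTConfig (note that the old mapred.reduce.copy.backoff setting is dropped rather than renamed here). A minimal sketch of the new style, assuming the 0.21-era class locations shown in the imports above:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;

public class SlotConfSketch {
  public static void main(String[] args) {
    JobConf jtConf = new JobConf();
    // One map and one reduce slot per TaskTracker (was
    // "mapred.tasktracker.map.tasks.maximum" / "...reduce.tasks.maximum").
    jtConf.setInt(TTConfig.TT_MAP_SLOTS, 1);
    jtConf.setInt(TTConfig.TT_REDUCE_SLOTS, 1);
    // Expire unresponsive trackers after 10 seconds
    // (was "mapred.tasktracker.expiry.interval").
    jtConf.setLong(JTConfig.JT_TRACKER_EXPIRY_INTERVAL, 10 * 1000);
  }
}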

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupTaskScheduling.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupTaskScheduling.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupTaskScheduling.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSetupTaskScheduling.java Fri Sep 18 15:09:48 2009
@@ -23,17 +23,13 @@
 
 import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobInProgress;
 import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobTracker;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.TaskType;
-import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import junit.extensions.TestSetup;
-import junit.framework.Test;
 import junit.framework.TestCase;
-import junit.framework.TestSuite;
 
 public class TestSetupTaskScheduling extends TestCase {
 
@@ -98,8 +94,8 @@
 
   public void setUp() throws Exception {
     JobConf conf = new JobConf();
-    conf.set("mapred.job.tracker", "localhost:0");
-    conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
+    conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+    conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
     jobTracker = new FakeJobTracker(conf, new Clock(), trackers);
     for (String tracker : trackers) {
       FakeObjectUtilities.establishFirstContact(jobTracker, tracker);
@@ -112,13 +108,13 @@
     conf.setSpeculativeExecution(false);
     conf.setNumMapTasks(2);
     conf.setNumReduceTasks(2);
-    conf.set("mapred.max.reduce.failures.percent", ".70");
-    conf.set("mapred.max.map.failures.percent", ".70");
+    conf.set(JobContext.REDUCE_FAILURES_MAXPERCENT, ".70");
+    conf.set(JobContext.MAP_FAILURES_MAX_PERCENT, ".70");
     FakeJobInProgress job = null;
     if (withSetup) {
       job = new FakeJobWithSetupTask(conf, jobTracker);
     } else {
-      conf.setBoolean("mapred.committer.job.setup.cleanup.needed", false);
+      conf.setBoolean(JobContext.SETUP_CLEANUP_NEEDED, false);
       job = new FakeJobInProgress(conf, jobTracker);
     }
     job.setClusterSize(trackers.length);
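
The same file also shows the JobContext constants for failure tolerance. A sketch using only keys that appear in this commit (the port-0 addresses let the OS pick free ports; the constants are reachable via org.apache.hadoop.mapred.JobContext, as the TestKeyFieldBasedComparator hunk below confirms):

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobContext;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;

public class SchedulerConfSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // was "mapred.job.tracker" / "mapred.job.tracker.http.address"
    conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
    conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
    // Tolerate up to 70% failed tasks per phase before failing the job.
    conf.set(JobContext.REDUCE_FAILURES_MAXPERCENT, ".70");
    conf.set(JobContext.MAP_FAILURES_MAX_PERCENT, ".70");
    // Skip the OutputCommitter's per-job setup/cleanup tasks.
    conf.setBoolean(JobContext.SETUP_CLEANUP_NEEDED, false);
  }
}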

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSeveral.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSeveral.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSeveral.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSeveral.java Fri Sep 18 15:09:48 2009
@@ -44,6 +44,7 @@
 import org.apache.hadoop.mapred.UtilsForTests.KillMapper;
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 
 /** 
@@ -100,7 +101,7 @@
         // it with the MiniMRCluster
 
         myListener = new MyListener();
-        conf.set("mapred.job.tracker.handler.count", "1");
+        conf.set(JTConfig.JT_IPC_HANDLER_COUNT, "1");
         mrCluster =   new MiniMRCluster(0, 0,
             numTT, dfs.getFileSystem().getUri().toString(), 
             1, null, null, MR_UGI, new JobConf());
@@ -208,7 +209,8 @@
 
     conf.set("mapred.reducer.class", "testjar.ExternalIdentityReducer");
 
-    conf.setLong("mapred.min.split.size", 1024*1024);
+    conf.setLong(org.apache.hadoop.mapreduce.lib.input.
+      FileInputFormat.SPLIT_MINSIZE, 1024*1024);
 
     conf.setNumReduceTasks(numReduces);
     conf.setJobPriority(JobPriority.HIGH);
@@ -317,9 +319,9 @@
     conf.setOutputFormat(NullOutputFormat.class);
     conf.setJobPriority(JobPriority.HIGH);
 
-    conf.setLong("mapred.map.max.attempts", 1);
+    conf.setLong(JobContext.MAP_MAX_ATTEMPTS, 1);
 
-    conf.set("hadoop.job.history.user.location", "none");
+    conf.set(JobContext.HISTORY_LOCATION, "none");
 
     conf.setNumReduceTasks(0);
 
@@ -368,13 +370,13 @@
     conf.setOutputFormat(NullOutputFormat.class);
     conf.setNumReduceTasks(0);
 
-    conf.setLong("mapred.map.max.attempts", 2);
+    conf.setLong(JobContext.MAP_MAX_ATTEMPTS, 2);
 
     final Path inDir = new Path("./wc/input");
     final Path outDir = new Path("./wc/output");
     final Path histDir = new Path("./wc/history");
 
-    conf.set("hadoop.job.history.user.location", histDir.toString());
+    conf.set(JobContext.HISTORY_LOCATION, histDir.toString());
 
     FileInputFormat.setInputPaths(conf, inDir);
     FileOutputFormat.setOutputPath(conf, outDir);
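
Two details above are worth noting: the new-API FileInputFormat constant is referenced fully qualified because the old-API class of the same name is already imported, and the user history location moves to JobContext.HISTORY_LOCATION. A condensed sketch under the same assumptions:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobContext;

public class SplitHistorySketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Fully qualified to dodge the old-API FileInputFormat import.
    conf.setLong(org.apache.hadoop.mapreduce.lib.input.
        FileInputFormat.SPLIT_MINSIZE, 1024 * 1024);  // was "mapred.min.split.size"
    conf.setLong(JobContext.MAP_MAX_ATTEMPTS, 1);     // was "mapred.map.max.attempts"
    conf.set(JobContext.HISTORY_LOCATION, "none");    // was "hadoop.job.history.user.location"
  }
}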

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java Fri Sep 18 15:09:48 2009
@@ -34,6 +34,7 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.Progressable;
 
 /**
@@ -66,7 +67,7 @@
 
     // use WordCount example
     FileSystem.setDefaultUri(conf, fileSys);
-    conf.set("mapred.job.tracker", jobTracker);
+    conf.set(JTConfig.JT_IPC_ADDRESS, jobTracker);
     conf.setJobName("foo");
 
     conf.setInputFormat(TextInputFormat.class);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpeculativeExecution.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpeculativeExecution.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpeculativeExecution.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpeculativeExecution.java Fri Sep 18 15:09:48 2009
@@ -27,6 +27,7 @@
 import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobInProgress;
 import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobTracker;
 import org.apache.hadoop.mapred.UtilsForTests.FakeClock;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 public class TestSpeculativeExecution extends TestCase {
 
@@ -49,8 +50,8 @@
       new TestSetup(new TestSuite(TestSpeculativeExecution.class)) {
       protected void setUp() throws Exception {
         JobConf conf = new JobConf();
-        conf.set("mapred.job.tracker", "localhost:0");
-        conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
+        conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+        conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
         jobTracker = new FakeJobTracker(conf, (clock = new SpecFakeClock()),
             trackers);
         for (String tracker : trackers) {
@@ -109,7 +110,7 @@
     conf.setSpeculativeExecution(true);
     conf.setNumMapTasks(5);
     conf.setNumReduceTasks(5);
-    conf.setFloat("mapred.speculative.execution.slowTaskThreshold", 0.5f);
+    conf.setFloat(JobContext.SPECULATIVE_SLOWTASK_THRESHOLD, 0.5f);
     FakeJobInProgress job = new FakeJobInProgress(conf, jobTracker);    
     job.initTasks();
     //schedule maps
@@ -145,7 +146,7 @@
     conf.setSpeculativeExecution(true);
     conf.setNumMapTasks(5);
     conf.setNumReduceTasks(0);
-    conf.setFloat("mapred.speculative.execution.slowTaskThreshold", 0.5f);
+    conf.setFloat(JobContext.SPECULATIVE_SLOWTASK_THRESHOLD, 0.5f);
     FakeJobInProgress job = new FakeJobInProgress(conf, jobTracker);
     job.initTasks();
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpilledRecordsCounter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpilledRecordsCounter.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpilledRecordsCounter.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSpilledRecordsCounter.java Fri Sep 18 15:09:48 2009
@@ -88,10 +88,10 @@
 
     conf.setNumMapTasks(3);
     conf.setNumReduceTasks(1);
-    conf.setInt("io.sort.mb", 1);
-    conf.setInt("io.sort.factor", 2);
-    conf.set("io.sort.record.percent", "0.05");
-    conf.set("io.sort.spill.percent", "0.80");
+    conf.setInt(JobContext.IO_SORT_MB, 1);
+    conf.setInt(JobContext.IO_SORT_FACTOR, 2);
+    conf.set(JobContext.MAP_SORT_RECORD_PERCENT, "0.05");
+    conf.set(JobContext.MAP_SORT_SPILL_PERCENT, "0.80");
 
 
     String TEST_ROOT_DIR = new Path(System.getProperty("test.build.data",
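
The io.sort.* knobs likewise acquire JobContext names. A sketch of the spill-forcing configuration above, with the old keys noted inline:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobContext;

public class SortSpillSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    conf.setInt(JobContext.IO_SORT_MB, 1);                // was "io.sort.mb"
    conf.setInt(JobContext.IO_SORT_FACTOR, 2);            // was "io.sort.factor"
    conf.set(JobContext.MAP_SORT_RECORD_PERCENT, "0.05"); // was "io.sort.record.percent"
    conf.set(JobContext.MAP_SORT_SPILL_PERCENT, "0.80");  // was "io.sort.spill.percent"
  }
}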

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSubmitJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSubmitJob.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSubmitJob.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSubmitJob.java Fri Sep 18 15:09:48 2009
@@ -20,7 +20,9 @@
 import java.io.IOException;
 
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.SleepJob;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.ToolRunner;
 
 import junit.framework.TestCase;
@@ -46,12 +48,12 @@
       throws Exception {
     JobConf jtConf = new JobConf();
     jtConf
-        .setLong(JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY, 1 * 1024L);
-    jtConf.setLong(JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY,
+        .setLong(MRConfig.MAPMEMORY_MB, 1 * 1024L);
+    jtConf.setLong(MRConfig.REDUCEMEMORY_MB,
         2 * 1024L);
-    jtConf.setLong(JobTracker.MAPRED_CLUSTER_MAX_MAP_MEMORY_MB_PROPERTY,
+    jtConf.setLong(JTConfig.JT_MAX_MAPMEMORY_MB,
         3 * 1024L);
-    jtConf.setLong(JobTracker.MAPRED_CLUSTER_MAX_REDUCE_MEMORY_MB_PROPERTY,
+    jtConf.setLong(JTConfig.JT_MAX_REDUCEMEMORY_MB,
         4 * 1024L);
 
     miniMRCluster = new MiniMRCluster(0, "file:///", 0, null, null, jtConf);
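
The memory keys split between two holders: cluster-wide slot sizes live on MRConfig, while the JobTracker-enforced per-job ceilings live on JTConfig. A sketch mirroring the hunk above:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;

public class MemoryLimitSketch {
  public static void main(String[] args) {
    JobConf jtConf = new JobConf();
    // Cluster-wide memory per map/reduce slot, in MB.
    jtConf.setLong(MRConfig.MAPMEMORY_MB, 1 * 1024L);
    jtConf.setLong(MRConfig.REDUCEMEMORY_MB, 2 * 1024L);
    // Maximum memory a single job may request for its tasks.
    jtConf.setLong(JTConfig.JT_MAX_MAPMEMORY_MB, 3 * 1024L);
    jtConf.setLong(JTConfig.JT_MAX_REDUCEMEMORY_MB, 4 * 1024L);
  }
}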

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTTMemoryReporting.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTTMemoryReporting.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTTMemoryReporting.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTTMemoryReporting.java Fri Sep 18 15:09:48 2009
@@ -26,6 +26,8 @@
 import org.apache.hadoop.util.LinuxMemoryCalculatorPlugin;
 import org.apache.hadoop.util.MemoryCalculatorPlugin;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
 
 import junit.framework.TestCase;
@@ -157,10 +159,8 @@
         4 * 1024 * 1024 * 1024L);
     conf.setLong(DummyMemoryCalculatorPlugin.MAXPMEM_TESTING_PROPERTY,
         2 * 1024 * 1024 * 1024L);
-    conf.setLong(JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY,
-        512L);
-    conf.setLong(
-        JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 1024L);
+    conf.setLong(MRConfig.MAPMEMORY_MB, 512L);
+    conf.setLong(MRConfig.REDUCEMEMORY_MB, 1024L);
     
     try {
       setUpCluster(conf);
@@ -202,9 +202,9 @@
 
   private void setUpCluster(JobConf conf)
                                 throws Exception {
-    conf.setClass("mapred.jobtracker.taskScheduler",
+    conf.setClass(JTConfig.JT_TASK_SCHEDULER,
         TestTTMemoryReporting.FakeTaskScheduler.class, TaskScheduler.class);
-    conf.set("mapred.job.tracker.handler.count", "1");
+    conf.set(JTConfig.JT_IPC_HANDLER_COUNT, "1");
     miniMRCluster = new MiniMRCluster(1, "file:///", 3, null, null, conf);
   }
   

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskFail.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskFail.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskFail.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskFail.java Fri Sep 18 15:09:48 2009
@@ -41,7 +41,7 @@
   implements Mapper<LongWritable, Text, Text, IntWritable> {
     String taskid;
     public void configure(JobConf job) {
-      taskid = job.get("mapred.task.id");
+      taskid = job.get(JobContext.TASK_ATTEMPT_ID);
     }
     public void map (LongWritable key, Text value, 
                      OutputCollector<Text, IntWritable> output, 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskLimits.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskLimits.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskLimits.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskLimits.java Fri Sep 18 15:09:48 2009
@@ -21,6 +21,8 @@
 import junit.framework.TestCase;
 import java.io.IOException;
 
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+
 /**
  * A JUnit test to test configured task limits.
  */
@@ -29,8 +31,8 @@
   static void runTest(int maxTasks, int numMaps, int numReds, 
                       boolean shouldFail) throws Exception {
     JobConf conf = new JobConf();
-    conf.setInt("mapred.jobtracker.maxtasks.per.job", maxTasks);
-    conf.set("mapred.job.tracker.handler.count", "1");
+    conf.setInt(JTConfig.JT_TASKS_PER_JOB, maxTasks);
+    conf.set(JTConfig.JT_IPC_HANDLER_COUNT, "1");
     MiniMRCluster mr = new MiniMRCluster(0, "file:///", 1, null, null, conf);
     JobTracker jt = mr.getJobTrackerRunner().getJobTracker();
     JobConf jc = mr.createJobConf();

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerBlacklisting.java Fri Sep 18 15:09:48 2009
@@ -37,6 +37,7 @@
 import org.apache.hadoop.mapred.JobTracker.ReasonForBlackListing;
 import org.apache.hadoop.mapred.TaskTrackerStatus.TaskTrackerHealthStatus;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
 
 public class TestTaskTrackerBlacklisting extends TestCase {
@@ -144,9 +145,9 @@
       new TestSetup(new TestSuite(TestTaskTrackerBlacklisting.class)) {
       protected void setUp() throws Exception {
         JobConf conf = new JobConf();
-        conf.set("mapred.job.tracker", "localhost:0");
-        conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
-        conf.setInt("mapred.max.tracker.blacklists", 1);
+        conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+        conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
+        conf.setInt(JTConfig.JT_MAX_TRACKER_BLACKLISTS, 1);
 
         jobTracker = 
           new FakeJobTracker(conf, (clock = new FakeJobTrackerClock()),
@@ -468,8 +469,8 @@
     conf.setSpeculativeExecution(false);
     conf.setNumMapTasks(0);
     conf.setNumReduceTasks(5);
-    conf.set("mapred.max.reduce.failures.percent", ".70");
-    conf.setBoolean("mapred.committer.job.setup.cleanup.needed", false);
+    conf.set(JobContext.REDUCE_FAILURES_MAXPERCENT, ".70");
+    conf.setBoolean(JobContext.SETUP_CLEANUP_NEEDED, false);
     conf.setMaxTaskFailuresPerTracker(1);
     FakeJobInProgress job = new FakeJobInProgress(conf, jobTracker);
     job.setClusterSize(trackers.length);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerLocalization.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerLocalization.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerLocalization.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerLocalization.java Fri Sep 18 15:09:48 2009
@@ -33,6 +33,7 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.server.tasktracker.Localizer;
@@ -71,7 +72,7 @@
   protected Task task;
   protected String[] localDirs;
   protected static LocalDirAllocator lDirAlloc =
-      new LocalDirAllocator("mapred.local.dir");
+      new LocalDirAllocator(MRConfig.LOCAL_DIR);
   protected Path attemptWorkDir;
   protected File[] attemptLogFiles;
   protected JobConf localizedTaskConf;
@@ -126,7 +127,7 @@
     for (int i = 0; i < numLocalDirs; i++) {
       localDirs[i] = new File(ROOT_MAPRED_LOCAL_DIR, "0_" + i).getPath();
     }
-    trackerFConf.setStrings("mapred.local.dir", localDirs);
+    trackerFConf.setStrings(MRConfig.LOCAL_DIR, localDirs);
 
     // Create the job configuration file. Same as trackerConf in this test.
     Job job = new Job(trackerFConf);
@@ -293,7 +294,7 @@
     for (String dir : localDirs) {
 
       File localDir = new File(dir);
-      assertTrue("mapred.local.dir " + localDir + " isn'task created!",
+      assertTrue(MRConfig.LOCAL_DIR + localDir + " isn'task created!",
           localDir.exists());
 
       File taskTrackerSubDir = new File(localDir, TaskTracker.SUBDIR);
@@ -395,13 +396,13 @@
         .getLocalPathToRead(TaskTracker.getJobWorkDir(task.getUser(), jobId
             .toString()), trackerFConf) != null);
 
-    // Check the setting of job.local.dir and job.jar which will eventually be
+    // Check the setting of mapreduce.job.local.dir and job.jar which will eventually be
     // used by the user's task
     boolean jobLocalDirFlag = false, mapredJarFlag = false;
     String localizedJobLocalDir =
         localizedJobConf.get(TaskTracker.JOB_LOCAL_DIR);
     String localizedJobJar = localizedJobConf.getJar();
-    for (String localDir : localizedJobConf.getStrings("mapred.local.dir")) {
+    for (String localDir : localizedJobConf.getStrings(MRConfig.LOCAL_DIR)) {
       if (localizedJobLocalDir.equals(localDir + Path.SEPARATOR
           + TaskTracker.getJobWorkDir(task.getUser(), jobId.toString()))) {
         jobLocalDirFlag = true;
@@ -415,7 +416,7 @@
         + " is not set properly to the target users directory : "
         + localizedJobLocalDir, jobLocalDirFlag);
     assertTrue(
-        "mapred.jar is not set properly to the target users directory : "
+        "mapreduce.job.jar is not set properly to the target users directory : "
             + localizedJobJar, mapredJarFlag);
   }
 
@@ -447,7 +448,7 @@
     // //////////
 
     // check the functionality of localizeTask
-    for (String dir : trackerFConf.getStrings("mapred.local.dir")) {
+    for (String dir : trackerFConf.getStrings(MRConfig.LOCAL_DIR)) {
       File attemptDir =
           new File(dir, TaskTracker.getLocalTaskDir(task.getUser(), jobId
               .toString(), taskId.toString()));
@@ -501,9 +502,9 @@
 
   protected void checkTaskLocalization()
       throws IOException {
-    // Make sure that the mapred.local.dir is sandboxed
+    // Make sure that the mapreduce.cluster.local.dir is sandboxed
     for (String childMapredLocalDir : localizedTaskConf
-        .getStrings("mapred.local.dir")) {
+        .getStrings(MRConfig.LOCAL_DIR)) {
       assertTrue("Local dir " + childMapredLocalDir + " is not sandboxed !!",
           childMapredLocalDir.endsWith(TaskTracker.getLocalTaskDir(task
               .getUser(), jobId.toString(), taskId.toString(), false)));
@@ -601,7 +602,7 @@
     assertTrue("Some task files are not deleted!! Number of stale paths is "
         + cleanupQueue.stalePaths.size(), cleanupQueue.stalePaths.size() == 0);
 
-    // Check that the empty $mapred.local.dir/taskTracker/$user dirs are still
+    // Check that the empty $mapreduce.cluster.local.dir/taskTracker/$user dirs are still
     // there.
     for (String localDir : localDirs) {
       Path userDir =
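
mapred.local.dir is now MRConfig.LOCAL_DIR (the updated comments above spell out the new mapreduce.cluster.local.dir name). A sketch of writing and reading the key the way this test does; the directory paths are hypothetical:

import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRConfig;

public class LocalDirSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Hypothetical local dirs; the test generates its own under the build dir.
    conf.setStrings(MRConfig.LOCAL_DIR, "/tmp/mapred/0_0", "/tmp/mapred/0_1");
    // An allocator keyed on the same constant picks among the configured dirs.
    LocalDirAllocator lDirAlloc = new LocalDirAllocator(MRConfig.LOCAL_DIR);
    for (String dir : conf.getStrings(MRConfig.LOCAL_DIR)) {
      System.out.println("local dir: " + dir);
    }
  }
}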

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerMemoryManager.java Fri Sep 18 15:09:48 2009
@@ -30,10 +30,13 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
+import org.apache.hadoop.mapreduce.util.ProcfsBasedProcessTree;
 import org.apache.hadoop.mapreduce.SleepJob;
 import org.apache.hadoop.mapreduce.util.TestProcfsBasedProcessTree;
-import org.apache.hadoop.util.ProcfsBasedProcessTree;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -57,10 +60,10 @@
 
   private void startCluster(JobConf conf)
       throws Exception {
-    conf.set("mapred.job.tracker.handler.count", "1");
-    conf.set("mapred.tasktracker.map.tasks.maximum", "1");
-    conf.set("mapred.tasktracker.reduce.tasks.maximum", "1");
-    conf.set("mapred.tasktracker.tasks.sleeptime-before-sigkill", "0");
+    conf.set(JTConfig.JT_IPC_HANDLER_COUNT, "1");
+    conf.set(TTConfig.TT_MAP_SLOTS, "1");
+    conf.set(TTConfig.TT_REDUCE_SLOTS, "1");
+    conf.set(TTConfig.TT_SLEEP_TIME_BEFORE_SIG_KILL, "0");
     miniMRCluster = new MiniMRCluster(1, "file:///", 1, null, null, conf);
   }
 
@@ -170,11 +173,8 @@
 
     // Start cluster with proper configuration.
     JobConf fConf = new JobConf();
-    fConf.setLong(JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY,
-        2 * 1024L);
-    fConf.setLong(
-        JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY,
-        2 * 1024L);
+    fConf.setLong(MRConfig.MAPMEMORY_MB, 2 * 1024L);
+    fConf.setLong(MRConfig.REDUCEMEMORY_MB, 2 * 1024L);
     startCluster(new JobConf());
 
     JobConf conf = new JobConf(miniMRCluster.createJobConf());
@@ -199,13 +199,10 @@
     // Start cluster with proper configuration.
     JobConf fConf = new JobConf();
     // very small value, so that no task escapes to successful completion.
-    fConf.set("mapred.tasktracker.taskmemorymanager.monitoring-interval",
+    fConf.set(TTConfig.TT_MEMORY_MANAGER_MONITORING_INTERVAL,
         String.valueOf(300));
-    fConf.setLong(JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY,
-        2 * 1024);
-    fConf.setLong(
-        JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY,
-        2 * 1024);
+    fConf.setLong(MRConfig.MAPMEMORY_MB, 2 * 1024);
+    fConf.setLong(MRConfig.REDUCEMEMORY_MB, 2 * 1024);
     startCluster(fConf);
     runJobExceedingMemoryLimit();
   }
@@ -227,7 +224,7 @@
     // Start cluster with proper configuration.
     JobConf fConf = new JobConf();
     // very small value, so that no task escapes to successful completion.
-    fConf.set("mapred.tasktracker.taskmemorymanager.monitoring-interval",
+    fConf.set(TTConfig.TT_MEMORY_MANAGER_MONITORING_INTERVAL,
         String.valueOf(300));
     //set old values, max vm property per task and upper limit on the tasks
     //vm
@@ -320,16 +317,16 @@
 
     // Start cluster with proper configuration.
     JobConf fConf = new JobConf();
-    fConf.setLong(JobTracker.MAPRED_CLUSTER_MAP_MEMORY_MB_PROPERTY,
+    fConf.setLong(MRConfig.MAPMEMORY_MB,
         1L);
     fConf.setLong(
-        JobTracker.MAPRED_CLUSTER_REDUCE_MEMORY_MB_PROPERTY, 1L);
+        MRConfig.REDUCEMEMORY_MB, 1L);
 
     // Because of the above, the total tt limit is 2mb
     long TASK_TRACKER_LIMIT = 2 * 1024 * 1024L;
 
     // very small value, so that no task escapes to successful completion.
-    fConf.set("mapred.tasktracker.taskmemorymanager.monitoring-interval",
+    fConf.set(TTConfig.TT_MEMORY_MANAGER_MONITORING_INTERVAL,
         String.valueOf(300));
 
     startCluster(fConf);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextInputFormat.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextInputFormat.java Fri Sep 18 15:09:48 2009
@@ -359,7 +359,8 @@
     final Text val = new Text();
     LOG.info("Reading a line from /dev/null");
     final Configuration conf = new Configuration(false);
-    conf.setInt("mapred.linerecordreader.maxlength", MAXLINE);
+    conf.setInt(org.apache.hadoop.mapreduce.lib.input.
+                LineRecordReader.MAX_LINE_LENGTH, MAXLINE);
     conf.setInt("io.file.buffer.size", BUF); // used by LRR
     final LineRecordReader lrr = new LineRecordReader(infNull, 0, MAXPOS, conf);
     assertFalse("Read a line from null", lrr.next(key, val));

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextOutputFormat.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTextOutputFormat.java Fri Sep 18 15:09:48 2009
@@ -47,7 +47,7 @@
   @SuppressWarnings("unchecked")
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
-    job.set("mapred.task.id", attempt);
+    job.set(JobContext.TASK_ATTEMPT_ID, attempt);
     FileOutputFormat.setOutputPath(job, workDir.getParent().getParent());
     FileOutputFormat.setWorkOutputPath(job, workDir);
     FileSystem fs = workDir.getFileSystem(job);
@@ -99,8 +99,8 @@
   public void testFormatWithCustomSeparator() throws Exception {
     JobConf job = new JobConf();
     String separator = "\u0001";
-    job.set("mapred.textoutputformat.separator", separator);
-    job.set("mapred.task.id", attempt);
+    job.set("mapreduce.output.textoutputformat.separator", separator);
+    job.set(JobContext.TASK_ATTEMPT_ID, attempt);
     FileOutputFormat.setOutputPath(job, workDir.getParent().getParent());
     FileOutputFormat.setWorkOutputPath(job, workDir);
     FileSystem fs = workDir.getFileSystem(job);
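
Note in the custom-separator test that the attempt id gets a constant (JobContext.TASK_ATTEMPT_ID) while the separator key stays a literal, just renamed into the mapreduce.output.* namespace. A sketch with an illustrative attempt-id string:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobContext;

public class TextOutputConfSketch {
  public static void main(String[] args) {
    JobConf job = new JobConf();
    // was "mapred.task.id"; the value is a task-attempt id string.
    job.set(JobContext.TASK_ATTEMPT_ID, "attempt_200909181509_0001_m_000000_0");
    // No constant for this one in the commit; the renamed literal is used.
    job.set("mapreduce.output.textoutputformat.separator", "\u0001");
  }
}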

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerBlacklistAcrossJobs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerBlacklistAcrossJobs.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerBlacklistAcrossJobs.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerBlacklistAcrossJobs.java Fri Sep 18 15:09:48 2009
@@ -25,6 +25,8 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 
 public class TestTrackerBlacklistAcrossJobs extends TestCase {
   private static final String hosts[] = new String[] {
@@ -36,7 +38,7 @@
     String hostname = "";
     
     public void configure(JobConf job) {
-      this.hostname = job.get("slave.host.name");
+      this.hostname = job.get(TTConfig.TT_HOST_NAME);
     }
     
     public void map(NullWritable key, NullWritable value,
@@ -57,7 +59,7 @@
     fileSys = FileSystem.get(conf);
     // start mr cluster
     JobConf jtConf = new JobConf();
-    jtConf.setInt("mapred.max.tracker.blacklists", 1);
+    jtConf.setInt(JTConfig.JT_MAX_TRACKER_BLACKLISTS, 1);
 
     mr = new MiniMRCluster(3, fileSys.getUri().toString(),
                            1, null, hosts, jtConf);
@@ -65,7 +67,7 @@
     // setup job configuration
     JobConf mrConf = mr.createJobConf();
     JobConf job = new JobConf(mrConf);
-    job.setInt("mapred.max.tracker.failures", 1);
+    job.setInt(JobContext.MAX_TASK_FAILURES_PER_TRACKER, 1);
     job.setNumMapTasks(6);
     job.setNumReduceTasks(0);
     job.setMapperClass(FailOnHostMapper.class);
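
This hunk pairs the two blacklisting scopes: JTConfig.JT_MAX_TRACKER_BLACKLISTS governs cross-job (cluster) blacklisting, while JobContext.MAX_TASK_FAILURES_PER_TRACKER is the per-job limit. A sketch:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobContext;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;

public class BlacklistConfSketch {
  public static void main(String[] args) {
    JobConf jtConf = new JobConf();
    // Cluster scope: blacklist a tracker once it is faulted by one job
    // (was "mapred.max.tracker.blacklists").
    jtConf.setInt(JTConfig.JT_MAX_TRACKER_BLACKLISTS, 1);

    JobConf job = new JobConf();
    // Job scope: one task failure bars the tracker for this job only
    // (was "mapred.max.tracker.failures").
    job.setInt(JobContext.MAX_TASK_FAILURES_PER_TRACKER, 1);
  }
}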

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerDistributedCacheManagerWithLinuxTaskController.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerDistributedCacheManagerWithLinuxTaskController.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerDistributedCacheManagerWithLinuxTaskController.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerDistributedCacheManagerWithLinuxTaskController.java Fri Sep 18 15:09:48 2009
@@ -150,7 +150,7 @@
             "");
     LOG.info("Leading path for cacheFirstFile is : "
         + trailingStringForFirstFile);
-    // The leading mapred.local.dir/0_[0-n]/taskTracker/$user string.
+    // The leading mapreduce.cluster.local.dir/0_[0-n]/taskTracker/$user string.
     String leadingStringForFirstFile =
         cachedFilePath.substring(0, cachedFilePath
             .lastIndexOf(trailingStringForFirstFile));

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerReservation.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerReservation.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerReservation.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestTrackerReservation.java Fri Sep 18 15:09:48 2009
@@ -24,6 +24,7 @@
 
 import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobInProgress;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
 
 import junit.extensions.TestSetup;
@@ -56,8 +57,8 @@
     TestSetup setup = new TestSetup(new TestSuite(TestTrackerReservation.class)) {
       protected void setUp() throws Exception {
         JobConf conf = new JobConf();
-        conf.set("mapred.job.tracker", "localhost:0");
-        conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
+        conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+        conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
         jobTracker = new FakeJobTracker(conf, new Clock(), trackers);
         for (String tracker : trackers) {
           FakeObjectUtilities.establishFirstContact(jobTracker, tracker);
@@ -91,8 +92,7 @@
     conf.setNumReduceTasks(1);
     conf.setSpeculativeExecution(false);
     
-    conf.setBoolean(
-        "mapred.committer.job.setup.cleanup.needed", false);
+    conf.setBoolean(JobContext.SETUP_CLEANUP_NEEDED, false);
     
     //Set task tracker objects for reservation.
     TaskTracker tt1 = jobTracker.getTaskTracker(trackers[0]);
@@ -179,9 +179,9 @@
     conf.setSpeculativeExecution(false);
     conf.setNumMapTasks(2);
     conf.setNumReduceTasks(2);
-    conf.set("mapred.max.reduce.failures.percent", ".70");
-    conf.set("mapred.max.map.failures.percent", ".70");
-    conf.setBoolean("mapred.committer.job.setup.cleanup.needed", false);
+    conf.set(JobContext.REDUCE_FAILURES_MAXPERCENT, ".70");
+    conf.set(JobContext.MAP_FAILURES_MAX_PERCENT, ".70");
+    conf.setBoolean(JobContext.SETUP_CLEANUP_NEEDED, false);
     conf.setMaxTaskFailuresPerTracker(1);
     FakeJobInProgress job = new FakeJobInProgress(conf, jobTracker);
     job.setClusterSize(trackers.length);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ThreadedMapBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ThreadedMapBenchmark.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ThreadedMapBenchmark.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ThreadedMapBenchmark.java Fri Sep 18 15:09:48 2009
@@ -60,7 +60,7 @@
                                 + "ThreadedMapBenchmark"));
   private static Path INPUT_DIR = new Path(BASE_DIR, "input");
   private static Path OUTPUT_DIR = new Path(BASE_DIR, "output");
-  private static final float FACTOR = 2.3f; // io.sort.mb set to 
+  private static final float FACTOR = 2.3f; // mapreduce.task.io.sort.mb set to 
                                             // (FACTOR * data_size) should 
                                             // result in only 1 spill
 
@@ -247,9 +247,9 @@
       job.setNumMapTasks(numMapsPerHost * cluster.getTaskTrackers());
       job.setNumReduceTasks(1);
       
-      // set io.sort.mb to avoid spill
+      // set mapreduce.task.io.sort.mb to avoid spill
       int ioSortMb = (int)Math.ceil(FACTOR * dataSizePerMap);
-      job.set("io.sort.mb", String.valueOf(ioSortMb));
+      job.set(JobContext.IO_SORT_MB, String.valueOf(ioSortMb));
       fs = FileSystem.get(job);
       
       LOG.info("Running sort with 1 spill per map");
@@ -261,12 +261,12 @@
                + " millisec");
       fs.delete(OUTPUT_DIR, true);
       
-      // set io.sort.mb to have multiple spills
+      // set mapreduce.task.io.sort.mb to have multiple spills
       JobConf spilledJob = new JobConf(job, ThreadedMapBenchmark.class);
       ioSortMb = (int)Math.ceil(FACTOR 
                                 * Math.ceil((double)dataSizePerMap 
                                             / numSpillsPerMap));
-      spilledJob.set("io.sort.mb", String.valueOf(ioSortMb));
+      spilledJob.set(JobContext.IO_SORT_MB, String.valueOf(ioSortMb));
       spilledJob.setJobName("threaded-map-benchmark-spilled");
       spilledJob.setJarByClass(ThreadedMapBenchmark.class);
       

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java Fri Sep 18 15:09:48 2009
@@ -361,9 +361,9 @@
 
     public void configure(JobConf conf) {
       try {
-        String taskId = conf.get("mapred.task.id");
+        String taskId = conf.get(JobContext.TASK_ATTEMPT_ID);
         id = Integer.parseInt(taskId.split("_")[4]);
-        totalMaps = Integer.parseInt(conf.get("mapred.map.tasks"));
+        totalMaps = Integer.parseInt(conf.get(JobContext.NUM_MAPS));
         fs = FileSystem.get(conf);
         signal = new Path(conf.get(getTaskSignalParameter(true)));
       } catch (IOException ioe) {
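
The id parse above depends on the attempt-id layout, attempt_<jtStartTime>_<jobId>_<m|r>_<task>_<attempt>: splitting on "_" puts the task number at index 4. A self-contained sketch with an illustrative id:

public class AttemptIdParseSketch {
  public static void main(String[] args) {
    String taskId = "attempt_200909181509_0001_m_000003_0";
    // Fields: [attempt, jtStartTime, jobId, m|r, taskNumber, attemptNumber]
    int id = Integer.parseInt(taskId.split("_")[4]);
    System.out.println(id);  // prints 3
  }
}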

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestDatamerge.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestDatamerge.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestDatamerge.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestDatamerge.java Fri Sep 18 15:09:48 2009
@@ -231,7 +231,7 @@
     JobConf job = new JobConf(conf, c);
     Path base = cluster.getFileSystem().makeQualified(new Path("/"+jointype));
     Path[] src = writeSimpleSrc(base, conf, srcs);
-    job.set("mapred.join.expr", CompositeInputFormat.compose(jointype,
+    job.set("mapreduce.join.expr", CompositeInputFormat.compose(jointype,
         SequenceFileInputFormat.class, src));
     job.setInt("testdatamerge.sources", srcs);
     job.setInputFormat(CompositeInputFormat.class);
@@ -302,7 +302,7 @@
       sb.append(",");
     }
     sb.append(CompositeInputFormat.compose(Fake_IF.class,"raboof") + "))");
-    job.set("mapred.join.expr", sb.toString());
+    job.set("mapreduce.join.expr", sb.toString());
     job.setInputFormat(CompositeInputFormat.class);
     Path outf = new Path(base, "out");
     FileOutputFormat.setOutputPath(job, outf);
@@ -352,7 +352,7 @@
     JobConf job = new JobConf();
     Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
     Path[] src = { new Path(base,"i0"), new Path("i1"), new Path("i2") };
-    job.set("mapred.join.expr", CompositeInputFormat.compose("outer",
+    job.set("mapreduce.join.expr", CompositeInputFormat.compose("outer",
         Fake_IF.class, src));
     job.setInputFormat(CompositeInputFormat.class);
     FileOutputFormat.setOutputPath(job, new Path(base, "out"));

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java Fri Sep 18 15:09:48 2009
@@ -54,7 +54,7 @@
 
     Path base = new Path(testdir, "/empty");
     Path[] src = { new Path(base, "i0"), new Path("i1"), new Path("i2") };
-    job.set("mapred.join.expr", CompositeInputFormat.compose("outer",
+    job.set("mapreduce.join.expr", CompositeInputFormat.compose("outer",
         IF_ClassLoaderChecker.class, src));
 
     CompositeInputFormat<NullWritable> inputFormat = new CompositeInputFormat<NullWritable>();

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java Fri Sep 18 15:09:48 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapred.OutputLogFilter;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -47,7 +48,7 @@
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
     conf = createJobConf();
     localConf = createJobConf();
-    localConf.set("map.output.key.field.separator", " ");
+    localConf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
   }
   public void configure(String keySpec, int expect) throws Exception {
     Path testdir = new Path("build/test/test.mapred.spill");
@@ -68,7 +69,7 @@
     conf.setOutputKeyComparatorClass(KeyFieldBasedComparator.class);
     conf.setKeyFieldComparatorOptions(keySpec);
     conf.setKeyFieldPartitionerOptions("-k1.1,1.1");
-    conf.set("map.output.key.field.separator", " ");
+    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
     conf.setMapperClass(InverseMapper.class);
     conf.setReducerClass(IdentityReducer.class);
     if (!fs.mkdirs(testdir)) {

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestLineInputFormat.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestLineInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestLineInputFormat.java Fri Sep 18 15:09:48 2009
@@ -54,7 +54,7 @@
     localFs.delete(workDir, true);
     FileInputFormat.setInputPaths(job, workDir);
     int numLinesPerMap = 5;
-    job.setInt("mapred.line.input.format.linespermap", numLinesPerMap);
+    job.setInt("mapreduce.input.lineinputformat.linespermap", numLinesPerMap);
 
     // for a variety of lengths
     for (int length = 0; length < MAX_LENGTH;

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java Fri Sep 18 15:09:48 2009
@@ -23,6 +23,7 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -92,7 +93,7 @@
 
     conf.setMapRunnerClass(MultithreadedMapRunner.class);
     
-    conf.setInt("mapred.map.multithreadedrunner.threads", 2);
+    conf.setInt(MultithreadedMapper.NUM_THREADS, 2);
 
     if (ioEx) {
       conf.setBoolean("multithreaded.ioException", true);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/FailJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/FailJob.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/FailJob.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/FailJob.java Fri Sep 18 15:09:48 2009
@@ -45,11 +45,13 @@
  * Mappers emit a token amount of data.
  */
 public class FailJob extends Configured implements Tool {
+  public static String FAIL_MAP = "mapreduce.failjob.map.fail";
+  public static String FAIL_REDUCE = "mapreduce.failjob.reduce.fail";
   public static class FailMapper 
       extends Mapper<LongWritable, Text, LongWritable, NullWritable> {
     public void map(LongWritable key, Text value, Context context
                ) throws IOException, InterruptedException {
-      if (context.getConfiguration().getBoolean("fail.job.map.fail", true)) {
+      if (context.getConfiguration().getBoolean(FAIL_MAP, true)) {
         throw new RuntimeException("Intentional map failure");
       }
       context.write(key, NullWritable.get());
@@ -61,7 +63,7 @@
 
     public void reduce(LongWritable key, Iterable<NullWritable> values,
                        Context context) throws IOException {
-      if (context.getConfiguration().getBoolean("fail.job.reduce.fail", false)) {
+      if (context.getConfiguration().getBoolean(FAIL_REDUCE, false)) {
       	throw new RuntimeException("Intentional reduce failure");
       }
       context.setStatus("No worries");
@@ -76,8 +78,8 @@
   public Job createJob(boolean failMappers, boolean failReducers, Path inputFile) 
       throws IOException {
     Configuration conf = getConf();
-    conf.setBoolean("fail.job.map.fail", failMappers);
-    conf.setBoolean("fail.job.reduce.fail", failReducers);
+    conf.setBoolean(FAIL_MAP, failMappers);
+    conf.setBoolean(FAIL_REDUCE, failReducers);
     Job job = new Job(conf, "fail");
     job.setJarByClass(FailJob.class);
     job.setMapperClass(FailMapper.class);
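
FailJob illustrates the convention the commit applies to test-private settings: declare the key once as a public constant under a descriptive mapreduce.* namespace, then set and read it through the constant. The same idiom in miniature:

import org.apache.hadoop.conf.Configuration;

public class NamedKeySketch {
  // One definition of the key, namespaced like this commit's test keys.
  public static final String FAIL_MAP = "mapreduce.failjob.map.fail";

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setBoolean(FAIL_MAP, true);
    if (conf.getBoolean(FAIL_MAP, true)) {
      System.out.println("map tasks configured to fail intentionally");
    }
  }
}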

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/GenericMRLoadGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/GenericMRLoadGenerator.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/GenericMRLoadGenerator.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/GenericMRLoadGenerator.java Fri Sep 18 15:09:48 2009
@@ -29,6 +29,7 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.examples.RandomTextWriter;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -51,7 +52,15 @@
 import org.apache.hadoop.util.ToolRunner;
 
 public class GenericMRLoadGenerator extends Configured implements Tool {
-
+  public static String MAP_PRESERVE_PERCENT = 
+    "mapreduce.loadgen.sort.map.preserve.percent";
+  public static String REDUCE_PRESERVE_PERCENT = 
+    "mapreduce.loadgen.sort.reduce.preserve.percent";
+  public static String INDIRECT_INPUT_FORMAT = 
+    "mapreduce.loadgen.indirect.input.format";
+  public static String INDIRECT_INPUT_FILE = 
+    "mapreduce.loadgen.indirect.input.file";
+  
   protected static int printUsage() {
     System.err.println(
     "Usage: [-m <maps>] [-r <reduces>]\n" +
@@ -93,17 +102,15 @@
           job.setOutputValueClass(
             Class.forName(argv[++i]).asSubclass(Writable.class));
         } else if ("-keepmap".equals(argv[i])) {
-          job.getConfiguration().set("hadoop.sort.map.keep.percent", 
-                argv[++i]);
+          job.getConfiguration().set(MAP_PRESERVE_PERCENT, argv[++i]);
         } else if ("-keepred".equals(argv[i])) {
-          job.getConfiguration().set("hadoop.sort.reduce.keep.percent", 
-                argv[++i]);
+          job.getConfiguration().set(REDUCE_PRESERVE_PERCENT, argv[++i]);
         } else if ("-outdir".equals(argv[i])) {
           FileOutputFormat.setOutputPath(job, new Path(argv[++i]));
         } else if ("-indir".equals(argv[i])) {
           FileInputFormat.addInputPaths(job, argv[++i]);
         } else if ("-inFormatIndirect".equals(argv[i])) {
-          job.getConfiguration().setClass("mapred.indirect.input.format",
+          job.getConfiguration().setClass(INDIRECT_INPUT_FORMAT,
               Class.forName(argv[++i]).asSubclass(InputFormat.class),
               InputFormat.class);
           job.setInputFormatClass(IndirectInputFormat.class);
@@ -140,14 +147,14 @@
       // No input dir? Generate random data
       System.err.println("No input path; ignoring InputFormat");
       confRandom(job);
-    } else if (null != conf.getClass("mapred.indirect.input.format", null)) {
+    } else if (null != conf.getClass(INDIRECT_INPUT_FORMAT, null)) {
       // specified IndirectInputFormat? Build src list
       JobClient jClient = new JobClient(conf);  
       Path sysdir = jClient.getSystemDir();
       Random r = new Random();
       Path indirInputFile = new Path(sysdir,
           Integer.toString(r.nextInt(Integer.MAX_VALUE), 36) + "_files");
-      conf.set("mapred.indirect.input.file", indirInputFile.toString());
+      conf.set(INDIRECT_INPUT_FILE, indirInputFile.toString());
       SequenceFile.Writer writer = SequenceFile.createWriter(
           sysdir.getFileSystem(conf), conf, indirInputFile,
           LongWritable.class, Text.class,
@@ -200,7 +207,7 @@
   static class RandomInputFormat extends InputFormat<Text, Text> {
 
     public List<InputSplit> getSplits(JobContext job) {
-      int numSplits = job.getConfiguration().getInt("mapred.map.tasks", 1);
+      int numSplits = job.getConfiguration().getInt(JobContext.NUM_MAPS, 1);
       List<InputSplit> splits = new ArrayList<InputSplit>();
       for (int i = 0; i < numSplits; ++i) {
         splits.add(new IndirectInputFormat.IndirectSplit(
@@ -260,12 +267,12 @@
 
     public void setup(Context context) {
       Configuration conf = new Configuration();
-      bytesToWrite = conf.getLong("test.randomtextwrite.bytes_per_map",
+      bytesToWrite = conf.getLong(RandomTextWriter.BYTES_PER_MAP,
                                     1*1024*1024*1024);
-      keymin = conf.getInt("test.randomtextwrite.min_words_key", 5);
-      keymax = conf.getInt("test.randomtextwrite.max_words_key", 10);
-      valmin = conf.getInt("test.randomtextwrite.min_words_value", 5);
-      valmax = conf.getInt("test.randomtextwrite.max_words_value", 10);
+      keymin = conf.getInt(RandomTextWriter.MIN_KEY, 5);
+      keymax = conf.getInt(RandomTextWriter.MAX_KEY, 10);
+      valmin = conf.getInt(RandomTextWriter.MIN_VALUE, 5);
+      valmax = conf.getInt(RandomTextWriter.MAX_VALUE, 10);
     }
 
     public void map(Text key, Text val, Context context) 
@@ -303,21 +310,21 @@
 
     Configuration conf = job.getConfiguration();
     final ClusterStatus cluster = new JobClient(conf).getClusterStatus();
-    int numMapsPerHost = conf.getInt("test.randomtextwrite.maps_per_host", 10);
+    int numMapsPerHost = conf.getInt(RandomTextWriter.MAPS_PER_HOST, 10);
     long numBytesToWritePerMap =
-      conf.getLong("test.randomtextwrite.bytes_per_map", 1*1024*1024*1024);
+      conf.getLong(RandomTextWriter.BYTES_PER_MAP, 1*1024*1024*1024);
     if (numBytesToWritePerMap == 0) {
       throw new IOException(
-          "Cannot have test.randomtextwrite.bytes_per_map set to 0");
+          "Cannot have " + RandomTextWriter.BYTES_PER_MAP + " set to 0");
     }
-    long totalBytesToWrite = conf.getLong("test.randomtextwrite.total_bytes",
+    long totalBytesToWrite = conf.getLong(RandomTextWriter.TOTAL_BYTES,
          numMapsPerHost * numBytesToWritePerMap * cluster.getTaskTrackers());
     int numMaps = (int)(totalBytesToWrite / numBytesToWritePerMap);
     if (numMaps == 0 && totalBytesToWrite > 0) {
       numMaps = 1;
-      conf.setLong("test.randomtextwrite.bytes_per_map", totalBytesToWrite);
+      conf.setLong(RandomTextWriter.BYTES_PER_MAP, totalBytesToWrite);
     }
-    conf.setInt("mapred.map.tasks", numMaps);
+    conf.setInt(JobContext.NUM_MAPS, numMaps);
   }
 
 
@@ -331,7 +338,7 @@
 
     public void setup(Context context) {
       this.keep = context.getConfiguration().
-        getFloat("hadoop.sort.map.keep.percent", (float)100.0) / (float)100.0;
+        getFloat(MAP_PRESERVE_PERCENT, (float)100.0) / (float)100.0;
     }
     
     protected void emit(K key, V val, Context context)
@@ -352,7 +359,7 @@
 
     public void setup(Context context) {
       this.keep = context.getConfiguration().getFloat(
-        "hadoop.sort.reduce.keep.percent", (float)100.0) / (float)100.0;
+        REDUCE_PRESERVE_PERCENT, (float)100.0) / (float)100.0;
     }
 
     protected void emit(K key, V val, Context context)
@@ -424,7 +431,7 @@
         throws IOException {
 
       Configuration conf = job.getConfiguration();
-      Path src = new Path(conf.get("mapred.indirect.input.file", null));
+      Path src = new Path(conf.get(INDIRECT_INPUT_FILE, null));
       FileSystem fs = src.getFileSystem(conf);
 
       List<InputSplit> splits = new ArrayList<InputSplit>();
@@ -443,7 +450,7 @@
         TaskAttemptContext context) throws IOException, InterruptedException {
       Configuration conf = context.getConfiguration();
       InputFormat<K, V> indirIF = (InputFormat)ReflectionUtils.newInstance(
-          conf.getClass("mapred.indirect.input.format",
+          conf.getClass(INDIRECT_INPUT_FORMAT,
             SequenceFileInputFormat.class), conf);
       IndirectSplit is = ((IndirectSplit)split);
       return indirIF.createRecordReader(new FileSplit(is.getPath(), 0,

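The renamed keys above feed a small sizing computation that is easy to misread: the total byte count defaults to maps-per-host times bytes-per-map times the tracker count, the map count is the integer quotient of the two byte figures, and a non-empty request smaller than one map's quota is clamped to a single map that writes everything. A minimal standalone sketch of that arithmetic, with illustrative values in place of the cluster query:

    public class MapSizingSketch {
      public static void main(String[] args) {
        long bytesPerMap = 1L * 1024 * 1024 * 1024; // default: 1 GB per map
        int mapsPerHost = 10;                       // MAPS_PER_HOST default
        int taskTrackers = 4;                       // assumed cluster size
        long totalBytes = (long) mapsPerHost * bytesPerMap * taskTrackers;
        int numMaps = (int) (totalBytes / bytesPerMap); // 40 here
        if (numMaps == 0 && totalBytes > 0) {
          numMaps = 1;                // never drop a non-empty request
          bytesPerMap = totalBytes;   // one map writes the whole thing
        }
        System.out.println(numMaps + " maps, " + bytesPerMap + " bytes each");
      }
    }
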
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java Fri Sep 18 15:09:48 2009
@@ -132,7 +132,7 @@
    */
   public static Job createCopyJob(Configuration conf, Path outdir, 
       Path... indirs) throws Exception {
-    conf.setInt("mapred.map.tasks", 3);
+    conf.setInt(JobContext.NUM_MAPS, 3);
     Job theJob = new Job(conf);
     theJob.setJobName("DataMoveJob");
 
@@ -158,7 +158,7 @@
   public static Job createFailJob(Configuration conf, Path outdir, 
       Path... indirs) throws Exception {
 
-    conf.setInt("mapred.map.max.attempts", 2);
+    conf.setInt(JobContext.MAP_MAX_ATTEMPTS, 2);
     Job theJob = new Job(conf);
     theJob.setJobName("Fail-Job");
 
@@ -370,7 +370,7 @@
   public static TaskAttemptContext createDummyMapTaskAttemptContext(
       Configuration conf) {
     TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
-    conf.set("mapred.task.id", tid.toString());
+    conf.set(JobContext.TASK_ATTEMPT_ID, tid.toString());
     return new TaskAttemptContext(conf, tid);    
   }
 

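The createDummyMapTaskAttemptContext change is worth a usage note: the attempt id now round-trips through JobContext.TASK_ATTEMPT_ID rather than the raw "mapred.task.id" literal. A minimal sketch, assuming the test-utility classes are on the classpath:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.MapReduceTestUtil;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    public class DummyContextSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        TaskAttemptContext ctx =
            MapReduceTestUtil.createDummyMapTaskAttemptContext(conf);
        // The helper stored the attempt id under the constant, so a
        // lookup by constant (not by literal) recovers it.
        System.out.println(conf.get(JobContext.TASK_ATTEMPT_ID));
      }
    }
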
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/SleepJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/SleepJob.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/SleepJob.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/SleepJob.java Fri Sep 18 15:09:48 2009
@@ -29,7 +29,6 @@
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.util.Tool;
@@ -43,6 +42,12 @@
  * some disk space.
  */
 public class SleepJob extends Configured implements Tool {
+  public static String MAP_SLEEP_COUNT = "mapreduce.sleepjob.map.sleep.count";
+  public static String REDUCE_SLEEP_COUNT = 
+    "mapreduce.sleepjob.reduce.sleep.count";
+  public static String MAP_SLEEP_TIME = "mapreduce.sleepjob.map.sleep.time";
+  public static String REDUCE_SLEEP_TIME = 
+    "mapreduce.sleepjob.reduce.sleep.time";
 
   public static class SleepJobPartitioner extends 
       Partitioner<IntWritable, NullWritable> {
@@ -64,7 +69,7 @@
     public List<InputSplit> getSplits(JobContext jobContext) {
       List<InputSplit> ret = new ArrayList<InputSplit>();
       int numSplits = jobContext.getConfiguration().
-                        getInt("mapred.map.tasks", 1);
+                        getInt(JobContext.NUM_MAPS, 1);
       for (int i = 0; i < numSplits; ++i) {
         ret.add(new EmptySplit());
       }
@@ -75,9 +80,9 @@
         InputSplit ignored, TaskAttemptContext taskContext)
         throws IOException {
       Configuration conf = taskContext.getConfiguration();
-      final int count = conf.getInt("sleep.job.map.sleep.count", 1);
+      final int count = conf.getInt(MAP_SLEEP_COUNT, 1);
       if (count < 0) throw new IOException("Invalid map count: " + count);
-      final int redcount = conf.getInt("sleep.job.reduce.sleep.count", 1);
+      final int redcount = conf.getInt(REDUCE_SLEEP_COUNT, 1);
       if (redcount < 0)
         throw new IOException("Invalid reduce count: " + redcount);
       final int emitPerMapTask = (redcount * taskContext.getNumReduceTasks());
@@ -123,9 +128,9 @@
       throws IOException, InterruptedException {
       Configuration conf = context.getConfiguration();
       this.mapSleepCount =
-        conf.getInt("sleep.job.map.sleep.count", mapSleepCount);
+        conf.getInt(MAP_SLEEP_COUNT, mapSleepCount);
       this.mapSleepDuration =
-        conf.getLong("sleep.job.map.sleep.time" , 100) / mapSleepCount;
+        conf.getLong(MAP_SLEEP_TIME , 100) / mapSleepCount;
     }
 
     public void map(IntWritable key, IntWritable value, Context context
@@ -160,9 +165,9 @@
       throws IOException, InterruptedException {
       Configuration conf = context.getConfiguration();
       this.reduceSleepCount =
-        conf.getInt("sleep.job.reduce.sleep.count", reduceSleepCount);
+        conf.getInt(REDUCE_SLEEP_COUNT, reduceSleepCount);
       this.reduceSleepDuration =
-        conf.getLong("sleep.job.reduce.sleep.time" , 100) / reduceSleepCount;
+        conf.getLong(REDUCE_SLEEP_TIME , 100) / reduceSleepCount;
     }
 
     public void reduce(IntWritable key, Iterable<NullWritable> values,
@@ -192,11 +197,11 @@
                        long reduceSleepTime, int reduceSleepCount) 
       throws IOException {
     Configuration conf = getConf();
-    conf.setLong("sleep.job.map.sleep.time", mapSleepTime);
-    conf.setLong("sleep.job.reduce.sleep.time", reduceSleepTime);
-    conf.setInt("sleep.job.map.sleep.count", mapSleepCount);
-    conf.setInt("sleep.job.reduce.sleep.count", reduceSleepCount);
-    conf.setInt("mapred.map.tasks", numMapper);
+    conf.setLong(MAP_SLEEP_TIME, mapSleepTime);
+    conf.setLong(REDUCE_SLEEP_TIME, reduceSleepTime);
+    conf.setInt(MAP_SLEEP_COUNT, mapSleepCount);
+    conf.setInt(REDUCE_SLEEP_COUNT, reduceSleepCount);
+    conf.setInt(JobContext.NUM_MAPS, numMapper);
     Job job = new Job(conf, "sleep");
     job.setNumReduceTasks(numReducer);
     job.setJarByClass(SleepJob.class);

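With the four SleepJob keys promoted to public constants, callers can configure a sleep job without quoting literals. A minimal sketch (values illustrative); note that per the setup methods above, each task's per-record nap is the total sleep time divided by the sleep count:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.SleepJob;

    public class SleepConfSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setLong(SleepJob.MAP_SLEEP_TIME, 10000L); // 10 s total per map,
        conf.setInt(SleepJob.MAP_SLEEP_COUNT, 10);     // split into 1 s naps
        conf.setLong(SleepJob.REDUCE_SLEEP_TIME, 5000L);
        conf.setInt(SleepJob.REDUCE_SLEEP_COUNT, 5);
      }
    }
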
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java Fri Sep 18 15:09:48 2009
@@ -530,8 +530,8 @@
       job.setMapOutputValueClass(IntWritable.class);
       job.setOutputKeyClass(IntWritable.class);
       job.setOutputValueClass(IntWritable.class);
-      job.getConfiguration().setInt("mapred.job.reduce.markreset.buffer.size",
-                                    128);  
+      job.getConfiguration().
+        setInt(JobContext.REDUCE_MARKRESET_BUFFER_SIZE,128);  
       job.setInputFormatClass(TextInputFormat.class);
       job.setOutputFormatClass(TextOutputFormat.class);
       FileInputFormat.addInputPath(job,

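REDUCE_MARKRESET_BUFFER_SIZE bounds how much of the value stream a reducer may buffer for mark/reset. A hedged sketch of the two-pass pattern this test exercises, using the MarkableIterator wrapper:

    import java.io.IOException;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.mapreduce.MarkableIterator;
    import org.apache.hadoop.mapreduce.Reducer;

    public class TwoPassReducer
        extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
      public void reduce(IntWritable key, Iterable<IntWritable> values,
          Context context) throws IOException, InterruptedException {
        MarkableIterator<IntWritable> mitr =
            new MarkableIterator<IntWritable>(values.iterator());
        mitr.mark();                        // remember the run's start
        int sum = 0;
        while (mitr.hasNext()) {
          sum += mitr.next().get();         // pass 1: total the values
        }
        mitr.reset();                       // rewind to the mark
        while (mitr.hasNext()) {            // pass 2: scale by the total
          context.write(key, new IntWritable(mitr.next().get() * sum));
        }
      }
    }
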
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java Fri Sep 18 15:09:48 2009
@@ -42,6 +42,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.filecache.TaskDistributedCacheManager;
 import org.apache.hadoop.mapreduce.filecache.TrackerDistributedCacheManager;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import org.apache.hadoop.security.UserGroupInformation;
 
 public class TestTrackerDistributedCacheManager extends TestCase {
@@ -85,7 +86,7 @@
         new File(TEST_ROOT_DIR, "cachebasedir").getAbsolutePath();
 
     conf = new Configuration();
-    conf.setLong("local.cache.size", LOCAL_CACHE_LIMIT);
+    conf.setLong(TTConfig.TT_LOCAL_CACHE_SIZE, LOCAL_CACHE_LIMIT);
     conf.setStrings(JobConf.MAPRED_LOCAL_DIR_PROPERTY, localDirs);
     conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java Fri Sep 18 15:09:48 2009
@@ -84,10 +84,10 @@
     System.out.println("inputData:");
     System.out.println(inputData.toString());
 
-    conf.setInt("aggregator.descriptor.num", 1);
-    conf.set("aggregator.descriptor.0", 
+    conf.setInt(ValueAggregatorJobBase.DESCRIPTOR_NUM, 1);
+    conf.set(ValueAggregatorJobBase.DESCRIPTOR + ".0", 
       "UserDefined,org.apache.hadoop.mapreduce.lib.aggregate.AggregatorTests");
-    conf.setLong("aggregate.max.num.unique.values", 14);
+    conf.setLong(UniqValueCount.MAX_NUM_UNIQUE_VALUES, 14);
     
     Job job = new Job(conf);
     FileInputFormat.setInputPaths(job, INPUT_DIR);

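The aggregate framework numbers its descriptors, so the key for the n-th one is the DESCRIPTOR prefix plus ".n", and each value is "UserDefined," followed by a descriptor class name. A sketch with two hypothetical descriptor classes (the org.example names are placeholders, not real classes):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.lib.aggregate.UniqValueCount;
    import org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJobBase;

    public class AggregatorConfSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setInt(ValueAggregatorJobBase.DESCRIPTOR_NUM, 2);
        conf.set(ValueAggregatorJobBase.DESCRIPTOR + ".0",
            "UserDefined,org.example.WordCountDescriptor");   // hypothetical
        conf.set(ValueAggregatorJobBase.DESCRIPTOR + ".1",
            "UserDefined,org.example.UniqueUserDescriptor");  // hypothetical
        conf.setLong(UniqValueCount.MAX_NUM_UNIQUE_VALUES, 10000);
      }
    }
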
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java Fri Sep 18 15:09:48 2009
@@ -52,9 +52,10 @@
     StringBuffer expectedOutput = new StringBuffer();
     constructInputOutputData(inputData, expectedOutput, numOfInputLines);
     
-    conf.set("mapred.data.field.separator", "-");
-    conf.set("map.output.key.value.fields.spec", "6,5,1-3:0-");
-    conf.set("reduce.output.key.value.fields.spec", ":4,3,2,1,0,0-");
+    conf.set(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "-");
+    conf.set(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "6,5,1-3:0-");
+    conf.set(
+      FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, ":4,3,2,1,0,0-");
     Job job = MapReduceTestUtil.createJob(conf, inDir, outDir,
       1, 1, inputData.toString());
     job.setMapperClass(FieldSelectionMapper.class);

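The spec strings deserve a gloss: each is keyFields ':' valueFields, where the lists hold single indices, closed ranges like "1-3", and open ranges like "0-" (field 0 to the end). My reading of "6,5,1-3:0-" against one record, sketched:

    public class FieldSpecSketch {
      public static void main(String[] args) {
        // A record using "-" as the field separator, as configured above.
        String[] f = "f0-f1-f2-f3-f4-f5-f6".split("-");
        // Key spec 6,5,1-3 selects fields 6, 5, then 1 through 3:
        System.out.println(f[6] + "-" + f[5] + "-" + f[1] + "-" + f[2]
            + "-" + f[3]);                     // f6-f5-f1-f2-f3
        // Value spec 0- selects field 0 to the end: the whole record.
      }
    }
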
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java Fri Sep 18 15:09:48 2009
@@ -52,7 +52,7 @@
       new CompositeInputFormat<NullWritable>();
     // create dummy TaskAttemptID
     TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
-    conf.set("mapred.task.id", tid.toString());
+    conf.set(JobContext.TASK_ATTEMPT_ID, tid.toString());
     inputFormat.createRecordReader(inputFormat.getSplits(new Job(conf)).get(0), 
       new TaskAttemptContext(conf, tid));
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java Fri Sep 18 15:09:48 2009
@@ -28,6 +28,7 @@
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.OutputLogFilter;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
@@ -42,7 +43,7 @@
   public TestMRKeyFieldBasedComparator() throws IOException {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
     conf = createJobConf();
-    conf.set("map.output.key.field.separator", " ");
+    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
   }
   
   private void testComparator(String keySpec, int expect) 
@@ -51,9 +52,9 @@
     Path inDir = new Path(root, "test_cmp/in");
     Path outDir = new Path(root, "test_cmp/out");
     
-    conf.set("mapred.text.key.comparator.options", keySpec);
-    conf.set("mapred.text.key.partitioner.options", "-k1.1,1.1");
-    conf.set("map.output.key.field.separator", " ");
+    conf.set("mapreduce.partition.keycomparator.options", keySpec);
+    conf.set("mapreduce.partition.keypartitioner.options", "-k1.1,1.1");
+    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
 
     Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 2,
                 line1 +"\n" + line2 + "\n"); 
@@ -119,7 +120,7 @@
   public void testWithoutMRJob(String keySpec, int expect) throws Exception {
     KeyFieldBasedComparator<Void, Void> keyFieldCmp = 
       new KeyFieldBasedComparator<Void, Void>();
-    conf.set("mapred.text.key.comparator.options", keySpec);
+    conf.set("mapreduce.partition.keycomparator.options", keySpec);
     keyFieldCmp.setConf(conf);
     int result = keyFieldCmp.compare(line1_bytes, 0, line1_bytes.length,
         line2_bytes, 0, line2_bytes.length);

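The comparator options follow the Unix sort(1) -k syntax, with fields split on the configured separator. A hedged standalone sketch in the style of testWithoutMRJob above, comparing field 2 numerically:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator;

    public class CmpSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("mapreduce.partition.keycomparator.options", "-k2,2n");
        KeyFieldBasedComparator<Void, Void> cmp =
            new KeyFieldBasedComparator<Void, Void>();
        cmp.setConf(conf);
        byte[] a = "x\t10".getBytes();   // tab is the default separator
        byte[] b = "y\t9".getBytes();
        // Positive result: numerically 10 sorts after 9, even though
        // "10" < "9" as text.
        System.out.println(cmp.compare(a, 0, a.length, b, 0, b.length));
      }
    }
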
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java Fri Sep 18 15:09:48 2009
@@ -50,7 +50,7 @@
     // check if the hashcode is correct with specified keyspec
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2,2");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,2");
     kfbp.setConf(conf);
     String expectedOutput = "def";
     byte[] eBytes = expectedOutput.getBytes();
@@ -62,7 +62,7 @@
     // test with invalid end index in keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2,5");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,5");
     kfbp.setConf(conf);
     expectedOutput = "def\txyz";
     eBytes = expectedOutput.getBytes();
@@ -74,7 +74,7 @@
     // test with 0 end index in keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2");
     kfbp.setConf(conf);
     expectedOutput = "def\txyz";
     eBytes = expectedOutput.getBytes();
@@ -86,7 +86,7 @@
     // test with invalid keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k10");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k10");
     kfbp.setConf(conf);
     assertEquals("Partitioner doesnt work as expected", 0, 
                  kfbp.getPartition(new Text(input), new Text(), numReducers));
@@ -94,7 +94,7 @@
     // test with multiple keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2,2 -k4,4");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,2 -k4,4");
     kfbp.setConf(conf);
     input = "abc\tdef\tpqr\txyz";
     expectedOutput = "def";
@@ -110,7 +110,7 @@
     // test with invalid start index in keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2,2 -k30,21 -k4,4 -k5");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,2 -k30,21 -k4,4 -k5");
     kfbp.setConf(conf);
     expectedOutput = "def";
     eBytes = expectedOutput.getBytes();

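When several keyspecs are given, as in the "-k2,2 -k4,4" case above, the partition hash mixes all the selected fields. A minimal sketch of the standalone pattern the test uses:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedPartitioner;

    public class KfbpSketch {
      public static void main(String[] args) {
        KeyFieldBasedPartitioner<Text, Text> kfbp =
            new KeyFieldBasedPartitioner<Text, Text>();
        Configuration conf = new Configuration();
        // The hash is computed over fields 2 and 4 ("def" and "xyz" below).
        conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,2 -k4,4");
        kfbp.setConf(conf);
        Text key = new Text("abc\tdef\tpqr\txyz");
        System.out.println(kfbp.getPartition(key, new Text(), 16));
      }
    }
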
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java Fri Sep 18 15:09:48 2009
@@ -34,6 +34,7 @@
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.mapreduce.JobContext;
 
 public class TestTotalOrderPartitioner extends TestCase {
 
@@ -82,7 +83,7 @@
                                  ).makeQualified(fs);
     Path p = new Path(testdir, testname + "/_partition.lst");
     TotalOrderPartitioner.setPartitionFile(conf, p);
-    conf.setInt("mapred.reduce.tasks", splits.length + 1);
+    conf.setInt(JobContext.NUM_REDUCES, splits.length + 1);
     SequenceFile.Writer w = null;
     try {
       w = SequenceFile.createWriter(fs, conf, p,
@@ -104,7 +105,7 @@
     Configuration conf = new Configuration();
     Path p = TestTotalOrderPartitioner.<Text>writePartitionFile(
         "totalordermemcmp", conf, splitStrings);
-    conf.setClass("mapred.mapoutput.key.class", Text.class, Object.class);
+    conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
     try {
       partitioner.setConf(conf);
       NullWritable nw = NullWritable.get();
@@ -123,8 +124,8 @@
     Configuration conf = new Configuration();
     Path p = TestTotalOrderPartitioner.<Text>writePartitionFile(
         "totalorderbinarysearch", conf, splitStrings);
-    conf.setBoolean("total.order.partitioner.natural.order", false);
-    conf.setClass("mapred.mapoutput.key.class", Text.class, Object.class);
+    conf.setBoolean(TotalOrderPartitioner.NATURAL_ORDER, false);
+    conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
     try {
       partitioner.setConf(conf);
       NullWritable nw = NullWritable.get();
@@ -157,9 +158,9 @@
     Arrays.sort(revSplitStrings, new ReverseStringComparator());
     Path p = TestTotalOrderPartitioner.<Text>writePartitionFile(
         "totalordercustomcomparator", conf, revSplitStrings);
-    conf.setBoolean("total.order.partitioner.natural.order", false);
-    conf.setClass("mapred.mapoutput.key.class", Text.class, Object.class);
-    conf.setClass("mapred.output.key.comparator.class",
+    conf.setBoolean(TotalOrderPartitioner.NATURAL_ORDER, false);
+    conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
+    conf.setClass(JobContext.KEY_COMPARATOR,
       ReverseStringComparator.class, RawComparator.class);
     ArrayList<Check<Text>> revCheck = new ArrayList<Check<Text>>();
     revCheck.add(new Check<Text>(new Text("aaaaa"), 9));

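Setting NATURAL_ORDER to false, as the binary-search and custom-comparator tests do, tells the partitioner to skip its trie fast path and binary-search the split points with the job's key comparator. A sketch of the wiring (the comparator class and partition file path are caller-supplied; the file must be pre-sorted in that comparator's order):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.RawComparator;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;

    public class TotalOrderWiring {
      static void configure(Configuration conf, Path partFile,
          Class<? extends RawComparator> cmpClass) {
        conf.setBoolean(TotalOrderPartitioner.NATURAL_ORDER, false);
        conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class,
            Object.class);
        conf.setClass(JobContext.KEY_COMPARATOR, cmpClass,
            RawComparator.class);
        TotalOrderPartitioner.setPartitionFile(conf, partFile);
      }
    }
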
Modified: hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java (original)
+++ hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java Fri Sep 18 15:09:48 2009
@@ -56,6 +56,7 @@
 import org.apache.hadoop.mapred.SequenceFileRecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -424,7 +425,7 @@
     conf.setReducerClass(HArchivesReducer.class);
     conf.setMapOutputKeyClass(IntWritable.class);
     conf.setMapOutputValueClass(Text.class);
-    conf.set("hadoop.job.history.user.location", "none");
+    conf.set(JobContext.HISTORY_LOCATION, "none");
     FileInputFormat.addInputPath(conf, jobDirectory);
     //make sure no speculative execution is done
     conf.setSpeculativeExecution(false);
@@ -459,7 +460,7 @@
       // this is tightly tied to map reduce
       // since it does not expose an api 
       // to get the partition
-      partId = conf.getInt("mapred.task.partition", -1);
+      partId = conf.getInt(JobContext.TASK_PARTITION, -1);
       // create a file name using the partition
       // we need to write to this directory
       tmpOutputDir = FileOutputFormat.getWorkOutputPath(conf);

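The TASK_PARTITION lookup is the pattern HadoopArchives leans on because, as the comment in the hunk says, the old API exposes no accessor for the partition number. A hedged sketch of that pattern in isolation (the scratch-directory naming is illustrative):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.JobContext;

    public class PartitionLookup {
      static Path scratchFor(Configuration conf, Path workDir)
          throws IOException {
        // -1 signals we are not running inside a framework-launched task.
        int partId = conf.getInt(JobContext.TASK_PARTITION, -1);
        if (partId < 0) {
          throw new IOException("no partition id in configuration");
        }
        return new Path(workDir, "part-" + partId);
      }
    }
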
Modified: hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java (original)
+++ hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java Fri Sep 18 15:09:48 2009
@@ -46,6 +46,7 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.LongSumReducer;
+import org.apache.hadoop.mapreduce.lib.map.RegexMapper;
 
 /**
  * Logalyzer: A utility tool for archiving and analyzing hadoop logs.
@@ -64,7 +65,17 @@
 public class Logalyzer {
   // Constants
   private static Configuration fsConfig = new Configuration();
+  public static String SORT_COLUMNS = 
+    "logalizer.logcomparator.sort.columns";
+  public static String COLUMN_SEPARATOR = 
+    "logalizer.logcomparator.column.separator";
   
+  static {
+    Configuration.addDeprecation("mapred.reducer.sort", 
+      new String[] {SORT_COLUMNS});
+    Configuration.addDeprecation("mapred.reducer.separator", 
+      new String[] {COLUMN_SEPARATOR});
+  }
   /** A {@link Mapper} that extracts text matching a regular expression. */
   public static class LogRegexMapper<K extends WritableComparable>
     extends MapReduceBase
@@ -73,7 +84,7 @@
     private Pattern pattern;
     
     public void configure(JobConf job) {
-      pattern = Pattern.compile(job.get("mapred.mapper.regex"));
+      pattern = Pattern.compile(job.get(RegexMapper.PATTERN));
     }
     
     public void map(K key, Text value,
@@ -105,13 +116,13 @@
       }
       
       //Initialize the specification for *comparison*
-      String sortColumns = this.conf.get("mapred.reducer.sort", null);
+      String sortColumns = this.conf.get(SORT_COLUMNS, null);
       if (sortColumns != null) {
         sortSpec = sortColumns.split(",");
       }
       
       //Column-separator
-      columnSeparator = this.conf.get("mapred.reducer.separator", "");
+      columnSeparator = this.conf.get(COLUMN_SEPARATOR, "");
     }
     
     public Configuration getConf() {
@@ -217,9 +228,9 @@
     grepJob.setInputFormat(TextInputFormat.class);
     
     grepJob.setMapperClass(LogRegexMapper.class);
-    grepJob.set("mapred.mapper.regex", grepPattern);
-    grepJob.set("mapred.reducer.sort", sortColumns);
-    grepJob.set("mapred.reducer.separator", columnSeparator);
+    grepJob.set(RegexMapper.PATTERN, grepPattern);
+    grepJob.set(SORT_COLUMNS, sortColumns);
+    grepJob.set(COLUMN_SEPARATOR, columnSeparator);
     
     grepJob.setCombinerClass(LongSumReducer.class);
     grepJob.setReducerClass(LongSumReducer.class);

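The new static block is the interesting part of this file: Configuration.addDeprecation registers a mapping so that reads and writes through the retired key are forwarded to the new one, with a warning logged. A minimal sketch of the round trip:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.Logalyzer;

    public class DeprecationSketch {
      public static void main(String[] args) {
        // Touching Logalyzer runs its static block and registers the
        // mapping before the old key is used.
        String newKey = Logalyzer.SORT_COLUMNS;
        Configuration conf = new Configuration();
        conf.set("mapred.reducer.sort", "0,3");  // retired name
        System.out.println(conf.get(newKey));    // prints 0,3
      }
    }
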
Modified: hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java (original)
+++ hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java Fri Sep 18 15:09:48 2009
@@ -28,6 +28,8 @@
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.ParserConfigurationException;
 
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.w3c.dom.Document;
 import org.w3c.dom.NodeList;
 import org.w3c.dom.Node;
@@ -153,21 +155,21 @@
           }
         }
 
-        if ("mapred.job.queue.name".equals(attr) && value != null) {
+        if (JobContext.QUEUE_NAME.equals(attr) && value != null) {
           queue = value;
         }
 
-        if ("mapred.job.name".equals(attr) && value != null) {
+        if (JobContext.JOB_NAME.equals(attr) && value != null) {
           jobName = value;
         }
 
-        clusterMapMB = maybeGetIntValue("mapred.cluster.map.memory.mb", attr,
+        clusterMapMB = maybeGetIntValue(MRConfig.MAPMEMORY_MB, attr,
             value, clusterMapMB);
-        clusterReduceMB = maybeGetIntValue("mapred.cluster.reduce.memory.mb",
+        clusterReduceMB = maybeGetIntValue(MRConfig.REDUCEMEMORY_MB,
             attr, value, clusterReduceMB);
-        jobMapMB = maybeGetIntValue("mapred.job.map.memory.mb", attr, value,
+        jobMapMB = maybeGetIntValue(JobContext.MAP_MEMORY_MB, attr, value,
             jobMapMB);
-        jobReduceMB = maybeGetIntValue("mapred.job.reduce.memory.mb", attr,
+        jobReduceMB = maybeGetIntValue(JobContext.REDUCE_MEMORY_MB, attr,
             value, jobReduceMB);
       }
 

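maybeGetIntValue is only called in this hunk, never shown; its body is presumably a guarded parse. A plausible sketch consistent with the call sites above (an assumption, not the committed implementation):

    // Assumption: returns the parsed value when this attribute is the
    // property we want, otherwise leaves the previous value untouched.
    private static Integer maybeGetIntValue(String propName, String attr,
        String value, Integer oldValue) {
      if (propName.equals(attr) && value != null) {
        return Integer.parseInt(value);
      }
      return oldValue;
    }
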

