hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sha...@apache.org
Subject svn commit: r816664 [8/9] - in /hadoop/mapreduce/trunk: ./ conf/ src/benchmarks/gridmix/ src/benchmarks/gridmix/pipesort/ src/benchmarks/gridmix2/ src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/ src/c++/pipes/impl/ src/c++/task-controller...
Date Fri, 18 Sep 2009 15:10:02 GMT
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/GenericMRLoadGenerator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/GenericMRLoadGenerator.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/GenericMRLoadGenerator.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/GenericMRLoadGenerator.java Fri Sep 18 15:09:48 2009
@@ -29,6 +29,7 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.examples.RandomTextWriter;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -89,15 +90,18 @@
           job.setOutputValueClass(
             Class.forName(argv[++i]).asSubclass(Writable.class));
         } else if ("-keepmap".equals(argv[i])) {
-          job.set("hadoop.sort.map.keep.percent", argv[++i]);
+          job.set(org.apache.hadoop.mapreduce.
+           GenericMRLoadGenerator.MAP_PRESERVE_PERCENT, argv[++i]);
         } else if ("-keepred".equals(argv[i])) {
-          job.set("hadoop.sort.reduce.keep.percent", argv[++i]);
+          job.set(org.apache.hadoop.mapreduce.
+            GenericMRLoadGenerator.REDUCE_PRESERVE_PERCENT, argv[++i]);
         } else if ("-outdir".equals(argv[i])) {
           FileOutputFormat.setOutputPath(job, new Path(argv[++i]));
         } else if ("-indir".equals(argv[i])) {
           FileInputFormat.addInputPaths(job, argv[++i]);
         } else if ("-inFormatIndirect".equals(argv[i])) {
-          job.setClass("mapred.indirect.input.format",
+          job.setClass(org.apache.hadoop.mapreduce.
+              GenericMRLoadGenerator.INDIRECT_INPUT_FORMAT,
               Class.forName(argv[++i]).asSubclass(InputFormat.class),
               InputFormat.class);
           job.setInputFormat(IndirectInputFormat.class);
@@ -133,14 +137,18 @@
       // No input dir? Generate random data
       System.err.println("No input path; ignoring InputFormat");
       confRandom(job);
-    } else if (null != job.getClass("mapred.indirect.input.format", null)) {
+    } else if (null != job.getClass(
+       org.apache.hadoop.mapreduce.GenericMRLoadGenerator.INDIRECT_INPUT_FORMAT,
+       null)) {
       // specified IndirectInputFormat? Build src list
       JobClient jClient = new JobClient(job);  
       Path sysdir = jClient.getSystemDir();
       Random r = new Random();
       Path indirInputFile = new Path(sysdir,
           Integer.toString(r.nextInt(Integer.MAX_VALUE), 36) + "_files");
-      job.set("mapred.indirect.input.file", indirInputFile.toString());
+      job.set(
+        org.apache.hadoop.mapreduce.GenericMRLoadGenerator.INDIRECT_INPUT_FILE,
+        indirInputFile.toString());
       SequenceFile.Writer writer = SequenceFile.createWriter(
           sysdir.getFileSystem(job), job, indirInputFile,
           LongWritable.class, Text.class,
@@ -249,12 +257,12 @@
     }
 
     public void configure(JobConf job) {
-      bytesToWrite = job.getLong("test.randomtextwrite.bytes_per_map",
+      bytesToWrite = job.getLong(RandomTextWriter.BYTES_PER_MAP,
                                     1*1024*1024*1024);
-      keymin = job.getInt("test.randomtextwrite.min_words_key", 5);
-      keymax = job.getInt("test.randomtextwrite.max_words_key", 10);
-      valmin = job.getInt("test.randomtextwrite.min_words_value", 5);
-      valmax = job.getInt("test.randomtextwrite.max_words_value", 10);
+      keymin = job.getInt(RandomTextWriter.MIN_KEY, 5);
+      keymax = job.getInt(RandomTextWriter.MAX_KEY, 10);
+      valmin = job.getInt(RandomTextWriter.MIN_VALUE, 5);
+      valmax = job.getInt(RandomTextWriter.MAX_VALUE, 10);
     }
 
     public void map(Text key, Text val, OutputCollector<Text,Text> output,
@@ -291,19 +299,19 @@
     job.setMapperClass(RandomMapOutput.class);
 
     final ClusterStatus cluster = new JobClient(job).getClusterStatus();
-    int numMapsPerHost = job.getInt("test.randomtextwrite.maps_per_host", 10);
+    int numMapsPerHost = job.getInt(RandomTextWriter.MAPS_PER_HOST, 10);
     long numBytesToWritePerMap =
-      job.getLong("test.randomtextwrite.bytes_per_map", 1*1024*1024*1024);
+      job.getLong(RandomTextWriter.BYTES_PER_MAP, 1*1024*1024*1024);
     if (numBytesToWritePerMap == 0) {
       throw new IOException(
-          "Cannot have test.randomtextwrite.bytes_per_map set to 0");
+          "Cannot have " + RandomTextWriter.BYTES_PER_MAP + " set to 0");
     }
-    long totalBytesToWrite = job.getLong("test.randomtextwrite.total_bytes",
+    long totalBytesToWrite = job.getLong(RandomTextWriter.TOTAL_BYTES,
          numMapsPerHost * numBytesToWritePerMap * cluster.getTaskTrackers());
     int numMaps = (int)(totalBytesToWrite / numBytesToWritePerMap);
     if (numMaps == 0 && totalBytesToWrite > 0) {
       numMaps = 1;
-      job.setLong("test.randomtextwrite.bytes_per_map", totalBytesToWrite);
+      job.setLong(RandomTextWriter.BYTES_PER_MAP, totalBytesToWrite);
     }
     job.setNumMapTasks(numMaps);
   }
@@ -337,7 +345,9 @@
       extends SampleMapReduceBase<K,V> implements Mapper<K,V,K,V> {
 
     public void configure(JobConf job) {
-      setKeep(job.getFloat("hadoop.sort.map.keep.percent", (float)100.0) /
+      setKeep(job.getFloat(
+    	org.apache.hadoop.mapreduce.GenericMRLoadGenerator.MAP_PRESERVE_PERCENT,
+    	(float)100.0) /
         (float)100.0);
     }
 
@@ -353,7 +363,8 @@
       extends SampleMapReduceBase<K,V> implements Reducer<K,V,K,V> {
 
     public void configure(JobConf job) {
-      setKeep(job.getFloat("hadoop.sort.reduce.keep.percent", (float)100.0) /
+      setKeep(job.getFloat(org.apache.hadoop.mapreduce.
+        GenericMRLoadGenerator.REDUCE_PRESERVE_PERCENT, (float)100.0) /
         (float)100.0);
     }
 
@@ -401,7 +412,9 @@
     public InputSplit[] getSplits(JobConf job, int numSplits)
         throws IOException {
 
-      Path src = new Path(job.get("mapred.indirect.input.file", null));
+      Path src = new Path(job.get(
+        org.apache.hadoop.mapreduce.GenericMRLoadGenerator.INDIRECT_INPUT_FILE,
+        null));
       FileSystem fs = src.getFileSystem(job);
 
       ArrayList<IndirectSplit> splits = new ArrayList<IndirectSplit>(numSplits);
@@ -418,7 +431,8 @@
     public RecordReader getRecordReader(InputSplit split, JobConf job,
         Reporter reporter) throws IOException {
       InputFormat indirIF = (InputFormat)ReflectionUtils.newInstance(
-          job.getClass("mapred.indirect.input.format",
+          job.getClass(org.apache.hadoop.mapreduce.
+            GenericMRLoadGenerator.INDIRECT_INPUT_FORMAT, 
             SequenceFileInputFormat.class), job);
       IndirectSplit is = ((IndirectSplit)split);
       return indirIF.getRecordReader(new FileSplit(is.getPath(), 0,

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/MiniMRCluster.java Fri Sep 18 15:09:48 2009
@@ -28,7 +28,10 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.NetworkTopology;
@@ -107,7 +110,7 @@
       try {
         jc = (jc == null) ? createJobConf() : createJobConf(jc);
         File f = new File("build/test/mapred/local").getAbsoluteFile();
-        jc.set("mapred.local.dir",f.getAbsolutePath());
+        jc.set(MRConfig.LOCAL_DIR, f.getAbsolutePath());
         jc.setClass("topology.node.switch.mapping.impl", 
             StaticMapping.class, DNSToSwitchMapping.class);
         String id = 
@@ -160,13 +163,13 @@
         conf = createJobConf(cfg);
       }
       if (hostname != null) {
-        conf.set("slave.host.name", hostname);
+        conf.set(TTConfig.TT_HOST_NAME, hostname);
       }
-      conf.set("mapred.task.tracker.http.address", "0.0.0.0:0");
-      conf.set("mapred.task.tracker.report.address", 
+      conf.set(TTConfig.TT_HTTP_ADDRESS, "0.0.0.0:0");
+      conf.set(TTConfig.TT_REPORT_ADDRESS, 
                 "127.0.0.1:" + taskTrackerPort);
       File localDirBase = 
-        new File(conf.get("mapred.local.dir")).getAbsoluteFile();
+        new File(conf.get(MRConfig.LOCAL_DIR)).getAbsoluteFile();
       localDirBase.mkdirs();
       StringBuffer localPath = new StringBuffer();
       for(int i=0; i < numDir; ++i) {
@@ -183,8 +186,8 @@
         }
         localPath.append(localDirs[i]);
       }
-      conf.set("mapred.local.dir", localPath.toString());
-      LOG.info("mapred.local.dir is " +  localPath);
+      conf.set(MRConfig.LOCAL_DIR, localPath.toString());
+      LOG.info(MRConfig.LOCAL_DIR + " is " +  localPath);
       try {
         tt = new TaskTracker(conf);
         isInitialized = true;
@@ -337,11 +340,11 @@
                                   UnixUserGroupInformation ugi) {
     JobConf result = new JobConf(conf);
     FileSystem.setDefaultUri(result, namenode);
-    result.set("mapred.job.tracker", "localhost:"+jobTrackerPort);
-    result.set("mapred.job.tracker.http.address", 
+    result.set(JTConfig.JT_IPC_ADDRESS, "localhost:"+jobTrackerPort);
+    result.set(JTConfig.JT_HTTP_ADDRESS, 
                         "127.0.0.1:" + jobTrackerInfoPort);
     if (ugi != null) {
-      result.set("mapred.system.dir", "/mapred/system");
+      result.set(JTConfig.JT_SYSTEM_DIR, "/mapred/system");
       UnixUserGroupInformation.saveToConf(result,
           UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);
     }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/NotificationTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/NotificationTestCase.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/NotificationTestCase.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/NotificationTestCase.java Fri Sep 18 15:09:48 2009
@@ -140,8 +140,8 @@
   protected JobConf createJobConf() {
     JobConf conf = super.createJobConf();
     conf.setJobEndNotificationURI(getNotificationUrlTemplate());
-    conf.setInt("job.end.retry.attempts", 3);
-    conf.setInt("job.end.retry.interval", 200);
+    conf.setInt(JobContext.END_NOTIFICATION_RETRIES, 3);
+    conf.setInt(JobContext.END_NOTIFICATION_RETRIE_INTERVAL, 200);
     return conf;
   }
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/ReliabilityTest.java Fri Sep 18 15:09:48 2009
@@ -35,6 +35,7 @@
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
@@ -83,7 +84,7 @@
   
   public int run(String[] args) throws Exception {
     Configuration conf = getConf();
-    if ("local".equals(conf.get("mapred.job.tracker", "local"))) {
+    if ("local".equals(conf.get(JTConfig.JT_IPC_ADDRESS, "local"))) {
       displayUsage();
     }
     String[] otherArgs = 
@@ -103,8 +104,8 @@
     
     //to protect against the case of jobs failing even when multiple attempts
     //fail, set some high values for the max attempts
-    conf.setInt("mapred.map.max.attempts", 10);
-    conf.setInt("mapred.reduce.max.attempts", 10);
+    conf.setInt(JobContext.MAP_MAX_ATTEMPTS, 10);
+    conf.setInt(JobContext.REDUCE_MAX_ATTEMPTS, 10);
     runSleepJobTest(new JobClient(new JobConf(conf)), conf);
     runSortJobTests(new JobClient(new JobConf(conf)), conf);
     return 0;

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/SortValidator.java Fri Sep 18 15:09:48 2009
@@ -33,6 +33,7 @@
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapred.lib.HashPartitioner;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.fs.*;
@@ -55,7 +56,11 @@
 
   static private final IntWritable sortInput = new IntWritable(1); 
   static private final IntWritable sortOutput = new IntWritable(2); 
-
+  static public String SORT_REDUCES = 
+    "mapreduce.sortvalidator.sort.reduce.tasks";
+  static public String MAPS_PER_HOST = "mapreduce.sortvalidator.mapsperhost";
+  static public String REDUCES_PER_HOST = 
+    "mapreduce.sortvalidator.reducesperhost";
   static void printUsage() {
     System.err.println("sortvalidate [-m <maps>] [-r <reduces>] [-deep] " +
                        "-sortInput <sort-input-dir> -sortOutput <sort-output-dir>");
@@ -64,7 +69,7 @@
 
   static private IntWritable deduceInputFile(JobConf job) {
     Path[] inputPaths = FileInputFormat.getInputPaths(job);
-    Path inputFile = new Path(job.get("map.input.file"));
+    Path inputFile = new Path(job.get(JobContext.MAP_INPUT_FILE));
 
     // value == one for sort-input; value == two for sort-output
     return (inputFile.getParent().equals(inputPaths[0])) ? 
@@ -208,12 +213,12 @@
           
           // Figure the 'current' partition and no. of reduces of the 'sort'
           try {
-            URI inputURI = new URI(job.get("map.input.file"));
+            URI inputURI = new URI(job.get(JobContext.MAP_INPUT_FILE));
             String inputFile = inputURI.getPath();
             // part file is of the form part-r-xxxxx
             partition = Integer.valueOf(inputFile.substring(
               inputFile.lastIndexOf("part") + 7)).intValue();
-            noSortReducers = job.getInt("sortvalidate.sort.reduce.tasks", -1);
+            noSortReducers = job.getInt(SORT_REDUCES, -1);
           } catch (Exception e) {
             System.err.println("Caught: " + e);
             System.exit(-1);
@@ -322,7 +327,7 @@
 
       int noSortReduceTasks = 
         outputfs.listStatus(sortOutput, sortPathsFilter).length;
-      jobConf.setInt("sortvalidate.sort.reduce.tasks", noSortReduceTasks);
+      jobConf.setInt(SORT_REDUCES, noSortReduceTasks);
       int noSortInputpaths =  inputfs.listStatus(sortInput).length;
 
       jobConf.setInputFormat(NonSplitableSequenceFileInputFormat.class);
@@ -347,7 +352,7 @@
       FileOutputFormat.setOutputPath(jobConf, outputPath);
       
       // Uncomment to run locally in a single process
-      //job_conf.set("mapred.job.tracker", "local");
+      //job_conf.set(JTConfig.JT, "local");
       Path[] inputPaths = FileInputFormat.getInputPaths(jobConf);
       System.out.println("\nSortValidator.RecordStatsChecker: Validate sort " +
                          "from " + inputPaths[0] + " (" + 
@@ -468,11 +473,11 @@
       ClusterStatus cluster = client.getClusterStatus();
       if (noMaps == -1) {
         noMaps = cluster.getTaskTrackers() * 
-          jobConf.getInt("test.sortvalidate.maps_per_host", 10);
+          jobConf.getInt(MAPS_PER_HOST, 10);
       }
       if (noReduces == -1) {
         noReduces = (int) (cluster.getMaxReduceTasks() * 0.9);
-        String sortReduces = jobConf.get("test.sortvalidate.reduces_per_host");
+        String sortReduces = jobConf.get(REDUCES_PER_HOST);
         if (sortReduces != null) {
            noReduces = cluster.getTaskTrackers() * 
                            Integer.parseInt(sortReduces);
@@ -491,7 +496,7 @@
       FileOutputFormat.setOutputPath(jobConf, outputPath);
       
       // Uncomment to run locally in a single process
-      //job_conf.set("mapred.job.tracker", "local");
+      //job_conf.set(JTConfig.JT, "local");
       Path[] inputPaths = FileInputFormat.getInputPaths(jobConf);
       System.out.println("\nSortValidator.RecordChecker: Running on " +
                          cluster.getTaskTrackers() +

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestBadRecords.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestBadRecords.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestBadRecords.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestBadRecords.java Fri Sep 18 15:09:48 2009
@@ -72,7 +72,7 @@
     conf.setJobName("mr");
     conf.setNumMapTasks(1);
     conf.setNumReduceTasks(1);
-    conf.setInt("mapred.task.timeout", 30*1000);
+    conf.setInt(JobContext.TASK_TIMEOUT, 30*1000);
     SkipBadRecords.setMapperMaxSkipRecords(conf, Long.MAX_VALUE);
     SkipBadRecords.setReducerMaxSkipGroups(conf, Long.MAX_VALUE);
     

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCompressedEmptyMapOutputs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCompressedEmptyMapOutputs.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCompressedEmptyMapOutputs.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCompressedEmptyMapOutputs.java Fri Sep 18 15:09:48 2009
@@ -49,8 +49,8 @@
   throws Exception {
     // Scale down the default settings for RandomWriter for the test-case
     // Generates NUM_HADOOP_SLAVES * RW_MAPS_PER_HOST * RW_BYTES_PER_MAP -> 1MB
-    job.setInt("test.randomwrite.bytes_per_map", RW_BYTES_PER_MAP);
-    job.setInt("test.randomwriter.maps_per_host", RW_MAPS_PER_HOST);
+    job.setInt(RandomWriter.BYTES_PER_MAP, RW_BYTES_PER_MAP);
+    job.setInt(RandomWriter.MAPS_PER_HOST, RW_MAPS_PER_HOST);
     String[] rwArgs = {sortInput.toString()};
     
     // Run RandomWriter

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestControlledMapReduceJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestControlledMapReduceJob.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestControlledMapReduceJob.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestControlledMapReduceJob.java Fri Sep 18 15:09:48 2009
@@ -23,6 +23,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.ControlledMapReduceJob.ControlledMapReduceJobRunner;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 
 /**
  * Test to verify the controlled behavior of a ControlledMapReduceJob.
@@ -42,8 +43,8 @@
       throws Exception {
 
     Properties props = new Properties();
-    props.setProperty("mapred.tasktracker.map.tasks.maximum", "2");
-    props.setProperty("mapred.tasktracker.reduce.tasks.maximum", "2");
+    props.setProperty(TTConfig.TT_MAP_SLOTS, "2");
+    props.setProperty(TTConfig.TT_REDUCE_SLOTS, "2");
     startCluster(true, props);
     LOG.info("Started the cluster");
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestEmptyJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestEmptyJob.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestEmptyJob.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestEmptyJob.java Fri Sep 18 15:09:48 2009
@@ -34,6 +34,8 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 
 /**
  * A JUnit test to test Map-Reduce empty jobs.
@@ -221,10 +223,10 @@
       JobConf conf = new JobConf();
       fileSys = FileSystem.get(conf);
 
-      conf.set("mapred.job.tracker.handler.count", "1");
-      conf.set("mapred.job.tracker", "127.0.0.1:0");
-      conf.set("mapred.job.tracker.http.address", "127.0.0.1:0");
-      conf.set("mapred.task.tracker.http.address", "127.0.0.1:0");
+      conf.set(JTConfig.JT_IPC_HANDLER_COUNT, "1");
+      conf.set(JTConfig.JT_IPC_ADDRESS, "127.0.0.1:0");
+      conf.set(JTConfig.JT_HTTP_ADDRESS, "127.0.0.1:0");
+      conf.set(TTConfig.TT_HTTP_ADDRESS, "127.0.0.1:0");
 
       mr =
           new MiniMRCluster(taskTrackers, fileSys.getUri().toString(), 1,

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFieldSelection.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFieldSelection.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFieldSelection.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFieldSelection.java Fri Sep 18 15:09:48 2009
@@ -72,9 +72,9 @@
     job.setOutputFormat(TextOutputFormat.class);
     job.setNumReduceTasks(1);
 
-    job.set("mapred.data.field.separator", "-");
-    job.set("map.output.key.value.fields.spec", "6,5,1-3:0-");
-    job.set("reduce.output.key.value.fields.spec", ":4,3,2,1,0,0-");
+    job.set("mapreduce.fieldsel.data.field.separator", "-");
+    job.set("mapreduce.fieldsel.map.output.key.value.fields.spec", "6,5,1-3:0-");
+    job.set("mapreduce.fieldsel.reduce.output.key.value.fields.spec", ":4,3,2,1,0,0-");
 
     JobClient.runJob(job);
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFileOutputCommitter.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFileOutputCommitter.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestFileOutputCommitter.java Fri Sep 18 15:09:48 2009
@@ -35,7 +35,7 @@
   @SuppressWarnings("unchecked")
   public void testCommitter() throws Exception {
     JobConf job = new JobConf();
-    job.set("mapred.task.id", attempt);
+    job.set(JobContext.TASK_ATTEMPT_ID, attempt);
     job.setOutputCommitter(FileOutputCommitter.class);
     FileOutputFormat.setOutputPath(job, outDir);
     JobContext jContext = new JobContext(job, taskID.getJobID());

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIndexCache.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIndexCache.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIndexCache.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIndexCache.java Fri Sep 18 15:09:48 2009
@@ -29,6 +29,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 
 import junit.framework.TestCase;
 
@@ -44,7 +45,7 @@
     Path p = new Path(System.getProperty("test.build.data", "/tmp"),
         "cache").makeQualified(fs);
     fs.delete(p, true);
-    conf.setInt("mapred.tasktracker.indexcache.mb", 1);
+    conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
     final int partsPerMap = 1000;
     final int bytesPerFile = partsPerMap * 24;
     IndexCache cache = new IndexCache(conf);
@@ -111,7 +112,7 @@
     Path p = new Path(System.getProperty("test.build.data", "/tmp"),
         "cache").makeQualified(fs);
     fs.delete(p, true);
-    conf.setInt("mapred.tasktracker.indexcache.mb", 1);
+    conf.setInt(TTConfig.TT_INDEX_CACHE, 1);
     IndexCache cache = new IndexCache(conf);
 
     Path f = new Path(p, "badindex");

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIsolationRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIsolationRunner.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIsolationRunner.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestIsolationRunner.java Fri Sep 18 15:09:48 2009
@@ -34,6 +34,7 @@
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -111,7 +112,7 @@
       LoginException {
     String taskid =
         new TaskAttemptID(new TaskID(jobId, taskType, 0), 0).toString();
-    return new LocalDirAllocator("mapred.local.dir").getLocalPathToRead(
+    return new LocalDirAllocator(MRConfig.LOCAL_DIR).getLocalPathToRead(
         TaskTracker.getTaskConfFile(UserGroupInformation.login(conf)
             .getUserName(), jobId.toString(), taskid, false), conf);
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobDirCleanup.java Fri Sep 18 15:09:48 2009
@@ -27,6 +27,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import org.apache.hadoop.mapreduce.SleepJob;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -51,7 +52,7 @@
       final int taskTrackers = 10;
       Configuration conf = new Configuration();
       JobConf mrConf = new JobConf();
-      mrConf.set("mapred.tasktracker.reduce.tasks.maximum", "1");
+      mrConf.set(TTConfig.TT_REDUCE_SLOTS, "1");
       dfs = new MiniDFSCluster(conf, 1, true, null);
       fileSys = dfs.getFileSystem();
       namenode = fileSys.getUri().toString();

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java Fri Sep 18 15:09:48 2009
@@ -64,7 +64,7 @@
 
     // Run a job with jvm reuse
     JobConf myConf = getClusterConf();
-    myConf.set("mapred.job.reuse.jvm.num.tasks", "-1");
+    myConf.set(JobContext.JVM_NUMTASKS_TORUN, "-1");
     String[] args = { "-m", "6", "-r", "3", "-mt", "1000", "-rt", "1000" };
     assertEquals(0, ToolRunner.run(myConf, new SleepJob(), args));
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobHistory.java Fri Sep 18 15:09:48 2009
@@ -43,6 +43,7 @@
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
@@ -588,7 +589,7 @@
 
       //set the done folder location
       String doneFolder = "history_done";
-      conf.set("mapred.job.tracker.history.completed.location", doneFolder);
+      conf.set(JTConfig.JT_JOBHISTORY_COMPLETED_LOCATION, doneFolder);
 
       String logDir =
         "file:///" + new File(System.getProperty("hadoop.log.dir")).
@@ -704,7 +705,7 @@
 
       //set the done folder location
       String doneFolder = TEST_ROOT_DIR + "history_done";
-      conf.set("mapred.job.tracker.history.completed.location", doneFolder);
+      conf.set(JTConfig.JT_JOBHISTORY_COMPLETED_LOCATION, doneFolder);
       
       mr = new MiniMRCluster(2, "file:///", 3, null, null, conf);
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgress.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgress.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgress.java Fri Sep 18 15:09:48 2009
@@ -42,6 +42,7 @@
 import org.apache.hadoop.mapred.UtilsForTests.FakeClock;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobCounter;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.Job.RawSplit;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.net.Node;
@@ -76,8 +77,8 @@
     TestSetup setup = new TestSetup(new TestSuite(TestJobInProgress.class)) {
       protected void setUp() throws Exception {
         JobConf conf = new JobConf();
-        conf.set("mapred.job.tracker", "localhost:0");
-        conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
+        conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+        conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
         conf.setClass("topology.node.switch.mapping.impl", 
             StaticMapping.class, DNSToSwitchMapping.class);
         jobTracker = new FakeJobTracker(conf, new FakeClock(), trackers);
@@ -166,7 +167,7 @@
     conf.setNumReduceTasks(numReds);
     conf.setSpeculativeExecution(false);
     conf.setBoolean(
-        "mapred.committer.job.setup.cleanup.needed", false);
+        JobContext.SETUP_CLEANUP_NEEDED, false);
     MyFakeJobInProgress job1 = new MyFakeJobInProgress(conf, jobTracker);
     job1.initTasks();
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobInProgressListener.java Fri Sep 18 15:09:48 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.TestNoJobSetupCleanup;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -59,7 +60,7 @@
       @Override
       protected void setUp() throws Exception {
         conf = new JobConf();   
-        conf.setClass("mapred.jobtracker.taskScheduler", MyScheduler.class,
+        conf.setClass(JTConfig.JT_TASK_SCHEDULER, MyScheduler.class,
                       TaskScheduler.class);
         mr = new MiniMRCluster(1, "file:///", 1, null, null, conf);
         jobtracker = mr.getJobTrackerRunner().getJobTracker();

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobQueueInformation.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobQueueInformation.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobQueueInformation.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobQueueInformation.java Fri Sep 18 15:09:48 2009
@@ -18,23 +18,14 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
 import java.util.Collection;
 import java.util.List;
 
-import javax.security.auth.login.LoginException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.UnixUserGroupInformation;
-import org.apache.hadoop.mapreduce.SleepJob;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
 
 import junit.framework.TestCase;
@@ -88,9 +79,9 @@
     dfsCluster = new MiniDFSCluster(conf, 4, true, null);
 
     jc = new JobConf();
-    jc.setClass("mapred.jobtracker.taskScheduler", TestTaskScheduler.class,
+    jc.setClass(JTConfig.JT_TASK_SCHEDULER, TestTaskScheduler.class,
         TaskScheduler.class);
-    jc.setLong("mapred.jobtracker.taskScheduler.maxRunningTasksPerJob", 10L);
+    jc.setLong(JTConfig.JT_RUNNINGTASKS_PER_JOB, 10L);
     mrCluster = new MiniMRCluster(0, 0, taskTrackers, dfsCluster
         .getFileSystem().getUri().toString(), 1, null, null, null, jc);
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobRetire.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobRetire.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobRetire.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobRetire.java Fri Sep 18 15:09:48 2009
@@ -24,7 +24,7 @@
 import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  * Test if the job retire works fine. 
@@ -39,8 +39,8 @@
     try {
       JobConf conf = new JobConf();
 
-      conf.setBoolean("mapred.job.tracker.retire.jobs", true);
-      conf.setLong("mapred.job.tracker.retiredjobs.cache.size", 1);
+      conf.setBoolean(JTConfig.JT_RETIREJOBS, true);
+      conf.setLong(JTConfig.JT_RETIREJOB_CACHE_SIZE, 1);
       mr = new MiniMRCluster(0, 0, 1, "file:///", 1, null, null, null, conf, 0);
       JobConf jobConf = mr.createJobConf();
       JobTracker jobtracker = mr.getJobTrackerRunner().getJobTracker();

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobStatusPersistency.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobStatusPersistency.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobStatusPersistency.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobStatusPersistency.java Fri Sep 18 15:09:48 2009
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 public class TestJobStatusPersistency extends ClusterMapReduceTestCase {
   static final Path TEST_DIR = 
@@ -77,8 +78,8 @@
 
   public void testPersistency() throws Exception {
     Properties config = new Properties();
-    config.setProperty("mapred.job.tracker.persist.jobstatus.active", "true");
-    config.setProperty("mapred.job.tracker.persist.jobstatus.hours", "1");
+    config.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "true");
+    config.setProperty(JTConfig.JT_PERSIST_JOBSTATUS_HOURS, "1");
     stopCluster();
     startCluster(false, config);
     JobID jobId = runJob();
@@ -117,9 +118,9 @@
     fs.delete(TEST_DIR, true);
     
     Properties config = new Properties();
-    config.setProperty("mapred.job.tracker.persist.jobstatus.active", "true");
-    config.setProperty("mapred.job.tracker.persist.jobstatus.hours", "1");
-    config.setProperty("mapred.job.tracker.persist.jobstatus.dir", 
+    config.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "true");
+    config.setProperty(JTConfig.JT_PERSIST_JOBSTATUS_HOURS, "1");
+    config.setProperty(JTConfig.JT_PERSIST_JOBSTATUS_DIR, 
                        fs.makeQualified(TEST_DIR).toString());
     stopCluster();
     startCluster(false, config);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobSysDirWithDFS.java Fri Sep 18 15:09:48 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  * A JUnit test to test Job System Directory with Mini-DFS.
@@ -82,11 +83,12 @@
     FileOutputFormat.setOutputPath(conf, outDir);
     conf.setNumMapTasks(numMaps);
     conf.setNumReduceTasks(numReduces);
-    conf.set("mapred.system.dir", "/tmp/subru/mapred/system");
+    conf.set(JTConfig.JT_SYSTEM_DIR, "/tmp/subru/mapred/system");
     JobClient jobClient = new JobClient(conf);
     RunningJob job = jobClient.runJob(conf);
     // Checking that the Job Client system dir is not used
-    assertFalse(FileSystem.get(conf).exists(new Path(conf.get("mapred.system.dir")))); 
+    assertFalse(FileSystem.get(conf).exists(
+      new Path(conf.get(JTConfig.JT_SYSTEM_DIR)))); 
     // Check if the Job Tracker system dir is propogated to client
     String sysDir = jobClient.getSystemDir().toString();
     System.out.println("Job sys dir -->" + sysDir);
@@ -121,7 +123,7 @@
       final int taskTrackers = 4;
 
       JobConf conf = new JobConf();
-      conf.set("mapred.system.dir", "/tmp/custom/mapred/system");
+      conf.set(JTConfig.JT_SYSTEM_DIR, "/tmp/custom/mapred/system");
       dfs = new MiniDFSCluster(conf, 4, true, null);
       fileSys = dfs.getFileSystem();
       mr = new MiniMRCluster(taskTrackers, fileSys.getUri().toString(), 1, null, null, conf);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillCompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillCompletedJob.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillCompletedJob.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillCompletedJob.java Fri Sep 18 15:09:48 2009
@@ -24,6 +24,7 @@
 import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobTracker;
 import org.apache.hadoop.mapred.TestRackAwareTaskPlacement.MyFakeJobInProgress;
 import org.apache.hadoop.mapred.UtilsForTests.FakeClock;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  * A JUnit test to test that killing completed jobs does not move them
@@ -41,9 +42,9 @@
   @Override
   protected void setUp() throws Exception {
     JobConf conf = new JobConf();
-    conf.set("mapred.job.tracker", "localhost:0");
-    conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
-    conf.setLong("mapred.tasktracker.expiry.interval", 1000);
+    conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+    conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
+    conf.setLong(JTConfig.JT_TRACKER_EXPIRY_INTERVAL, 1000);
     jobTracker = new FakeJobTracker(conf, (clock = new FakeClock()), trackers);
   }
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLazyOutput.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLazyOutput.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLazyOutput.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLazyOutput.java Fri Sep 18 15:09:48 2009
@@ -56,7 +56,7 @@
     private String id;
 
     public void configure(JobConf job) {
-      id = job.get("mapred.task.id");
+      id = job.get(JobContext.TASK_ATTEMPT_ID);
     }
 
     public void map(LongWritable key, Text val,
@@ -74,7 +74,7 @@
     private String id;
 
     public void configure(JobConf job) {
-      id = job.get("mapred.task.id");
+      id = job.get(JobContext.TASK_ATTEMPT_ID);
     }
 
     /** Writes all keys and values directly to output. */

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLimitTasksPerJobTaskScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLimitTasksPerJobTaskScheduler.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLimitTasksPerJobTaskScheduler.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLimitTasksPerJobTaskScheduler.java Fri Sep 18 15:09:48 2009
@@ -22,6 +22,7 @@
 import junit.framework.TestCase;
 
 import org.apache.hadoop.mapred.TestJobQueueTaskScheduler.FakeTaskTrackerManager;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 public class TestLimitTasksPerJobTaskScheduler extends TestCase {
   protected JobConf jobConf;
@@ -53,7 +54,7 @@
   }
 
   public void testMaxRunningTasksPerJob() throws IOException {
-    jobConf.setLong(LimitTasksPerJobTaskScheduler.MAX_TASKS_PER_JOB_PROPERTY,
+    jobConf.setLong(JTConfig.JT_RUNNINGTASKS_PER_JOB,
         4L);
     scheduler.setConf(jobConf);
     TestJobQueueTaskScheduler.submitJobs(taskTrackerManager, jobConf, 
@@ -88,7 +89,7 @@
   
   public void testMaxRunningTasksPerJobWithInterleavedTrackers()
       throws IOException {
-    jobConf.setLong(LimitTasksPerJobTaskScheduler.MAX_TASKS_PER_JOB_PROPERTY,
+    jobConf.setLong(JTConfig.JT_RUNNINGTASKS_PER_JOB,
         4L);
     scheduler.setConf(jobConf);
     TestJobQueueTaskScheduler.submitJobs(taskTrackerManager, jobConf, 2, JobStatus.RUNNING);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLocalizationWithLinuxTaskController.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLocalizationWithLinuxTaskController.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLocalizationWithLinuxTaskController.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLocalizationWithLinuxTaskController.java Fri Sep 18 15:09:48 2009
@@ -27,6 +27,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.ClusterWithLinuxTaskController.MyLinuxTaskController;
 import org.apache.hadoop.mapreduce.server.tasktracker.Localizer;
+import org.apache.hadoop.mapreduce.MRConfig;
 
 /**
  * Test to verify localization of a job and localization of a task on a
@@ -117,7 +118,7 @@
     for (String dir : localDirs) {
 
       File localDir = new File(dir);
-      assertTrue("mapred.local.dir " + localDir + " isn'task created!",
+      assertTrue(MRConfig.LOCAL_DIR + localDir + " isn'task created!",
           localDir.exists());
 
       File taskTrackerSubDir = new File(localDir, TaskTracker.SUBDIR);
@@ -165,7 +166,7 @@
   @Override
   protected void checkJobLocalization()
       throws IOException {
-    for (String localDir : trackerFConf.getStrings("mapred.local.dir")) {
+    for (String localDir : trackerFConf.getStrings(MRConfig.LOCAL_DIR)) {
       File jobDir =
           new File(localDir, TaskTracker.getLocalJobDir(task.getUser(), jobId
               .toString()));

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLostTracker.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLostTracker.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLostTracker.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestLostTracker.java Fri Sep 18 15:09:48 2009
@@ -24,6 +24,7 @@
 import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobInProgress;
 import org.apache.hadoop.mapred.FakeObjectUtilities.FakeJobTracker;
 import org.apache.hadoop.mapred.UtilsForTests.FakeClock;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  * A test to verify JobTracker's resilience to lost task trackers. 
@@ -43,9 +44,9 @@
   @Override
   protected void setUp() throws Exception {
     JobConf conf = new JobConf();
-    conf.set("mapred.job.tracker", "localhost:0");
-    conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
-    conf.setLong("mapred.tasktracker.expiry.interval", 1000);
+    conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+    conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
+    conf.setLong(JTConfig.JT_TRACKER_EXPIRY_INTERVAL, 1000);
     jobTracker = new FakeJobTracker(conf, (clock = new FakeClock()), trackers);
     jobTracker.startExpireTrackersThread();
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRServerPorts.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRServerPorts.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRServerPorts.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRServerPorts.java Fri Sep 18 15:09:48 2009
@@ -25,6 +25,8 @@
 import org.apache.hadoop.hdfs.TestHDFSServerPorts;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 
@@ -61,15 +63,15 @@
    */
   private JobTracker startJobTracker(JobConf conf, JTRunner runner) 
   throws IOException, LoginException {
-    conf.set("mapred.job.tracker", "localhost:0");
-    conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
+    conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+    conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
     JobTracker jt = null;
     try {
       jt = JobTracker.startTracker(conf);
       runner.setJobTracker(jt);
       runner.start();
-      conf.set("mapred.job.tracker", "localhost:" + jt.getTrackerPort());
-      conf.set("mapred.job.tracker.http.address", 
+      conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:" + jt.getTrackerPort());
+      conf.set(JTConfig.JT_HTTP_ADDRESS, 
                             "0.0.0.0:" + jt.getInfoPort());
     } catch(InterruptedException e) {
       throw new IOException(e.getLocalizedMessage());
@@ -134,23 +136,23 @@
 
       // start job tracker on the same port as name-node
       JobConf conf2 = new JobConf(hdfs.getConfig());
-      conf2.set("mapred.job.tracker",
+      conf2.set(JTConfig.JT_IPC_ADDRESS,
                 FileSystem.getDefaultUri(hdfs.getConfig()).toString());
-      conf2.set("mapred.job.tracker.http.address",
+      conf2.set(JTConfig.JT_HTTP_ADDRESS,
         TestHDFSServerPorts.NAME_NODE_HTTP_HOST + 0);
       boolean started = canStartJobTracker(conf2);
       assertFalse(started); // should fail
 
       // bind http server to the same port as name-node
-      conf2.set("mapred.job.tracker", TestHDFSServerPorts.NAME_NODE_HOST + 0);
-      conf2.set("mapred.job.tracker.http.address",
+      conf2.set(JTConfig.JT_IPC_ADDRESS, TestHDFSServerPorts.NAME_NODE_HOST + 0);
+      conf2.set(JTConfig.JT_HTTP_ADDRESS,
         hdfs.getConfig().get("dfs.http.address"));
       started = canStartJobTracker(conf2);
       assertFalse(started); // should fail again
 
       // both ports are different from the name-node ones
-      conf2.set("mapred.job.tracker", TestHDFSServerPorts.NAME_NODE_HOST + 0);
-      conf2.set("mapred.job.tracker.http.address",
+      conf2.set(JTConfig.JT_IPC_ADDRESS, TestHDFSServerPorts.NAME_NODE_HOST + 0);
+      conf2.set(JTConfig.JT_HTTP_ADDRESS,
         TestHDFSServerPorts.NAME_NODE_HTTP_HOST + 0);
       started = canStartJobTracker(conf2);
       assertTrue(started); // should start now
@@ -179,25 +181,25 @@
       jt = startJobTracker(conf2, runner);
 
       // start job tracker on the same port as name-node
-      conf2.set("mapred.task.tracker.report.address",
+      conf2.set(TTConfig.TT_REPORT_ADDRESS,
                 FileSystem.getDefaultUri(hdfs.getConfig()).toString());
-      conf2.set("mapred.task.tracker.http.address",
+      conf2.set(TTConfig.TT_HTTP_ADDRESS,
         TestHDFSServerPorts.NAME_NODE_HTTP_HOST + 0);
       boolean started = canStartTaskTracker(conf2);
       assertFalse(started); // should fail
 
       // bind http server to the same port as name-node
-      conf2.set("mapred.task.tracker.report.address",
+      conf2.set(TTConfig.TT_REPORT_ADDRESS,
         TestHDFSServerPorts.NAME_NODE_HOST + 0);
-      conf2.set("mapred.task.tracker.http.address",
+      conf2.set(TTConfig.TT_HTTP_ADDRESS,
         hdfs.getConfig().get("dfs.http.address"));
       started = canStartTaskTracker(conf2);
       assertFalse(started); // should fail again
 
       // both ports are different from the name-node ones
-      conf2.set("mapred.task.tracker.report.address",
+      conf2.set(TTConfig.TT_REPORT_ADDRESS,
         TestHDFSServerPorts.NAME_NODE_HOST + 0);
-      conf2.set("mapred.task.tracker.http.address",
+      conf2.set(TTConfig.TT_HTTP_ADDRESS,
         TestHDFSServerPorts.NAME_NODE_HTTP_HOST + 0);
       started = canStartTaskTracker(conf2);
       assertTrue(started); // should start now

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRWithDistributedCache.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRWithDistributedCache.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMRWithDistributedCache.java Fri Sep 18 15:09:48 2009
@@ -42,6 +42,7 @@
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  * Tests the use of the
@@ -112,7 +113,7 @@
       // (The symlinks exist in "localRunner/" for local Jobtrackers,
       // but the user has no way to get at them.
       if (!"local".equals(
-          context.getConfiguration().get("mapred.job.tracker"))) {
+          context.getConfiguration().get(JTConfig.JT_IPC_ADDRESS))) {
         File symlinkFile = new File("distributed.first.symlink");
         TestCase.assertTrue(symlinkFile.exists());
         TestCase.assertEquals(1, symlinkFile.length());
@@ -153,7 +154,7 @@
   /** Tests using the local job runner. */
   public void testLocalJobRunner() throws Exception {
     Configuration c = new Configuration();
-    c.set("mapred.job.tracker", "local");
+    c.set(JTConfig.JT_IPC_ADDRESS, "local");
     c.set("fs.default.name", "file:///");
     testWithConf(c);
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapCollection.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapCollection.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapCollection.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapCollection.java Fri Sep 18 15:09:48 2009
@@ -246,9 +246,9 @@
       boolean pedantic) throws Exception {
     JobConf conf = new JobConf(new Configuration(), SpillMapper.class);
 
-    conf.setInt("io.sort.mb", ioSortMB);
-    conf.set("io.sort.record.percent", Float.toString(recPer));
-    conf.set("io.sort.spill.percent", Float.toString(spillPer));
+    conf.setInt(JobContext.IO_SORT_MB, ioSortMB);
+    conf.set(JobContext.MAP_SORT_RECORD_PERCENT, Float.toString(recPer));
+    conf.set(JobContext.MAP_SORT_SPILL_PERCENT, Float.toString(spillPer));
 
     conf.setInt("test.keywritable.length", keylen);
     conf.setInt("test.valwritable.length", vallen);
@@ -280,7 +280,7 @@
   }
 
   public void testLargeRecords() throws Exception {
-    // maps emitting records larger than io.sort.mb
+    // maps emitting records larger than mapreduce.task.io.sort.mb
     runTest("largerec", 100, 1024*1024, 5, false);
     runTest("largekeyzeroval", 1024*1024, 0, 5, false);
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapOutputType.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapOutputType.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapOutputType.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapOutputType.java Fri Sep 18 15:09:48 2009
@@ -81,7 +81,7 @@
     Path outDir = new Path(testdir, "out");
     FileSystem fs = FileSystem.get(conf);
     fs.delete(testdir, true);
-    conf.setInt("io.sort.mb", 1);
+    conf.setInt(JobContext.IO_SORT_MB, 1);
     conf.setInputFormat(SequenceFileInputFormat.class);
     FileInputFormat.setInputPaths(conf, inDir);
     FileOutputFormat.setOutputPath(conf, outDir);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapProgress.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapProgress.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapProgress.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapProgress.java Fri Sep 18 15:09:48 2009
@@ -29,8 +29,8 @@
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.Job.RawSplit;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  *  Validates map phase progress.
@@ -134,9 +134,10 @@
     job.setNumReduceTasks(0);
     TaskAttemptID taskId = TaskAttemptID.forName(
                                   "attempt_200907082313_0424_m_000000_0");
-    job.setClass("mapreduce.outputformat.class",
+    job.setClass("mapreduce.job.outputformat.class",
                  NullOutputFormat.class, OutputFormat.class);
-    job.set("mapred.input.dir", TEST_ROOT_DIR);
+    job.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
+            TEST_ROOT_DIR);
     jobId = taskId.getJobID();
     
     JobContext jContext = new JobContext(job, jobId);
@@ -145,7 +146,7 @@
     job.setUseNewMapper(true); // use new api
     for (int i = 0; i < rawSplits.length; i++) {// rawSplits.length is 1
       map = new TestMapTask(
-          job.get("mapred.system.dir", "/tmp/hadoop/mapred/system") +
+          job.get(JTConfig.JT_SYSTEM_DIR, "/tmp/hadoop/mapred/system") +
           jobId + "job.xml",  
           taskId, i,
           rawSplits[i].getClassName(),

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapRed.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapRed.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapRed.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapRed.java Fri Sep 18 15:09:48 2009
@@ -773,7 +773,7 @@
       Path outDir = new Path(testdir, "out");
       FileSystem fs = FileSystem.get(conf);
       fs.delete(testdir, true);
-      conf.setInt("io.sort.mb", 1);
+      conf.setInt(JobContext.IO_SORT_MB, 1);
       conf.setInputFormat(SequenceFileInputFormat.class);
       FileInputFormat.setInputPaths(conf, inDir);
       FileOutputFormat.setOutputPath(conf, outDir);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredHeartbeat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredHeartbeat.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredHeartbeat.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredHeartbeat.java Fri Sep 18 15:09:48 2009
@@ -22,6 +22,7 @@
 import junit.framework.TestCase;
 
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 public class TestMapredHeartbeat extends TestCase {
   public void testJobDirCleanup() throws IOException {
@@ -42,7 +43,7 @@
       
       // test configured heartbeat interval
       taskTrackers = 5;
-      conf.setInt("mapred.heartbeats.in.second", 1);
+      conf.setInt(JTConfig.JT_HEARTBEATS_IN_SECOND, 1);
       mr = new MiniMRCluster(taskTrackers, "file:///", 3, 
           null, null, conf);
       jc = new JobClient(mr.createJobConf());
@@ -55,7 +56,7 @@
       
       // test configured heartbeat interval is capped with min value
       taskTrackers = 5;
-      conf.setInt("mapred.heartbeats.in.second", 10);
+      conf.setInt(JTConfig.JT_HEARTBEATS_IN_SECOND, 10);
       mr = new MiniMRCluster(taskTrackers, "file:///", 3, 
           null, null, conf);
       jc = new JobClient(mr.createJobConf());

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredSystemDir.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredSystemDir.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredSystemDir.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMapredSystemDir.java Fri Sep 18 15:09:48 2009
@@ -24,12 +24,13 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.*;
 
 /**
- * Test if JobTracker is resilient to garbage in mapred.system.dir.
+ * Test if JobTracker is resilient to garbage in {@link JTConfig#JT_SYSTEM_DIR}
  */
 public class TestMapredSystemDir extends TestCase {
   private static final Log LOG = LogFactory.getLog(TestMapredSystemDir.class);
@@ -55,7 +56,7 @@
       dfs = new MiniDFSCluster(conf, 1, true, null);
       FileSystem fs = dfs.getFileSystem();
       
-      // create mapred.system.dir
+      // create Configs.SYSTEM_DIR
       Path mapredSysDir = new Path("/mapred");
       fs.mkdirs(mapredSysDir);
       fs.setPermission(mapredSysDir, new FsPermission(SYSTEM_DIR_PERMISSION));
@@ -69,7 +70,7 @@
                              1, null, null, MR_UGI, new JobConf(mrConf));
       JobTracker jobtracker = mr.getJobTrackerRunner().getJobTracker();
       
-      // add garbage to mapred.system.dir
+      // add garbage to Configs.SYSTEM_DIR
       Path garbage = new Path(jobtracker.getSystemDir(), "garbage");
       fs.mkdirs(garbage);
       fs.setPermission(garbage, new FsPermission(SYSTEM_DIR_PERMISSION));
@@ -77,7 +78,7 @@
       
       // stop the jobtracker
       mr.stopJobTracker();
-      mr.getJobTrackerConf().setBoolean("mapred.jobtracker.restart.recover", 
+      mr.getJobTrackerConf().setBoolean(JTConfig.JT_RESTART_ENABLED, 
                                         false);
       // start jobtracker but dont wait for it to be up
       mr.startJobTracker(false);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java Fri Sep 18 15:09:48 2009
@@ -80,7 +80,7 @@
                                            makeQualified(localFs).toString());
      }
      public void configure(JobConf job) {
-       tmpDir = new Path(job.get("mapred.child.tmp", "./tmp"));
+       tmpDir = new Path(job.get(JobContext.TASK_TEMP_DIR, "./tmp"));
        try {
          localFs = FileSystem.getLocal(job);
        } catch (IOException ioe) {
@@ -343,7 +343,7 @@
   /**
    * Tests task's temp directory.
    * 
-   * In this test, we give different values to mapred.child.tmp
+   * In this test, we give different values to mapreduce.task.tmp.dir
    * both relative and absolute. And check whether the temp directory 
    * is created. We also check whether java.io.tmpdir value is same as 
   the directory specified. We create a temp file and check if it is 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRClasspath.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRClasspath.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRClasspath.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRClasspath.java Fri Sep 18 15:09:48 2009
@@ -29,6 +29,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  * A JUnit test to test Mini Map-Reduce Cluster with multiple directories
@@ -56,7 +57,7 @@
       file.close();
     }
     FileSystem.setDefaultUri(conf, fileSys);
-    conf.set("mapred.job.tracker", jobTracker);
+    conf.set(JTConfig.JT_IPC_ADDRESS, jobTracker);
     conf.setJobName("wordcount");
     conf.setInputFormat(TextInputFormat.class);
     
@@ -112,14 +113,14 @@
       file.close();
     }
     FileSystem.setDefaultUri(conf, uri);
-    conf.set("mapred.job.tracker", jobTracker);
+    conf.set(JTConfig.JT_IPC_ADDRESS, jobTracker);
     conf.setJobName("wordcount");
     conf.setInputFormat(TextInputFormat.class);
 
     // the keys are counts
     conf.setOutputValueClass(IntWritable.class);
     // the values are the messages
-    conf.set("mapred.output.key.class", "testjar.ExternalWritable");
+    conf.set(JobContext.OUTPUT_KEY_CLASS, "testjar.ExternalWritable");
 
     FileInputFormat.setInputPaths(conf, inDir);
     FileOutputFormat.setOutputPath(conf, outDir);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRDFSSort.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRDFSSort.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRDFSSort.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRDFSSort.java Fri Sep 18 15:09:48 2009
@@ -75,8 +75,8 @@
   throws Exception {
     // Scale down the default settings for RandomWriter for the test-case
     // Generates NUM_HADOOP_SLAVES * RW_MAPS_PER_HOST * RW_BYTES_PER_MAP
-    job.setInt("test.randomwrite.bytes_per_map", RW_BYTES_PER_MAP);
-    job.setInt("test.randomwriter.maps_per_host", RW_MAPS_PER_HOST);
+    job.setInt(RandomWriter.BYTES_PER_MAP, RW_BYTES_PER_MAP);
+    job.setInt(RandomWriter.MAPS_PER_HOST, RW_MAPS_PER_HOST);
     String[] rwArgs = {sortInput.toString()};
     
     // Run RandomWriter
@@ -86,8 +86,8 @@
   private static void runSort(JobConf job, Path sortInput, Path sortOutput) 
   throws Exception {
 
-    job.setInt("mapred.job.reuse.jvm.num.tasks", -1);
-    job.setInt("io.sort.mb", 1);
+    job.setInt(JobContext.JVM_NUMTASKS_TORUN, -1);
+    job.setInt(JobContext.IO_SORT_MB, 1);
     job.setNumMapTasks(12);
 
     // Setup command-line arguments to 'sort'
@@ -140,7 +140,7 @@
                                       boolean reuse) throws IOException {
     // setup a map-only job that reads the input and only sets the counters
     // based on how many times the jvm was reused.
-    job.setInt("mapred.job.reuse.jvm.num.tasks", reuse ? -1 : 1);
+    job.setInt(JobContext.JVM_NUMTASKS_TORUN, reuse ? -1 : 1);
     FileInputFormat.setInputPaths(job, SORT_INPUT_PATH);
     job.setInputFormat(SequenceFileInputFormat.class);
     job.setOutputFormat(NullOutputFormat.class);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRWithDFS.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRWithDFS.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRWithDFS.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRWithDFS.java Fri Sep 18 15:09:48 2009
@@ -41,6 +41,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
@@ -154,7 +155,7 @@
       File localDir = new File(mr.getTaskTrackerLocalDir(i));
       assertTrue("Local dir " + localDir + " does not exist.", localDir
           .isDirectory());
-      LOG.info("Verifying contents of mapred.local.dir "
+      LOG.info("Verifying contents of " + MRConfig.LOCAL_DIR + " "
           + localDir.getAbsolutePath());
 
       // Verify contents(user-dir) of tracker-sub-dir

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleLevelCaching.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleLevelCaching.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleLevelCaching.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleLevelCaching.java Fri Sep 18 15:09:48 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.mapreduce.JobCounter;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  * This test checks whether the task caches are created and used properly.
@@ -106,7 +107,7 @@
       JobConf jc = new JobConf();
       // cache-level = level (unshared levels) + 1(topmost shared node i.e /a) 
       //               + 1 (for host)
-      jc.setInt("mapred.task.cache.levels", level + 2);
+      jc.setInt(JTConfig.JT_TASKCACHE_LEVELS, level + 2);
       mr = new MiniMRCluster(taskTrackers, namenode, 1, new String[] {rack2}, 
     		                 new String[] {"host2.com"}, jc);
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java Fri Sep 18 15:09:48 2009
@@ -75,7 +75,7 @@
     FileSystem fs = FileSystem.getLocal(job);
     String name = "part-00000";
     //pretend that we have input file with 1/2/3 as the suffix
-    job.set("map.input.file", "1/2/3");
+    job.set(JobContext.MAP_INPUT_FILE, "1/2/3");
     // we use the last two legs of the input file as the output file
     job.set("mapred.outputformat.numOfTrailingLegs", "2");
     MultipleTextOutputFormat<Text, Text> theOutputFormat = new MultipleTextOutputFormat<Text, Text>();
@@ -86,7 +86,7 @@
   
   public void testFormat() throws Exception {
     JobConf job = new JobConf();
-    job.set("mapred.task.id", attempt);
+    job.set(JobContext.TASK_ATTEMPT_ID, attempt);
     FileOutputFormat.setOutputPath(job, workDir.getParent().getParent());
     FileOutputFormat.setWorkOutputPath(job, workDir);
     FileSystem fs = workDir.getFileSystem(job);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestNodeRefresh.java Fri Sep 18 15:09:48 2009
@@ -33,6 +33,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -127,13 +128,13 @@
   }
 
   /**
-   * Check default value of mapred.hosts.exclude. Also check if only 
+   * Check default value of HOSTS_EXCLUDE. Also check if only 
    * owner/supergroup user is allowed to this command.
    */
   public void testMRRefreshDefault() throws IOException {  
     // start a cluster with 2 hosts and no exclude-hosts file
     Configuration conf = new Configuration();
-    conf.set("mapred.hosts.exclude", "");
+    conf.set(JTConfig.JT_HOSTS_EXCLUDE_FILENAME, "");
     startCluster(2, 1, 0, conf);
 
     conf = mr.createJobConf(new JobConf(conf));
@@ -203,7 +204,7 @@
     UnixUserGroupInformation.saveToConf(conf, 
         UnixUserGroupInformation.UGI_PROPERTY_NAME, ugi);
     // set the supergroup
-    conf.set("mapred.permissions.supergroup", "abc");
+    conf.set(JTConfig.JT_SUPERGROUP, "abc");
     startCluster(2, 1, 0, conf);
 
     conf = mr.createJobConf(new JobConf(conf));

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManager.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManager.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestQueueManager.java Fri Sep 18 15:09:48 2009
@@ -34,6 +34,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -672,13 +673,13 @@
                              String queueName) 
   throws IOException, InterruptedException, ClassNotFoundException {
     Configuration clientConf = new Configuration();
-    clientConf.set("mapred.job.tracker", "localhost:"
+    clientConf.set(JTConfig.JT_IPC_ADDRESS, "localhost:"
         + miniMRCluster.getJobTrackerPort());
     if (userInfo != null) {
       clientConf.set(UnixUserGroupInformation.UGI_PROPERTY_NAME, userInfo);
     }
     if (queueName != null) {
-      clientConf.set("mapred.job.queue.name", queueName);
+      clientConf.set(JobContext.QUEUE_NAME, queueName);
     }
     SleepJob sleep = new SleepJob();
     sleep.setConf(clientConf);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java Fri Sep 18 15:09:48 2009
@@ -31,6 +31,7 @@
 import org.apache.hadoop.mapred.UtilsForTests.FakeClock;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobCounter;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.mapreduce.Job.RawSplit;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.net.StaticMapping;
@@ -60,8 +61,8 @@
       new TestSetup(new TestSuite(TestRackAwareTaskPlacement.class)) {
       protected void setUp() throws Exception {
         JobConf conf = new JobConf();
-        conf.set("mapred.job.tracker", "localhost:0");
-        conf.set("mapred.job.tracker.http.address", "0.0.0.0:0");
+        conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+        conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
         conf.setClass("topology.node.switch.mapping.impl", 
           StaticMapping.class, DNSToSwitchMapping.class);
         jobTracker = new FakeJobTracker(conf, new FakeClock(), trackers);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRecoveryManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRecoveryManager.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRecoveryManager.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRecoveryManager.java Fri Sep 18 15:09:48 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.mapred.JobTracker.RecoveryManager;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
@@ -62,8 +63,7 @@
     FileSystem fs = FileSystem.get(new Configuration());
     fs.delete(TEST_DIR, true); // cleanup
     
-    conf.set("mapred.jobtracker.job.history.block.size", "1024");
-    conf.set("mapred.jobtracker.job.history.buffer.size", "1024");
+    conf.set(JTConfig.JT_JOBHISTORY_BLOCK_SIZE, "1024");
     
     MiniMRCluster mr = new MiniMRCluster(1, "file:///", 1, null, null, conf);
     
@@ -126,8 +126,7 @@
     out.close();
 
     // make sure that the jobtracker is in recovery mode
-    mr.getJobTrackerConf().setBoolean("mapred.jobtracker.restart.recover", 
-                                      true);
+    mr.getJobTrackerConf().setBoolean(JTConfig.JT_RESTART_ENABLED, true);
     // start the jobtracker
     LOG.info("Starting jobtracker");
     mr.startJobTracker();
@@ -167,10 +166,9 @@
     fs.delete(TEST_DIR, true);
     
     JobConf conf = new JobConf();
+    conf.set(JTConfig.JT_JOBHISTORY_BLOCK_SIZE, "1024");
     conf.set(
       DeprecatedQueueConfigurationParser.MAPRED_QUEUE_NAMES_KEY, "default");
-    conf.set("mapred.jobtracker.job.history.block.size", "1024");
-    conf.set("mapred.jobtracker.job.history.buffer.size", "1024");
     
     MiniMRCluster mr = new MiniMRCluster(1, "file:///", 1, null, null, conf);
     JobTracker jobtracker = mr.getJobTrackerRunner().getJobTracker();
@@ -239,9 +237,9 @@
     mr.stopJobTracker();
     
     // make sure that the jobtracker is in recovery mode
-    mr.getJobTrackerConf().setBoolean("mapred.jobtracker.restart.recover", 
+    mr.getJobTrackerConf().setBoolean(JTConfig.JT_RESTART_ENABLED, 
                                       true);
-    mr.getJobTrackerConf().setInt("mapred.jobtracker.maxtasks.per.job", 25);
+    mr.getJobTrackerConf().setInt(JTConfig.JT_TASKS_PER_JOB, 25);
     
     mr.getJobTrackerConf().setBoolean("mapred.acls.enabled" , true);
     UserGroupInformation ugi = UserGroupInformation.readFrom(job1);
@@ -293,8 +291,8 @@
     // start the jobtracker
     JobConf conf = new JobConf();
     FileSystem.setDefaultUri(conf, namenode);
-    conf.set("mapred.job.tracker", "localhost:0");
-    conf.set("mapred.job.tracker.http.address", "127.0.0.1:0");
+    conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+    conf.set(JTConfig.JT_HTTP_ADDRESS, "127.0.0.1:0");
 
     JobTracker jobtracker = new JobTracker(conf);
 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java Fri Sep 18 15:09:48 2009
@@ -33,13 +33,13 @@
   public void testReduceFromDisk() throws Exception {
     final int MAP_TASKS = 8;
     JobConf job = mrCluster.createJobConf();
-    job.set("mapred.job.reduce.input.buffer.percent", "0.0");
+    job.set(JobContext.REDUCE_INPUT_BUFFER_PERCENT, "0.0");
     job.setNumMapTasks(MAP_TASKS);
     job.set(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, "-Xmx128m");
-    job.setInt("mapred.job.reduce.total.mem.bytes", 128 << 20);
-    job.set("mapred.job.shuffle.input.buffer.percent", "0.05");
-    job.setInt("io.sort.factor", 2);
-    job.setInt("mapred.inmem.merge.threshold", 4);
+    job.setInt(JobContext.REDUCE_MEMORY_TOTAL_BYTES, 128 << 20);
+    job.set(JobContext.SHUFFLE_INPUT_BUFFER_PERCENT, "0.05");
+    job.setInt(JobContext.IO_SORT_FACTOR, 2);
+    job.setInt(JobContext.REDUCE_MERGE_INMEM_THRESHOLD, 4);
     Counters c = runJob(job);
     final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter();
     final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter();
@@ -56,9 +56,9 @@
   public void testReduceFromMem() throws Exception {
     final int MAP_TASKS = 3;
     JobConf job = mrCluster.createJobConf();
-    job.set("mapred.job.reduce.input.buffer.percent", "1.0");
-    job.set("mapred.job.shuffle.input.buffer.percent", "1.0");
-    job.setInt("mapred.job.reduce.total.mem.bytes", 128 << 20);
+    job.set(JobContext.REDUCE_INPUT_BUFFER_PERCENT, "1.0");
+    job.set(JobContext.SHUFFLE_INPUT_BUFFER_PERCENT, "1.0");
+    job.setInt(JobContext.REDUCE_MEMORY_TOTAL_BYTES, 128 << 20);
     job.setNumMapTasks(MAP_TASKS);
     Counters c = runJob(job);
     final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter();

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java Fri Sep 18 15:09:48 2009
@@ -80,14 +80,14 @@
     final int MAP_TASKS = 7;
     JobConf job = mrCluster.createJobConf();
     job.setNumMapTasks(MAP_TASKS);
-    job.setInt("mapred.inmem.merge.threshold", 0);
-    job.set("mapred.job.reduce.input.buffer.percent", "1.0");
-    job.setInt("mapred.reduce.parallel.copies", 1);
-    job.setInt("io.sort.mb", 10);
+    job.setInt(JobContext.REDUCE_MERGE_INMEM_THRESHOLD, 0);
+    job.set(JobContext.REDUCE_INPUT_BUFFER_PERCENT, "1.0");
+    job.setInt(JobContext.SHUFFLE_PARALLEL_COPIES, 1);
+    job.setInt(JobContext.IO_SORT_MB, 10);
     job.set(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, "-Xmx128m");
-    job.setInt("mapred.job.reduce.total.mem.bytes", 128 << 20);
-    job.set("mapred.job.shuffle.input.buffer.percent", "0.14");
-    job.set("mapred.job.shuffle.merge.percent", "1.0");
+    job.setInt(JobContext.REDUCE_MEMORY_TOTAL_BYTES, 128 << 20);
+    job.set(JobContext.SHUFFLE_INPUT_BUFFER_PERCENT, "0.14");
+    job.set(JobContext.SHUFFLE_MERGE_EPRCENT, "1.0");
     Counters c = runJob(job);
     final long out = c.findCounter(TaskCounter.MAP_OUTPUT_RECORDS).getCounter();
     final long spill = c.findCounter(TaskCounter.SPILLED_RECORDS).getCounter();
@@ -112,7 +112,7 @@
     @Override
     public void configure(JobConf conf) {
       nMaps = conf.getNumMapTasks();
-      id = nMaps - conf.getInt("mapred.task.partition", -1) - 1;
+      id = nMaps - conf.getInt(JobContext.TASK_PARTITION, -1) - 1;
       Arrays.fill(b, 0, 4096, (byte)'V');
       ((StringBuilder)fmt.out()).append(keyfmt);
     }
@@ -248,8 +248,8 @@
     conf.setNumReduceTasks(1);
     conf.setInputFormat(FakeIF.class);
     conf.setNumTasksToExecutePerJvm(1);
-    conf.setInt("mapred.map.max.attempts", 0);
-    conf.setInt("mapred.reduce.max.attempts", 0);
+    conf.setInt(JobContext.MAP_MAX_ATTEMPTS, 0);
+    conf.setInt(JobContext.REDUCE_MAX_ATTEMPTS, 0);
     FileInputFormat.setInputPaths(conf, new Path("/in"));
     final Path outp = new Path("/out");
     FileOutputFormat.setOutputPath(conf, outp);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceTask.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceTask.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceTask.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceTask.java Fri Sep 18 15:09:48 2009
@@ -90,7 +90,7 @@
     @SuppressWarnings("unchecked")
     RawKeyValueIterator rawItr = 
       Merger.merge(conf, rfs, Text.class, Text.class, codec, new Path[]{path}, 
-                   false, conf.getInt("io.sort.factor", 100), tmpDir, 
+                   false, conf.getInt(JobContext.IO_SORT_FACTOR, 100), tmpDir, 
                    new Text.Comparator(), new NullProgress(), null, null, null);
     @SuppressWarnings("unchecked") // WritableComparators are not generic
     ReduceTask.ValuesIterator valItr = 

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java Fri Sep 18 15:09:48 2009
@@ -53,7 +53,7 @@
       fail("Failed to create output directory");
     }
 
-    job.set("mapred.task.id", attempt);
+    job.set(JobContext.TASK_ATTEMPT_ID, attempt);
     FileOutputFormat.setOutputPath(job, dir.getParent().getParent());
     FileOutputFormat.setWorkOutputPath(job, dir);
 



Mime
View raw message