hadoop-mapreduce-commits mailing list archives

From: sha...@apache.org
Subject: svn commit: r816664 [2/9] - in /hadoop/mapreduce/trunk: ./ conf/ src/benchmarks/gridmix/ src/benchmarks/gridmix/pipesort/ src/benchmarks/gridmix2/ src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapreduce/ src/c++/pipes/impl/ src/c++/task-controller...
Date: Fri, 18 Sep 2009 15:10:02 GMT
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java Fri Sep 18 15:09:48 2009
@@ -47,9 +47,9 @@
 
     if (includeHadoopFlags) {
       args.add("-D");
-      args.add("mapred.job.tracker=local");
+      args.add("mapreduce.jobtracker.address=local");
       args.add("-D");
-      args.add("mapred.map.tasks=1");
+      args.add("mapreduce.job.maps=1");
       args.add("-D");
       args.add("fs.default.name=file:///");
     }
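
For context: the old key names remain readable through Hadoop's Configuration
deprecation layer, so existing job configurations keep working while the code
base moves to the new names. A minimal sketch, assuming the old-to-new mapping
is registered inside the framework (user code does not register it itself):

    import org.apache.hadoop.conf.Configuration;

    public class DeprecatedKeyDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Set the pre-rename key name...
        conf.set("mapred.job.tracker", "local");
        // ...and read it back under the name this commit switches to.
        // Expected to print "local" once the deprecation mapping is active.
        System.out.println(conf.get("mapreduce.jobtracker.address"));
      }
    }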

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java Fri Sep 18 15:09:48 2009
@@ -144,7 +144,7 @@
     args.add("-D");
     args.add("fs.default.name=file:///");
     args.add("-D");
-    args.add("mapred.job.tracker=local");
+    args.add("mapreduce.jobtracker.address=local");
 
     args.add("--table");
     args.add(TABLE_NAME);

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java Fri Sep 18 15:09:48 2009
@@ -31,6 +31,7 @@
 import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import org.apache.hadoop.sqoop.ImportOptions;
@@ -59,9 +60,9 @@
 
     if (includeHadoopFlags) {
       args.add("-D");
-      args.add("mapred.job.tracker=local");
+      args.add("mapreduce.jobtracker.address=local");
       args.add("-D");
-      args.add("mapred.map.tasks=1");
+      args.add("mapreduce.job.maps=1");
       args.add("-D");
       args.add("fs.default.name=file:///");
     }
@@ -122,7 +123,7 @@
       job.set(ReparseMapper.USER_TYPE_NAME_KEY, tableClassName);
 
       // use local mode in the same JVM.
-      job.set("mapred.job.tracker", "local");
+      job.set(JTConfig.JT_IPC_ADDRESS, "local");
       job.set("fs.default.name", "file:///");
 
       String warehouseDir = getWarehouseDir();

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java Fri Sep 18 15:09:48 2009
@@ -287,9 +287,9 @@
 
     if (includeHadoopFlags) {
       args.add("-D");
-      args.add("mapred.job.tracker=local");
+      args.add("mapreduce.jobtracker.address=local");
       args.add("-D");
-      args.add("mapred.map.tasks=1");
+      args.add("mapreduce.job.maps=1");
       args.add("-D");
       args.add("fs.default.name=file:///");
     }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java Fri Sep 18 15:09:48 2009
@@ -27,6 +27,7 @@
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.streaming.io.InputWriter;
 import org.apache.hadoop.streaming.io.OutputReader;
 import org.apache.hadoop.streaming.io.TextInputWriter;
@@ -68,7 +69,7 @@
     //processed records could be different(equal or less) than the no of 
     //records input.
     SkipBadRecords.setAutoIncrMapperProcCount(job, false);
-    skipping = job.getBoolean("mapred.skip.on", false);
+    skipping = job.getBoolean(JobContext.SKIP_RECORDS, false);
     if (mapInputWriterClass_.getCanonicalName().equals(TextInputWriter.class.getCanonicalName())) {
       String inputFormatClassName = job.getClass("mapred.input.format.class", TextInputFormat.class).getCanonicalName();
       ignoreKey = inputFormatClassName.equals(TextInputFormat.class.getCanonicalName());

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java Fri Sep 18 15:09:48 2009
@@ -28,6 +28,7 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.SkipBadRecords;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.streaming.io.InputWriter;
 import org.apache.hadoop.streaming.io.OutputReader;
 import org.apache.hadoop.util.StringUtils;
@@ -69,7 +70,7 @@
     //processed records could be different(equal or less) than the no of 
     //records input.
     SkipBadRecords.setAutoIncrReducerProcCount(job, false);
-    skipping = job.getBoolean("mapred.skip.on", false);
+    skipping = job.getBoolean(JobContext.SKIP_RECORDS, false);
 
     try {
       reduceOutFieldSeparator = job_.get("stream.reduce.output.field.separator", "\t").getBytes("UTF-8");
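
PipeMapper and PipeReducer now read the skip flag through a JobContext
constant instead of the raw literal. A minimal sketch of the lookup, assuming
JobContext.SKIP_RECORDS carries the renamed key:

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.JobContext;

    public class SkipFlagDemo {
      public static void main(String[] args) {
        JobConf job = new JobConf();
        // Before this commit: job.getBoolean("mapred.skip.on", false).
        // The constant keeps the property name in one place.
        boolean skipping = job.getBoolean(JobContext.SKIP_RECORDS, false);
        System.out.println("skipping=" + skipping);
      }
    }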

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Fri Sep 18 15:09:48 2009
@@ -45,7 +45,10 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -503,7 +506,7 @@
     System.out.println("  The location of this working directory is unspecified.");
     System.out.println();
     System.out.println("To set the number of reduce tasks (num. of output files):");
-    System.out.println("  -D mapred.reduce.tasks=10");
+    System.out.println("  -D " + JobContext.NUM_REDUCES + "=10");
     System.out.println("To skip the sort/combine/shuffle/sort/reduce step:");
     System.out.println("  Use -numReduceTasks 0");
     System.out
@@ -514,18 +517,18 @@
     System.out.println("  This equivalent -reducer NONE");
     System.out.println();
     System.out.println("To speed up the last maps:");
-    System.out.println("  -D mapred.map.tasks.speculative.execution=true");
+    System.out.println("  -D " + JobContext.MAP_SPECULATIVE + "=true");
     System.out.println("To speed up the last reduces:");
-    System.out.println("  -D mapred.reduce.tasks.speculative.execution=true");
+    System.out.println("  -D " + JobContext.REDUCE_SPECULATIVE + "=true");
     System.out.println("To name the job (appears in the JobTracker Web UI):");
-    System.out.println("  -D mapred.job.name='My Job' ");
+    System.out.println("  -D " + JobContext.JOB_NAME + "='My Job'");
     System.out.println("To change the local temp directory:");
     System.out.println("  -D dfs.data.dir=/tmp/dfs");
     System.out.println("  -D stream.tmpdir=/tmp/streaming");
     System.out.println("Additional local temp directories with -cluster local:");
-    System.out.println("  -D mapred.local.dir=/tmp/local");
-    System.out.println("  -D mapred.system.dir=/tmp/system");
-    System.out.println("  -D mapred.temp.dir=/tmp/temp");
+    System.out.println("  -D " + MRConfig.LOCAL_DIR + "=/tmp/local");
+    System.out.println("  -D " + JTConfig.JT_SYSTEM_DIR + "=/tmp/system");
+    System.out.println("  -D " + MRConfig.TEMP_DIR + "=/tmp/temp");
     System.out.println("To treat tasks with non-zero exit status as SUCCEDED:");    
     System.out.println("  -D stream.non.zero.exit.is.failure=false");
     System.out.println("Use a custom hadoopStreaming build along a standard hadoop install:");
@@ -610,7 +613,7 @@
     if (packageFiles_.size() + unjarFiles.size() == 0) {
       return null;
     }
-    String tmp = jobConf_.get("stream.tmpdir"); //, "/tmp/${user.name}/"
+    String tmp = jobConf_.get("stream.tmpdir"); //, "/tmp/${mapreduce.job.user.name}/"
     File tmpDir = (tmp == null) ? null : new File(tmp);
     // tmpDir=null means OS default tmp dir
     File jobJar = File.createTempFile("streamjob", ".jar", tmpDir);
@@ -652,7 +655,7 @@
 
     // The correct FS must be set before this is called!
     // (to resolve local vs. dfs drive letter differences) 
-    // (mapred.working.dir will be lazily initialized ONCE and depends on FS)
+    // (mapreduce.job.working.dir will be lazily initialized ONCE and depends on FS)
     for (int i = 0; i < inputSpecs_.size(); i++) {
       FileInputFormat.addInputPaths(jobConf_, 
                         (String) inputSpecs_.get(i));
@@ -894,7 +897,7 @@
   }
 
   protected String getJobTrackerHostPort() {
-    return jobConf_.get("mapred.job.tracker");
+    return jobConf_.get(JTConfig.JT_IPC_ADDRESS);
   }
 
   protected void jobInfo() {
@@ -903,7 +906,7 @@
     } else {
       String hp = getJobTrackerHostPort();
       LOG.info("To kill this job, run:");
-      LOG.info(getHadoopClientHome() + "/bin/hadoop job  -Dmapred.job.tracker=" + hp + " -kill "
+      LOG.info(getHadoopClientHome() + "/bin/hadoop job  -D" + JTConfig.JT_IPC_ADDRESS + "=" + hp + " -kill "
                + jobId_);
       //LOG.info("Job file: " + running_.getJobFile());
       LOG.info("Tracking URL: " + StreamUtil.qualifyHost(running_.getTrackingURL()));

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java Fri Sep 18 15:09:48 2009
@@ -36,6 +36,8 @@
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /** Utilities not available elsewhere in Hadoop.
  *  
@@ -420,13 +422,13 @@
   // JobConf helpers
 
   public static FileSplit getCurrentSplit(JobConf job) {
-    String path = job.get("map.input.file");
+    String path = job.get(JobContext.MAP_INPUT_FILE);
     if (path == null) {
       return null;
     }
     Path p = new Path(path);
-    long start = Long.parseLong(job.get("map.input.start"));
-    long length = Long.parseLong(job.get("map.input.length"));
+    long start = Long.parseLong(job.get(JobContext.MAP_INPUT_START));
+    long length = Long.parseLong(job.get(JobContext.MAP_INPUT_PATH));
     return new FileSplit(p, start, length, job);
   }
 
@@ -439,16 +441,16 @@
   }
 
   public static boolean isLocalJobTracker(JobConf job) {
-    return job.get("mapred.job.tracker", "local").equals("local");
+    return job.get(JTConfig.JT_IPC_ADDRESS, "local").equals("local");
   }
 
   public static TaskId getTaskInfo(JobConf job) {
     TaskId res = new TaskId();
 
-    String id = job.get("mapred.task.id");
+    String id = job.get(JobContext.TASK_ATTEMPT_ID);
     if (isLocalJobTracker(job)) {
       // it uses difft naming 
-      res.mapTask = job.getBoolean("mapred.task.is.map", true);
+      res.mapTask = job.getBoolean(JobContext.TASK_ISMAP, true);
       res.jobid = "0";
       res.taskid = 0;
       res.execid = 0;
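
A minimal sketch of the per-task lookups getCurrentSplit() now performs; note
that this revision uses the MAP_INPUT_PATH constant for the length property,
so the sketch follows the diff above rather than the constant's name:

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.JobContext;

    public class SplitInfoDemo {
      public static void main(String[] args) {
        JobConf job = new JobConf();
        // These values are populated by the framework for each map task.
        String file = job.get(JobContext.MAP_INPUT_FILE);
        String start = job.get(JobContext.MAP_INPUT_START);
        String length = job.get(JobContext.MAP_INPUT_PATH); // length, per the diff
        System.out.println(file + " @" + start + "+" + length);
      }
    }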

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java Fri Sep 18 15:09:48 2009
@@ -32,6 +32,7 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 
 /**
@@ -69,7 +70,7 @@
       fileSys = dfs.getFileSystem();
       namenode = fileSys.getUri().getAuthority();
       mr  = new MiniMRCluster(1, namenode, 3);
-      strJobTracker = "mapred.job.tracker=" + "localhost:" + mr.getJobTrackerPort();
+      strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
       strNamenode = "fs.default.name=" + namenode;
     } catch (Exception e) {
       e.printStackTrace();
@@ -116,7 +117,7 @@
       "-output", OUTPUT_DIR,
       "-mapper", "xargs cat", 
       "-reducer", "cat",
-      "-jobconf", "mapred.reduce.tasks=1",
+      "-jobconf", "mapreduce.job.reduces=1",
       "-cacheArchive", cacheArchiveString1, 
       "-cacheArchive", cacheArchiveString2,
       "-jobconf", strNamenode,

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java Fri Sep 18 15:09:48 2009
@@ -35,6 +35,7 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 /**
  * This test case tests the symlink creation
  * utility provided by distributed caching 
@@ -73,7 +74,7 @@
         mr  = new MiniMRCluster(1, namenode, 3);
         // During tests, the default Configuration will use a local mapred
         // So don't specify -config or -cluster
-        String strJobtracker = "mapred.job.tracker=" + "localhost:" + mr.getJobTrackerPort();
+        String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
         String strNamenode = "fs.default.name=" + namenode;
         String argv[] = new String[] {
           "-input", INPUT_FILE,

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java Fri Sep 18 15:09:48 2009
@@ -54,7 +54,7 @@
       "-output", OUTPUT_DIR.getAbsolutePath(),
       "-mapper", map,
       "-reducer", reduce,
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
       "-jobconf", "stream.map.output=rawbytes",
       "-jobconf", "stream.reduce.input=rawbytes",

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java Fri Sep 18 15:09:48 2009
@@ -20,10 +20,8 @@
 
 import junit.framework.TestCase;
 import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
+
+import org.apache.hadoop.mapreduce.JobContext;
 
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
@@ -65,7 +63,7 @@
       "-reducer", "aggregate",
       //"-verbose",
       //"-jobconf", "stream.debug=set"
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", JobContext.PRESERVE_FAILED_TASK_FILES + "=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }
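
This test spells the property through the JobContext constant where sibling
tests above use the literal; the two are assumed to name the same key:

    // Sanity sketch: the constant and the literal used elsewhere in this
    // commit are assumed to be interchangeable.
    assert JobContext.PRESERVE_FAILED_TASK_FILES
        .equals("mapreduce.task.files.preserve.failedtasks");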

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java Fri Sep 18 15:09:48 2009
@@ -70,10 +70,10 @@
       //"-verbose",
       "-jobconf", "stream.map.output.field.separator=.",
       "-jobconf", "stream.num.map.output.key.fields=2",
-      "-jobconf", "map.output.key.field.separator=.",
+      "-jobconf", "mapreduce.mapreduce.mapreduce.map.output.key.field.separator=.",
       "-jobconf", "num.key.fields.for.partition=1",
-      "-jobconf", "mapred.reduce.tasks=2",
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.job.reduces=2",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java Fri Sep 18 15:09:48 2009
@@ -64,7 +64,7 @@
       "-mapper", map,
       "-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer",
       "-numReduceTasks", "0",
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java Fri Sep 18 15:09:48 2009
@@ -64,7 +64,7 @@
       "-reducer", reduce,
       //"-verbose",
       //"-jobconf", "stream.debug=set"
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java Fri Sep 18 15:09:48 2009
@@ -61,7 +61,7 @@
         new String[] { "-input", inputPath.makeQualified(inFs).toString(),
             "-output", outputPath.makeQualified(outFs).toString(), "-mapper",
             map, "-reducer", reduce, "-jobconf",
-            "keep.failed.task.files=true", "-jobconf",
+            "mapreduce.task.files.preserve.failedtasks=true", "-jobconf",
             "stream.tmpdir=" + System.getProperty("test.build.data", "/tmp") };
     StreamJob streamJob = new StreamJob(args, true);
     streamJob.setConf(myConf);

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java Fri Sep 18 15:09:48 2009
@@ -41,6 +41,7 @@
 import org.apache.hadoop.mapred.OutputLogFilter;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SkipBadRecords;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 public class TestStreamingBadRecords extends ClusterMapReduceTestCase
 {
@@ -69,7 +70,7 @@
 
   protected void setUp() throws Exception {
     Properties props = new Properties();
-    props.setProperty("mapred.job.tracker.retire.jobs", "false");
+    props.setProperty(JTConfig.JT_RETIREJOBS, "false");
     startCluster(true, props);
   }
 
@@ -169,20 +170,21 @@
       "-reducer", badReducer,
       "-verbose",
       "-inputformat", "org.apache.hadoop.mapred.KeyValueTextInputFormat",
-      "-jobconf", "mapred.skip.attempts.to.start.skipping="+attSkip,
-      "-jobconf", "mapred.skip.out.dir=none",
-      "-jobconf", "mapred.map.max.attempts="+mapperAttempts,
-      "-jobconf", "mapred.reduce.max.attempts="+reducerAttempts,
-      "-jobconf", "mapred.skip.map.max.skip.records="+Long.MAX_VALUE,
-      "-jobconf", "mapred.skip.reduce.max.skip.groups="+Long.MAX_VALUE,
-      "-jobconf", "mapred.map.tasks=1",
-      "-jobconf", "mapred.reduce.tasks=1",
+      "-jobconf", "mapreduce.task.skip.start.attempts="+attSkip,
+      "-jobconf", "mapreduce.job.skip.outdir=none",
+      "-jobconf", "mapreduce.map.maxattempts="+mapperAttempts,
+      "-jobconf", "mapreduce.reduce.maxattempts="+reducerAttempts,
+      "-jobconf", "mapreduce.map.skip.maxrecords="+Long.MAX_VALUE,
+      "-jobconf", "mapreduce.reduce.skip.maxgroups="+Long.MAX_VALUE,
+      "-jobconf", "mapreduce.job.maps=1",
+      "-jobconf", "mapreduce.job.reduces=1",
       "-jobconf", "fs.default.name="+clusterConf.get("fs.default.name"),
-      "-jobconf", "mapred.job.tracker="+clusterConf.get("mapred.job.tracker"),
-      "-jobconf", "mapred.job.tracker.http.address="
-                    +clusterConf.get("mapred.job.tracker.http.address"),
+      "-jobconf", "mapreduce.jobtracker.address=" + 
+                   clusterConf.get(JTConfig.JT_IPC_ADDRESS),
+      "-jobconf", "mapreduce.jobtracker.http.address="
+                    +clusterConf.get(JTConfig.JT_HTTP_ADDRESS),
       "-jobconf", "stream.debug=set",
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
     StreamJob job = new StreamJob(args, false);      
@@ -202,22 +204,22 @@
       "-reducer", badReducer,
       "-verbose",
       "-inputformat", "org.apache.hadoop.mapred.KeyValueTextInputFormat",
-      "-jobconf", "mapred.skip.attempts.to.start.skipping=1",
+      "-jobconf", "mapreduce.task.skip.start.attempts=1",
       //actually fewer attempts are required than specified
       //but to cater to the case of slow processed counter update, need to 
       //have more attempts
-      "-jobconf", "mapred.map.max.attempts=20",
-      "-jobconf", "mapred.reduce.max.attempts=15",
-      "-jobconf", "mapred.skip.map.max.skip.records=1",
-      "-jobconf", "mapred.skip.reduce.max.skip.groups=1",
-      "-jobconf", "mapred.map.tasks=1",
-      "-jobconf", "mapred.reduce.tasks=1",
+      "-jobconf", "mapreduce.map.maxattempts=20",
+      "-jobconf", "mapreduce.reduce.maxattempts=15",
+      "-jobconf", "mapreduce.map.skip.maxrecords=1",
+      "-jobconf", "mapreduce.reduce.skip.maxgroups=1",
+      "-jobconf", "mapreduce.job.maps=1",
+      "-jobconf", "mapreduce.job.reduces=1",
       "-jobconf", "fs.default.name="+clusterConf.get("fs.default.name"),
-      "-jobconf", "mapred.job.tracker="+clusterConf.get("mapred.job.tracker"),
-      "-jobconf", "mapred.job.tracker.http.address="
-                    +clusterConf.get("mapred.job.tracker.http.address"),
+      "-jobconf", "mapreduce.jobtracker.address="+clusterConf.get(JTConfig.JT_IPC_ADDRESS),
+      "-jobconf", "mapreduce.jobtracker.http.address="
+                    +clusterConf.get(JTConfig.JT_HTTP_ADDRESS),
       "-jobconf", "stream.debug=set",
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
     StreamJob job = new StreamJob(args, false);      

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java Fri Sep 18 15:09:48 2009
@@ -68,7 +68,7 @@
       "-reducer", reduce,
       //"-verbose",
       //"-jobconf", "stream.debug=set"
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java Fri Sep 18 15:09:48 2009
@@ -50,10 +50,10 @@
       "-output", OUTPUT_DIR.getAbsolutePath(),
       "-mapper", (failMap ? failingTask : echoTask),
       "-reducer", (failMap ? echoTask : failingTask),
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.non.zero.exit.is.failure=" + exitStatusIsFailure,
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
-      "-jobconf", "io.sort.mb=10"
+      "-jobconf", "mapreduce.task.io.sort.mb=10"
     };
   }
 

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java Fri Sep 18 15:09:48 2009
@@ -51,7 +51,7 @@
       "-reducer", reduce,
       //"-verbose",
       //"-jobconf", "stream.debug=set"
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java Fri Sep 18 15:09:48 2009
@@ -66,7 +66,7 @@
       "-input", INPUT_FILE.getAbsolutePath(),
       "-output", OUTPUT_DIR.getAbsolutePath(),
       "-mapper", "cat",
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.non.zero.exit.is.failure=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java Fri Sep 18 15:09:48 2009
@@ -36,7 +36,7 @@
   protected File INPUT_FILE = new File("TestStreamingSeparator.input.txt");
   protected File OUTPUT_DIR = new File("TestStreamingSeparator.out");
   protected String input = "roses1are.red\nviolets1are.blue\nbunnies1are.pink\n";
-  // key.value.separator.in.input.line reads 1 as separator
+  // mapreduce.input.keyvaluelinerecordreader.key.value.separator reads 1 as separator
   // stream.map.input.field.separator uses 2 as separator
   // map behaves like "/usr/bin/tr 2 3"; (translate 2 to 3)
   protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{"2", "3"});
@@ -45,7 +45,7 @@
   // reduce behaves like "/usr/bin/tr 3 4"; (translate 3 to 4)
   protected String reduce = StreamUtil.makeJavaCommand(TrAppReduce.class, new String[]{"3", "4"});
   // stream.reduce.output.field.separator recognize 4 as separator
-  // mapred.textoutputformat.separator outputs 5 as separator
+  // mapreduce.output.textoutputformat.separator outputs 5 as separator
   protected String outputExpect = "bunnies5are.pink\nroses5are.red\nviolets5are.blue\n";
 
   private StreamJob job;
@@ -73,15 +73,15 @@
       "-reducer", reduce,
       //"-verbose",
       //"-jobconf", "stream.debug=set"
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
       "-inputformat", "KeyValueTextInputFormat",
-      "-jobconf", "key.value.separator.in.input.line=1",
+      "-jobconf", "mapreduce.input.keyvaluelinerecordreader.key.value.separator=1",
       "-jobconf", "stream.map.input.field.separator=2",
       "-jobconf", "stream.map.output.field.separator=3",
       "-jobconf", "stream.reduce.input.field.separator=3",
       "-jobconf", "stream.reduce.output.field.separator=4",
-      "-jobconf", "mapred.textoutputformat.separator=5",
+      "-jobconf", "mapreduce.output.textoutputformat.separator=5",
     };
   }
   

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java Fri Sep 18 15:09:48 2009
@@ -29,6 +29,8 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.TaskReport;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 /**
  * Tests for the ability of a streaming task to set the status
@@ -49,11 +51,11 @@
       "-input", INPUT_FILE,
       "-output", OUTPUT_DIR,
       "-mapper", map,
-      "-jobconf", "mapred.map.tasks=1",
-      "-jobconf", "mapred.reduce.tasks=0",      
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", JobContext.NUM_MAPS + "=1",
+      "-jobconf", JobContext.NUM_REDUCES + "=0",      
+      "-jobconf", JobContext.PRESERVE_FAILED_TASK_FILES + "=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
-      "-jobconf", "mapred.job.tracker=localhost:"+jobtrackerPort,
+      "-jobconf", JTConfig.JT_IPC_ADDRESS + "=localhost:"+jobtrackerPort,
       "-jobconf", "fs.default.name=file:///"
     };
   }
@@ -80,7 +82,7 @@
     MiniMRCluster mr = null;
     FileSystem fs = null;
     JobConf conf = new JobConf();
-    conf.setBoolean("mapred.job.tracker.retire.jobs", false);
+    conf.setBoolean(JTConfig.JT_RETIREJOBS, false);
     try {
       mr = new MiniMRCluster(1, "file:///", 3, null , null, conf);
 

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java Fri Sep 18 15:09:48 2009
@@ -47,8 +47,8 @@
                                                          Integer.toString(duringLines),
                                                          Integer.toString(postLines)}),
       "-reducer", StreamJob.REDUCE_NONE,
-      "-jobconf", "keep.failed.task.files=true",
-      "-jobconf", "mapred.task.timeout=5000",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
+      "-jobconf", "mapreduce.task.timeout=5000",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java Fri Sep 18 15:09:48 2009
@@ -35,6 +35,7 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 /**
  * This test case tests the symlink creation
  * utility provided by distributed caching 
@@ -69,7 +70,7 @@
         mr  = new MiniMRCluster(1, namenode, 3);
         // During tests, the default Configuration will use a local mapred
         // So don't specify -config or -cluster
-        String strJobtracker = "mapred.job.tracker=" + "localhost:" + mr.getJobTrackerPort();
+        String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
         String strNamenode = "fs.default.name=" + namenode;
         String argv[] = new String[] {
           "-input", INPUT_FILE,

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java Fri Sep 18 15:09:48 2009
@@ -54,7 +54,7 @@
       "-output", OUTPUT_DIR.getAbsolutePath(),
       "-mapper", map,
       "-reducer", reduce,
-      "-jobconf", "keep.failed.task.files=true",
+      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
       "-io", "typedbytes"
     };

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java Fri Sep 18 15:09:48 2009
@@ -27,6 +27,8 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.TestMiniMRWithDFS;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.StringUtils;
 
 import junit.framework.TestCase;
@@ -55,9 +57,9 @@
       "-mapper", map,
       "-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer",
       "-numReduceTasks", "0",
-      "-jobconf", "mapred.map.tasks=1",
+      "-jobconf", JobContext.NUM_MAPS + "=1",
       "-jobconf", JobConf.MAPRED_MAP_TASK_ULIMIT + "=" + memLimit,
-      "-jobconf", "mapred.job.tracker=" + "localhost:" +
+      "-jobconf", JTConfig.JT_IPC_ADDRESS + "=localhost:" +
                                            mr.getJobTrackerPort(),
       "-jobconf", "fs.default.name=" + "hdfs://localhost:" 
                    + dfs.getNameNodePort(),

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrApp.java Fri Sep 18 15:09:48 2009
@@ -41,18 +41,18 @@
     // test that some JobConf properties are exposed as expected     
     // Note the dots translated to underscore: 
     // property names have been escaped in PipeMapRed.safeEnvVarName()
-    expectDefined("mapred_local_dir");
+    expectDefined("mapreduce_cluster_local_dir");
     expect("mapred_output_format_class", "org.apache.hadoop.mapred.TextOutputFormat");
-    expect("mapred_output_key_class", "org.apache.hadoop.io.Text");
-    expect("mapred_output_value_class", "org.apache.hadoop.io.Text");
+    expect("mapreduce_job_output_key_class", "org.apache.hadoop.io.Text");
+    expect("mapreduce_job_output_value_class", "org.apache.hadoop.io.Text");
 
-    expect("mapred_task_is_map", "true");
-    expectDefined("mapred_task_id");
+    expect("mapreduce_task_ismap", "true");
+    expectDefined("mapreduce_task_attempt_id");
 
-    expectDefined("map_input_file");
-    expectDefined("map_input_length");
+    expectDefined("mapreduce_map_input_file");
+    expectDefined("mapreduce_map_input_length");
 
-    expectDefined("io_sort_factor");
+    expectDefined("mapreduce_task_io_sort_factor");
 
     // the FileSplit context properties are not available in local hadoop..
     // so can't check them in this test.
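
The underscore-separated names checked here come from the escaping the comment
references; a minimal sketch of PipeMapRed.safeEnvVarName() (not the exact
implementation):

    // Replaces every character that is not a letter or digit with '_',
    // so "mapreduce.task.ismap" is exported to the streaming process as
    // the environment variable "mapreduce_task_ismap".
    static String safeEnvVarName(String var) {
      StringBuilder safe = new StringBuilder();
      for (int i = 0; i < var.length(); i++) {
        char c = var.charAt(i);
        safe.append(Character.isLetterOrDigit(c) ? c : '_');
      }
      return safe.toString();
    }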

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrAppReduce.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrAppReduce.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrAppReduce.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TrAppReduce.java Fri Sep 18 15:09:48 2009
@@ -41,17 +41,17 @@
     // test that some JobConf properties are exposed as expected     
     // Note the dots translated to underscore: 
     // property names have been escaped in PipeMapRed.safeEnvVarName()
-    expect("mapred_job_tracker", "local");
+    expect("mapreduce_jobtracker_address", "local");
     //expect("mapred_local_dir", "build/test/mapred/local");
-    expectDefined("mapred_local_dir");
+    expectDefined("mapreduce_cluster_local_dir");
     expect("mapred_output_format_class", "org.apache.hadoop.mapred.TextOutputFormat");
-    expect("mapred_output_key_class", "org.apache.hadoop.io.Text");
-    expect("mapred_output_value_class", "org.apache.hadoop.io.Text");
+    expect("mapreduce_job_output_key_class", "org.apache.hadoop.io.Text");
+    expect("mapreduce_job_output_value_class", "org.apache.hadoop.io.Text");
 
-    expect("mapred_task_is_map", "false");
-    expectDefined("mapred_task_id");
+    expect("mapreduce_task_ismap", "false");
+    expectDefined("mapreduce_task_attempt_id");
 
-    expectDefined("io_sort_factor");
+    expectDefined("mapreduce_task_io_sort_factor");
 
     // the FileSplit context properties are not available in local hadoop..
     // so can't check them in this test.

Modified: hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/vaidya/src/java/org/apache/hadoop/vaidya/postexdiagnosis/tests/MapSideDiskSpill.java Fri Sep 18 15:09:48 2009
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.vaidya.postexdiagnosis.tests;
 
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.vaidya.statistics.job.JobStatistics;
 import org.apache.hadoop.vaidya.statistics.job.JobStatisticsInterface.JobKeys;
 import org.apache.hadoop.vaidya.statistics.job.JobStatisticsInterface.KeyDataType;
@@ -98,9 +99,14 @@
   public String getPrescription() {
     return 
     "* Use combiner to lower the map output size.\n" +
-      "* Increase map side sort buffer size (io.sort.mb:"+this._job.getJobConf().getInt("io.sort.mb", 0) + ").\n" +
-      "* Increase index buffer size (io.sort.record.percent:"+ this._job.getJobConf().getInt("io.sort.record.percent", 0) + ") if number of Map Output Records are large. \n" +
-      "* Increase (io.sort.spill.percent:"+ this._job.getJobConf().getInt("io.sort.spill.percent", 0) + "), default 0.80 i.e. 80% of sort buffer size and index buffer size. \n";
+      "* Increase map side sort buffer size (" + JobContext.IO_SORT_FACTOR + 
+      ":" + this._job.getJobConf().getInt(JobContext.IO_SORT_MB, 0) + ").\n" +
+      "* Increase index buffer size (" + JobContext.MAP_SORT_RECORD_PERCENT + 
+      ":" + this._job.getJobConf().getInt(JobContext.MAP_SORT_RECORD_PERCENT, 0)
+      + ") if number of Map Output Records are large. \n" +
+      "* Increase (" + JobContext.MAP_SORT_SPILL_PERCENT + ":" + 
+      this._job.getJobConf().getInt(JobContext.MAP_SORT_SPILL_PERCENT, 0) + 
+      "), default 0.80 i.e. 80% of sort buffer size and index buffer size. \n";
   }
 
   /* (non-Javadoc)

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/capacity_scheduler.xml Fri Sep 18 15:09:48 2009
@@ -135,7 +135,7 @@
               <th>Value</th>
             </tr>
             <tr>
-              <td>mapred.jobtracker.taskScheduler</td>
+              <td>mapreduce.jobtracker.taskscheduler</td>
               <td>org.apache.hadoop.mapred.CapacityTaskScheduler</td>
             </tr>
           </table>
@@ -147,7 +147,7 @@
           You can define multiple queues to which users can submit jobs with
           the Capacity Scheduler. To define multiple queues, you should edit
           the site configuration for Hadoop and modify the
-          <em>mapred.queue.names</em> property.
+          <em>mapreduce.jobtracker.taskscheduler.queue.names</em> property.
         </p>
         <p>
           You can also configure ACLs for controlling which users or groups

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml Fri Sep 18 15:09:48 2009
@@ -221,12 +221,12 @@
           <th>Notes</th>
         </tr>
         <tr>
-          <td>mapred.job.tracker</td>
+          <td>mapreduce.jobtracker.address</td>
           <td>Host or IP and port of <code>JobTracker</code>.</td>
           <td><em>host:port</em> pair.</td>
         </tr>
 		    <tr>
-		      <td>mapred.system.dir</td>
+		      <td>mapreduce.jobtracker.system.dir</td>
 		      <td>
 		        Path on the HDFS where where the Map/Reduce framework stores 
 		        system files e.g. <code>/hadoop/mapred/system/</code>.
@@ -237,7 +237,7 @@
 		      </td>
 		    </tr>
 		    <tr>
-		      <td>mapred.local.dir</td>
+		      <td>mapreduce.cluster.local.dir</td>
 		      <td>
 		        Comma-separated list of paths on the local filesystem where 
 		        temporary Map/Reduce data is written.
@@ -264,7 +264,7 @@
 		      </td>
 		    </tr>
 		    <tr>
-		      <td>mapred.hosts/mapred.hosts.exclude</td>
+		      <td>mapreduce.jobtracker.hosts.filename/mapreduce.jobtracker.hosts.exclude.filename</td>
 		      <td>List of permitted/excluded TaskTrackers.</td>
 		      <td>
 		        If necessary, use these files to control the list of allowable 
@@ -284,7 +284,7 @@
             being used, the list of configured queue names must be
             specified here. Once queues are defined, users can submit
             jobs to a queue using the property name 
-            <em>mapred.job.queue.name</em> in the job configuration.
+            <em>mapreduce.job.queuename</em> in the job configuration.
             There could be a separate 
             configuration file for configuring properties of these 
             queues that is managed by the scheduler. 
@@ -383,7 +383,7 @@
                   </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>mapred.reduce.parallel.copies</td>
+                    <td>mapreduce.reduce.shuffle.parallelcopies</td>
                     <td>20</td>
                     <td>
                       Higher number of parallel copies run by reduces to fetch
@@ -392,7 +392,7 @@
                   </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>mapred.map.child.java.opts</td>
+                    <td>mapreduce.map.java.opts</td>
                     <td>-Xmx512M</td>
                     <td>
                       Larger heap-size for child jvms of maps. 
@@ -400,7 +400,7 @@
                   </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>mapred.reduce.child.java.opts</td>
+                    <td>mapreduce.reduce.java.opts</td>
                     <td>-Xmx512M</td>
                     <td>
                       Larger heap-size for child jvms of reduces. 
@@ -417,13 +417,13 @@
                   </tr>
                   <tr>
                     <td>conf/core-site.xml</td>
-                    <td>io.sort.factor</td>
+                    <td>mapreduce.task.io.sort.factor</td>
                     <td>100</td>
                     <td>More streams merged at once while sorting files.</td>
                   </tr>
                   <tr>
                     <td>conf/core-site.xml</td>
-                    <td>io.sort.mb</td>
+                    <td>mapreduce.task.io.sort.mb</td>
                     <td>200</td>
                     <td>Higher memory-limit while sorting data.</td>
                   </tr>
@@ -448,7 +448,7 @@
 		          </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>mapred.job.tracker.handler.count</td>
+                    <td>mapreduce.jobtracker.handler.count</td>
                     <td>60</td>
                     <td>
                       More JobTracker server threads to handle RPCs from large 
@@ -457,13 +457,13 @@
                   </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>mapred.reduce.parallel.copies</td>
+                    <td>mapreduce.reduce.shuffle.parallelcopies</td>
                     <td>50</td>
                     <td></td>
                   </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>tasktracker.http.threads</td>
+                    <td>mapreduce.tasktracker.http.threads</td>
                     <td>50</td>
                     <td>
                       More worker threads for the TaskTracker's http server. The
@@ -473,7 +473,7 @@
                   </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>mapred.map.child.java.opts</td>
+                    <td>mapreduce.map.java.opts</td>
                     <td>-Xmx512M</td>
                     <td>
                       Larger heap-size for child jvms of maps. 
@@ -481,7 +481,7 @@
                   </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>mapred.reduce.child.java.opts</td>
+                    <td>mapreduce.reduce.java.opts</td>
                     <td>-Xmx1024M</td>
                     <td>Larger heap-size for child jvms of reduces.</td>
                   </tr>
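
For a large cluster, the renamed server-side keys above could be combined in mapred-site.xml like so (a sketch; the handler and thread counts are the table's example values):

  <!-- conf/mapred-site.xml: more RPC handlers and shuffle-serving threads -->
  <property>
    <name>mapreduce.jobtracker.handler.count</name>
    <value>60</value>
  </property>
  <property>
    <name>mapreduce.tasktracker.http.threads</name>
    <value>50</value>
  </property>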
@@ -558,7 +558,7 @@
 
     <table>
           <tr><th>Name</th><th>Type</th><th>Description</th></tr>
-          <tr><td>mapred.tasktracker.taskmemorymanager.monitoring-interval</td>
+          <tr><td>mapreduce.tasktracker.taskmemorymanager.monitoringinterval</td>
             <td>long</td>
            <td>The time interval, in milliseconds, at which the TT 
            checks for any memory violation. The default value is 5000 msec
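
Under its new name the interval would be set as follows; a sketch only, with 5000 being the documented default:

  <!-- conf/mapred-site.xml: TT checks for memory violations every 5000 ms -->
  <property>
    <name>mapreduce.tasktracker.taskmemorymanager.monitoringinterval</name>
    <value>5000</value>
  </property>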
@@ -685,7 +685,7 @@
             <th>Property</th><th>Value</th><th>Notes</th>
             </tr>
             <tr>
-            <td>mapred.task.tracker.task-controller</td>
+            <td>mapreduce.tasktracker.taskcontroller</td>
             <td>Fully qualified class name of the task controller class</td>
             <td>Currently there are two implementations of task controller
             in the Hadoop system, DefaultTaskController and LinuxTaskController.
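
With the renamed key, selecting a controller would look roughly like this, assuming the LinuxTaskController implementation lives in the org.apache.hadoop.mapred package:

  <!-- conf/mapred-site.xml: use the setuid-based Linux task controller -->
  <property>
    <name>mapreduce.tasktracker.taskcontroller</name>
    <value>org.apache.hadoop.mapred.LinuxTaskController</value>
  </property>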
@@ -748,8 +748,8 @@
             </p>
             <table><tr><th>Name</th><th>Description</th></tr>
             <tr>
-            <td>mapred.local.dir</td>
-            <td>Path to mapred local directories. Should be same as the value 
+            <td>mapreduce.cluster.local.dir</td>
+            <td>Path to the mapreduce cluster local directories. Should be the same as the value 
            which was provided for this key in mapred-site.xml. This is required to
             validate paths passed to the setuid executable in order to prevent
             arbitrary paths being passed to it.</td>
@@ -766,7 +766,7 @@
             <p>
             The LinuxTaskController requires that paths including and leading up to
             the directories specified in
-            <em>mapred.local.dir</em> and <em>hadoop.log.dir</em> to be set 755
+            <em>mapreduce.cluster.local.dir</em> and <em>hadoop.log.dir</em> be set to 755
             permissions.
             </p>
             </section>
@@ -801,7 +801,7 @@
             monitoring script in <em>mapred-site.xml</em>.</p>
             <table>
             <tr><th>Name</th><th>Description</th></tr>
-            <tr><td><code>mapred.healthChecker.script.path</code></td>
+            <tr><td><code>mapreduce.tasktracker.healthchecker.script.path</code></td>
             <td>Absolute path to the script which is periodically run by the 
             TaskTracker to determine if the node is 
             healthy or not. The file should be executable by the TaskTracker.
@@ -810,18 +810,18 @@
             is not started.</td>
             </tr>
             <tr>
-            <td><code>mapred.healthChecker.interval</code></td>
+            <td><code>mapreduce.tasktracker.healthchecker.interval</code></td>
             <td>Frequency at which the node health script is run, 
             in milliseconds</td>
             </tr>
             <tr>
-            <td><code>mapred.healthChecker.script.timeout</code></td>
+            <td><code>mapreduce.tasktracker.healthchecker.script.timeout</code></td>
             <td>Time after which the node health script will be killed by
             the TaskTracker if unresponsive.
            The node is marked unhealthy if the node health script times out.</td>
             </tr>
             <tr>
-            <td><code>mapred.healthChecker.script.args</code></td>
+            <td><code>mapreduce.tasktracker.healthchecker.script.args</code></td>
             <td>Extra arguments that can be passed to the node health script 
             when launched.
            This should be a comma-separated list of arguments.</td>
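
Taken together, a health-monitoring setup under the renamed keys might read as below; the script path and timing values are hypothetical placeholders:

  <!-- conf/mapred-site.xml: run a node health script once a minute,
       kill it and mark the node unhealthy if it hangs for 10 minutes -->
  <property>
    <name>mapreduce.tasktracker.healthchecker.script.path</name>
    <value>/path/to/node_health.sh</value>
  </property>
  <property>
    <name>mapreduce.tasktracker.healthchecker.interval</name>
    <value>60000</value>
  </property>
  <property>
    <name>mapreduce.tasktracker.healthchecker.script.timeout</name>
    <value>600000</value>
  </property>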
@@ -858,17 +858,17 @@
             <title>History Logging</title>
             
            <p> The job history files are stored in the central location 
-            <code> hadoop.job.history.location </code> which can be on DFS also,
+            <code>mapreduce.jobtracker.jobhistory.location</code>, which can also be on DFS;
            its default value is <code>${HADOOP_LOG_DIR}/history</code>. 
            The history web UI is accessible from the job tracker web UI.</p>
             
            <p> The history files are also logged to a user-specified directory,
-            <code>hadoop.job.history.user.location</code> 
+            <code>mapreduce.job.userhistorylocation</code>, 
            which defaults to the job output directory. The files are stored in
            "_logs/history/" in the specified directory. Hence, by default 
-            they will be in "mapred.output.dir/_logs/history/". User can stop
+            they will be in "mapreduce.output.fileoutputformat.outputdir/_logs/history/". The user can stop
             logging by giving the value <code>none</code> for 
-            <code>hadoop.job.history.user.location</code> </p>
+            <code>mapreduce.job.userhistorylocation</code> </p>
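
For example, a job could opt out of user history logging entirely with the renamed key (a minimal sketch):

  <!-- per-job configuration: disable user-location history logging -->
  <property>
    <name>mapreduce.job.userhistorylocation</name>
    <value>none</value>
  </property>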
             
            <p> The user can view a summary of the history logs in the specified 
            directory using the following command <br/>
@@ -892,9 +892,9 @@
       <section>
         <title>Map/Reduce</title>
         <p>The job tracker restart can recover running jobs if 
-        <code>mapred.jobtracker.restart.recover</code> is set true and 
+        <code>mapreduce.jobtracker.restart.recover</code> is set to true and 
         <a href="#Logging">JobHistory logging</a> is enabled. Also 
-        <code>mapred.jobtracker.job.history.block.size</code> value should be 
+        <code>mapreduce.jobtracker.jobhistory.block.size</code> value should be 
        set to an optimal value so that job history is dumped to disk as soon as 
        possible; a typical value is 3145728 (3 MB).</p>
       </section>
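
Putting the two renamed keys together, a recovery-enabled configuration might read (3145728 is the typical block size cited above):

  <!-- conf/mapred-site.xml: recover running jobs across JobTracker restarts -->
  <property>
    <name>mapreduce.jobtracker.restart.recover</name>
    <value>true</value>
  </property>
  <property>
    <name>mapreduce.jobtracker.jobhistory.block.size</name>
    <value>3145728</value>
  </property>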

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/distcp.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/distcp.xml?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/distcp.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/distcp.xml Fri Sep 18 15:09:48 2009
@@ -331,7 +331,7 @@
           copied by a previous map on a re-execution will be marked as
           &quot;skipped&quot;.</li>
 
-          <li>If a map fails <code>mapred.map.max.attempts</code> times, the
+          <li>If a map fails <code>mapreduce.map.maxattempts</code> times, the
           remaining map tasks will be killed (unless <code>-i</code> is
           set).</li>
 
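The retry limit referenced in the item above can be tuned per copy job; a sketch, assuming the stock default of 4 attempts is too generous for the workload:

  <!-- per-job configuration: give up on a repeatedly failing map sooner -->
  <property>
    <name>mapreduce.map.maxattempts</name>
    <value>2</value>
  </property>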

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml?rev=816664&r1=816663&r2=816664&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/fair_scheduler.xml Fri Sep 18 15:09:48 2009
@@ -108,7 +108,7 @@
       </p>
 <source>
 &lt;property&gt;
-  &lt;name&gt;mapred.jobtracker.taskScheduler&lt;/name&gt;
+  &lt;name&gt;mapreduce.jobtracker.taskscheduler&lt;/name&gt;
   &lt;value&gt;org.apache.hadoop.mapred.FairScheduler&lt;/value&gt;
 &lt;/property&gt;
 </source>
@@ -167,16 +167,16 @@
           </td>
           <td>
             Specify which jobconf property is used to determine the pool that a
-            job belongs in. String, default: <em>user.name</em>
+            job belongs in. String, default: <em>mapreduce.job.user.name</em>
             (i.e. one pool for each user). 
             Another useful value is <em>group.name</em> to create a
             pool per Unix group.
             Finally, a common setting is to use a non-standard property
             such as <em>pool.name</em> as the pool name property, and make it
-            default to <em>user.name</em> through the following setting:<br/>
+            default to <em>mapreduce.job.user.name</em> through the following setting:<br/>
             <code>&lt;property&gt;</code><br/> 
             <code>&nbsp;&nbsp;&lt;name&gt;pool.name&lt;/name&gt;</code><br/>
-            <code>&nbsp;&nbsp;&lt;value&gt;${user.name}&lt;/value&gt;</code><br/>
+            <code>&nbsp;&nbsp;&lt;value&gt;${mapreduce.job.user.name}&lt;/value&gt;</code><br/>
             <code>&lt;/property&gt;</code><br/>
             This allows you to specify the pool name explicitly for some jobs
             through the jobconf (e.g. passing <em>-Dpool.name=&lt;name&gt;</em>
@@ -400,7 +400,7 @@
     &lt;minReduces&gt;5&lt;/minReduces&gt;
     &lt;minSharePreemptionTimeout&gt;300&lt;/minSharePreemptionTimeout&gt;
   &lt;/pool&gt;
-  &lt;user name="sample_user"&gt;
+  &lt;user name="sample_user"&gt;
     &lt;maxRunningJobs&gt;6&lt;/maxRunningJobs&gt;
   &lt;/user&gt;
   &lt;userMaxJobsDefault&gt;3&lt;/userMaxJobsDefault&gt;


