hadoop-mapreduce-commits mailing list archives

From ste...@apache.org
Subject svn commit: r885145 [32/34] - in /hadoop/mapreduce/branches/MAPREDUCE-233: ./ .eclipse.templates/ .eclipse.templates/.launches/ conf/ ivy/ lib/ src/benchmarks/gridmix/ src/benchmarks/gridmix/pipesort/ src/benchmarks/gridmix2/ src/benchmarks/gridmix2/sr...
Date Sat, 28 Nov 2009 20:26:22 GMT
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/ThreadedMapBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/ThreadedMapBenchmark.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/ThreadedMapBenchmark.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/ThreadedMapBenchmark.java Sat Nov 28 20:26:01 2009
@@ -60,7 +60,7 @@
                                 + "ThreadedMapBenchmark"));
   private static Path INPUT_DIR = new Path(BASE_DIR, "input");
   private static Path OUTPUT_DIR = new Path(BASE_DIR, "output");
-  private static final float FACTOR = 2.3f; // io.sort.mb set to 
+  private static final float FACTOR = 2.3f; // mapreduce.task.io.sort.mb set to 
                                             // (FACTOR * data_size) should 
                                             // result in only 1 spill
 
@@ -247,9 +247,9 @@
       job.setNumMapTasks(numMapsPerHost * cluster.getTaskTrackers());
       job.setNumReduceTasks(1);
       
-      // set io.sort.mb to avoid spill
+      // set mapreduce.task.io.sort.mb to avoid spill
       int ioSortMb = (int)Math.ceil(FACTOR * dataSizePerMap);
-      job.set("io.sort.mb", String.valueOf(ioSortMb));
+      job.set(JobContext.IO_SORT_MB, String.valueOf(ioSortMb));
       fs = FileSystem.get(job);
       
       LOG.info("Running sort with 1 spill per map");
@@ -261,12 +261,12 @@
                + " millisec");
       fs.delete(OUTPUT_DIR, true);
       
-      // set io.sort.mb to have multiple spills
+      // set mapreduce.task.io.sort.mb to have multiple spills
       JobConf spilledJob = new JobConf(job, ThreadedMapBenchmark.class);
       ioSortMb = (int)Math.ceil(FACTOR 
                                 * Math.ceil((double)dataSizePerMap 
                                             / numSpillsPerMap));
-      spilledJob.set("io.sort.mb", String.valueOf(ioSortMb));
+      spilledJob.set(JobContext.IO_SORT_MB, String.valueOf(ioSortMb));
       spilledJob.setJobName("threaded-map-benchmark-spilled");
       spilledJob.setJarByClass(ThreadedMapBenchmark.class);
       

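For reference, the spill-control pattern above in isolation. This is a minimal sketch, not part of the commit; it assumes the 0.21-era org.apache.hadoop.mapred.JobContext constants imported elsewhere in this diff, and an illustrative per-map data size:

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.JobContext;

    public class SpillConfigSketch {
      public static void main(String[] args) {
        JobConf job = new JobConf();
        int dataSizePerMap = 100;      // hypothetical per-map output size, in MB
        float factor = 2.3f;           // same headroom factor as the benchmark
        // Size the in-memory sort buffer so each map produces a single spill.
        int ioSortMb = (int) Math.ceil(factor * dataSizePerMap);
        job.set(JobContext.IO_SORT_MB, String.valueOf(ioSortMb));
        System.out.println(JobContext.IO_SORT_MB + " = "
            + job.get(JobContext.IO_SORT_MB));
      }
    }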
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java Sat Nov 28 20:26:01 2009
@@ -47,6 +47,7 @@
 import org.apache.hadoop.mapred.SortValidator.RecordStatsChecker.NonSplitableSequenceFileInputFormat;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
 import org.apache.commons.logging.Log;
 
@@ -226,7 +227,7 @@
   /**
    * A utility that waits for specified amount of time
    */
-  static void waitFor(long duration) {
+  public static void waitFor(long duration) {
     try {
       synchronized (waitLock) {
         waitLock.wait(duration);
@@ -374,9 +375,9 @@
 
     public void configure(JobConf conf) {
       try {
-        String taskId = conf.get("mapred.task.id");
+        String taskId = conf.get(JobContext.TASK_ATTEMPT_ID);
         id = Integer.parseInt(taskId.split("_")[4]);
-        totalMaps = Integer.parseInt(conf.get("mapred.map.tasks"));
+        totalMaps = Integer.parseInt(conf.get(JobContext.NUM_MAPS));
         fs = FileSystem.get(conf);
         signal = new Path(conf.get(getTaskSignalParameter(true)));
       } catch (IOException ioe) {
@@ -595,7 +596,7 @@
   }
 
   // Run a job that will be succeeded and wait until it completes
-  static RunningJob runJobSucceed(JobConf conf, Path inDir, Path outDir)
+  public static RunningJob runJobSucceed(JobConf conf, Path inDir, Path outDir)
          throws IOException {
     conf.setJobName("test-job-succeed");
     conf.setMapperClass(IdentityMapper.class);
@@ -614,7 +615,7 @@
   }
 
   // Run a job that will be failed and wait until it completes
-  static RunningJob runJobFail(JobConf conf, Path inDir, Path outDir)
+  public static RunningJob runJobFail(JobConf conf, Path inDir, Path outDir)
          throws IOException {
     conf.setJobName("test-job-fail");
     conf.setMapperClass(FailMapper.class);
@@ -633,7 +634,7 @@
   }
 
   // Run a job that will be killed and wait until it completes
-  static RunningJob runJobKill(JobConf conf,  Path inDir, Path outDir)
+  public static RunningJob runJobKill(JobConf conf,  Path inDir, Path outDir)
          throws IOException {
 
     conf.setJobName("test-job-kill");
@@ -678,7 +679,8 @@
     public void map(WritableComparable key, Writable value,
         OutputCollector<WritableComparable, Writable> out, Reporter reporter)
         throws IOException {
-
+      //NOTE- the next line is required for the TestDebugScript test to succeed
+      System.err.println("failing map");
       throw new RuntimeException("failing map");
     }
   }
@@ -714,4 +716,16 @@
     fos.close();
   }
 
+  static JobTracker getJobTracker() {
+    JobConf conf = new JobConf();
+    conf.set(JTConfig.JT_IPC_ADDRESS, "localhost:0");
+    conf.set(JTConfig.JT_HTTP_ADDRESS, "0.0.0.0:0");
+    JobTracker jt;
+    try {
+      jt = new JobTracker(conf);
+      return jt;
+    } catch (Exception e) {
+      throw new RuntimeException("Could not start jt", e);
+    }
+  }
 }

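The configure() change above leans on the layout of task attempt ids. A standalone sketch of that parse, using a hypothetical id in the usual attempt_<timestamp>_<jobid>_<type>_<taskid>_<attempt> format that split("_")[4] relies on:

    public class TaskIdParseSketch {
      public static void main(String[] args) {
        // Hypothetical attempt id:
        // attempt_<jt-timestamp>_<job-id>_<m|r>_<task-id>_<attempt-no>
        String taskId = "attempt_200911282026_0001_m_000003_0";
        // Field 4 is the task number within the job, as used by configure().
        int id = Integer.parseInt(taskId.split("_")[4]);
        System.out.println(id);   // prints 3
      }
    }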
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java Sat Nov 28 20:26:01 2009
@@ -145,7 +145,7 @@
         throws IOException {
       Text dumbKey = new Text("");
       while (values.hasNext()) {
-        Text data = (Text) values.next();
+        Text data = values.next();
         output.collect(dumbKey, data);
       }
     }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/join/TestDatamerge.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/join/TestDatamerge.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/join/TestDatamerge.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/join/TestDatamerge.java Sat Nov 28 20:26:01 2009
@@ -50,6 +50,7 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -231,7 +232,7 @@
     JobConf job = new JobConf(conf, c);
     Path base = cluster.getFileSystem().makeQualified(new Path("/"+jointype));
     Path[] src = writeSimpleSrc(base, conf, srcs);
-    job.set("mapred.join.expr", CompositeInputFormat.compose(jointype,
+    job.set("mapreduce.join.expr", CompositeInputFormat.compose(jointype,
         SequenceFileInputFormat.class, src));
     job.setInt("testdatamerge.sources", srcs);
     job.setInputFormat(CompositeInputFormat.class);
@@ -302,7 +303,7 @@
       sb.append(",");
     }
     sb.append(CompositeInputFormat.compose(Fake_IF.class,"raboof") + "))");
-    job.set("mapred.join.expr", sb.toString());
+    job.set("mapreduce.join.expr", sb.toString());
     job.setInputFormat(CompositeInputFormat.class);
     Path outf = new Path(base, "out");
     FileOutputFormat.setOutputPath(job, outf);
@@ -317,7 +318,8 @@
     job.setOutputFormat(SequenceFileOutputFormat.class);
     JobClient.runJob(job);
 
-    FileStatus[] outlist = cluster.getFileSystem().listStatus(outf);
+    FileStatus[] outlist = cluster.getFileSystem().listStatus(outf, 
+                             new Utils.OutputFileUtils.OutputFilesFilter());
     assertEquals(1, outlist.length);
     assertTrue(0 < outlist[0].getLen());
     SequenceFile.Reader r =
@@ -352,7 +354,7 @@
     JobConf job = new JobConf();
     Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
     Path[] src = { new Path(base,"i0"), new Path("i1"), new Path("i2") };
-    job.set("mapred.join.expr", CompositeInputFormat.compose("outer",
+    job.set("mapreduce.join.expr", CompositeInputFormat.compose("outer",
         Fake_IF.class, src));
     job.setInputFormat(CompositeInputFormat.class);
     FileOutputFormat.setOutputPath(job, new Path(base, "out"));

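The renamed join-expression key in isolation. A minimal sketch mirroring the calls above; the input paths are illustrative:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.SequenceFileInputFormat;
    import org.apache.hadoop.mapred.join.CompositeInputFormat;

    public class JoinExprSketch {
      public static void main(String[] args) {
        JobConf job = new JobConf();
        Path[] src = { new Path("/join/i0"), new Path("/join/i1") };
        // "mapreduce.join.expr" replaces the deprecated "mapred.join.expr".
        job.set("mapreduce.join.expr", CompositeInputFormat.compose(
            "outer", SequenceFileInputFormat.class, src));
        job.setInputFormat(CompositeInputFormat.class);
        System.out.println(job.get("mapreduce.join.expr"));
      }
    }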
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/join/TestWrappedRecordReaderClassloader.java Sat Nov 28 20:26:01 2009
@@ -54,7 +54,7 @@
 
     Path base = new Path(testdir, "/empty");
     Path[] src = { new Path(base, "i0"), new Path("i1"), new Path("i2") };
-    job.set("mapred.join.expr", CompositeInputFormat.compose("outer",
+    job.set("mapreduce.join.expr", CompositeInputFormat.compose("outer",
         IF_ClassLoaderChecker.class, src));
 
     CompositeInputFormat<NullWritable> inputFormat = new CompositeInputFormat<NullWritable>();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java Sat Nov 28 20:26:01 2009
@@ -30,10 +30,11 @@
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
+import org.apache.hadoop.mapred.Utils;
 
 
 public class TestKeyFieldBasedComparator extends HadoopTestCase {
@@ -47,7 +48,7 @@
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
     conf = createJobConf();
     localConf = createJobConf();
-    localConf.set("map.output.key.field.separator", " ");
+    localConf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
   }
   public void configure(String keySpec, int expect) throws Exception {
     Path testdir = new Path("build/test/test.mapred.spill");
@@ -68,7 +69,7 @@
     conf.setOutputKeyComparatorClass(KeyFieldBasedComparator.class);
     conf.setKeyFieldComparatorOptions(keySpec);
     conf.setKeyFieldPartitionerOptions("-k1.1,1.1");
-    conf.set("map.output.key.field.separator", " ");
+    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
     conf.setMapperClass(InverseMapper.class);
     conf.setReducerClass(IdentityReducer.class);
     if (!fs.mkdirs(testdir)) {
@@ -94,7 +95,7 @@
     }
     Path[] outputFiles = FileUtil.stat2Paths(
         getFileSystem().listStatus(outDir,
-        new OutputLogFilter()));
+        new Utils.OutputFileUtils.OutputFilesFilter()));
     if (outputFiles.length > 0) {
       InputStream is = getFileSystem().open(outputFiles[0]);
       BufferedReader reader = new BufferedReader(new InputStreamReader(is));

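The replacement filter in isolation. A minimal sketch, assuming the output directory exists on the local filesystem; it keeps the real output files while skipping log/side files, as the replaced OutputLogFilter did:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.Utils;

    public class ListOutputSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path outDir = new Path("build/test/out");   // hypothetical job output dir
        // Keep only real output files (part-*), dropping logs and side files.
        Path[] outputFiles = FileUtil.stat2Paths(fs.listStatus(
            outDir, new Utils.OutputFileUtils.OutputFilesFilter()));
        for (Path p : outputFiles) {
          System.out.println(p);
        }
      }
    }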
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestLineInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestLineInputFormat.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestLineInputFormat.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestLineInputFormat.java Sat Nov 28 20:26:01 2009
@@ -54,7 +54,7 @@
     localFs.delete(workDir, true);
     FileInputFormat.setInputPaths(job, workDir);
     int numLinesPerMap = 5;
-    job.setInt("mapred.line.input.format.linespermap", numLinesPerMap);
+    job.setInt("mapreduce.input.lineinputformat.linespermap", numLinesPerMap);
 
     // for a variety of lengths
     for (int length = 0; length < MAX_LENGTH;

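The renamed lines-per-map knob, as a short sketch:

    import org.apache.hadoop.mapred.JobConf;

    public class LinesPerMapSketch {
      public static void main(String[] args) {
        JobConf job = new JobConf();
        // Replaces the deprecated "mapred.line.input.format.linespermap":
        // each split (and hence each map) gets five input lines.
        job.setInt("mapreduce.input.lineinputformat.linespermap", 5);
        System.out.println(
            job.getInt("mapreduce.input.lineinputformat.linespermap", 1));
      }
    }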
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/TestMultithreadedMapRunner.java Sat Nov 28 20:26:01 2009
@@ -23,6 +23,7 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -92,7 +93,7 @@
 
     conf.setMapRunnerClass(MultithreadedMapRunner.class);
     
-    conf.setInt("mapred.map.multithreadedrunner.threads", 2);
+    conf.setInt(MultithreadedMapper.NUM_THREADS, 2);
 
     if (ioEx) {
       conf.setBoolean("multithreaded.ioException", true);

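A minimal sketch of the pattern above: the old-API multithreaded runner configured through the new-API MultithreadedMapper constant, as the diff does:

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.lib.MultithreadedMapRunner;
    import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper;

    public class MultithreadedSketch {
      public static void main(String[] args) {
        JobConf conf = new JobConf();
        conf.setMapRunnerClass(MultithreadedMapRunner.class);
        // Replaces the raw "mapred.map.multithreadedrunner.threads" string.
        conf.setInt(MultithreadedMapper.NUM_THREADS, 2);
        System.out.println(conf.get(MultithreadedMapper.NUM_THREADS));
      }
    }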
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/lib/db/TestConstructQuery.java Sat Nov 28 20:26:01 2009
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.hadoop.mapred.lib.db;
 
 import java.io.IOException;

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/pipes/TestPipes.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/pipes/TestPipes.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/pipes/TestPipes.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapred/pipes/TestPipes.java Sat Nov 28 20:26:01 2009
@@ -37,11 +37,12 @@
 import org.apache.hadoop.mapred.FileOutputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.OutputLogFilter;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TestMiniMRWithDFS;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.ToolRunner;
 
 import junit.framework.TestCase;
 
@@ -196,7 +197,8 @@
 
     List<String> results = new ArrayList<String>();
     for (Path p:FileUtil.stat2Paths(dfs.getFileSystem().listStatus(outputPath,
-    		                        new OutputLogFilter()))) {
+    		                        new Utils.OutputFileUtils
+    		                                 .OutputFilesFilter()))) {
       results.add(TestMiniMRWithDFS.readOutput(p, job));
     }
     assertEquals("number of reduces is wrong", 
@@ -262,12 +264,14 @@
                        " -program " + 
                        dfs.getFileSystem().makeQualified(wordExec));
     try {
-      Submitter.main(new String[]{"-conf", jobXml.toString(),
+      int ret = ToolRunner.run(new Submitter(),
+                               new String[]{"-conf", jobXml.toString(),
                                   "-input", inDir.toString(),
                                   "-output", nonPipedOutDir.toString(),
                                   "-program", 
                         dfs.getFileSystem().makeQualified(wordExec).toString(),
                                   "-reduces", "2"});
+      assertEquals(0, ret);
     } catch (Exception e) {
       assertTrue("got exception: " + StringUtils.stringifyException(e), false);
     }

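The Submitter change swaps main() for ToolRunner so the exit code can be asserted. A minimal sketch of that pattern; the argument values are illustrative:

    import org.apache.hadoop.mapred.pipes.Submitter;
    import org.apache.hadoop.util.ToolRunner;

    public class PipesSubmitSketch {
      public static void main(String[] args) throws Exception {
        // ToolRunner.run returns the tool's exit status instead of
        // exiting the JVM, so callers (and tests) can assert on it.
        int ret = ToolRunner.run(new Submitter(), new String[] {
            "-input", "/pipes/in",                 // hypothetical paths
            "-output", "/pipes/out",
            "-program", "/pipes/bin/wordcount",
            "-reduces", "2"});
        System.exit(ret);
      }
    }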
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/MapReduceTestUtil.java Sat Nov 28 20:26:01 2009
@@ -43,12 +43,13 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -132,7 +133,7 @@
    */
   public static Job createCopyJob(Configuration conf, Path outdir, 
       Path... indirs) throws Exception {
-    conf.setInt("mapred.map.tasks", 3);
+    conf.setInt(JobContext.NUM_MAPS, 3);
     Job theJob = new Job(conf);
     theJob.setJobName("DataMoveJob");
 
@@ -158,7 +159,7 @@
   public static Job createFailJob(Configuration conf, Path outdir, 
       Path... indirs) throws Exception {
 
-    conf.setInt("mapred.map.max.attempts", 2);
+    conf.setInt(JobContext.MAP_MAX_ATTEMPTS, 2);
     Job theJob = new Job(conf);
     theJob.setJobName("Fail-Job");
 
@@ -213,7 +214,7 @@
     throws IOException, InterruptedException {
       Text dumbKey = new Text("");
       while (values.hasNext()) {
-        Text data = (Text) values.next();
+        Text data = values.next();
         context.write(dumbKey, data);
       }
     }
@@ -370,8 +371,8 @@
   public static TaskAttemptContext createDummyMapTaskAttemptContext(
       Configuration conf) {
     TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
-    conf.set("mapred.task.id", tid.toString());
-    return new TaskAttemptContext(conf, tid);    
+    conf.set(JobContext.TASK_ATTEMPT_ID, tid.toString());
+    return new TaskAttemptContextImpl(conf, tid);    
   }
 
   public static StatusReporter createDummyReporter() {
@@ -395,7 +396,7 @@
     StringBuffer result = new StringBuffer();
 
     Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
-           new OutputLogFilter()));
+           new Utils.OutputFileUtils.OutputFilesFilter()));
     for (Path outputFile : fileList) {
       LOG.info("Path" + ": "+ outputFile);
       BufferedReader file = 

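After the context interface/impl split, tests construct TaskAttemptContextImpl directly. A standalone sketch mirroring createDummyMapTaskAttemptContext above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.TaskType;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class DummyContextSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
        conf.set(JobContext.TASK_ATTEMPT_ID, tid.toString());
        TaskAttemptContext context = new TaskAttemptContextImpl(conf, tid);
        System.out.println(context.getTaskAttemptID());
      }
    }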
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLazyOutput.java Sat Nov 28 20:26:01 2009
@@ -36,7 +36,7 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
@@ -147,7 +147,7 @@
 
       Path[] fileList = 
         FileUtil.stat2Paths(fileSys.listStatus(output1,
-            new OutputLogFilter()));
+            new Utils.OutputFileUtils.OutputFilesFilter()));
       for(int i=0; i < fileList.length; ++i) {
         System.out.println("Test1 File list[" + i + "]" + ": "+ fileList[i]);
       }
@@ -159,7 +159,7 @@
 
       fileList =
         FileUtil.stat2Paths(fileSys.listStatus(output2,
-            new OutputLogFilter()));
+            new Utils.OutputFileUtils.OutputFilesFilter()));
       for(int i=0; i < fileList.length; ++i) {
         System.out.println("Test2 File list[" + i + "]" + ": "+ fileList[i]);
       }
@@ -172,7 +172,7 @@
 
       fileList =
         FileUtil.stat2Paths(fileSys.listStatus(output3,
-            new OutputLogFilter()));
+            new Utils.OutputFileUtils.OutputFilesFilter()));
       for(int i=0; i < fileList.length; ++i) {
         System.out.println("Test3 File list[" + i + "]" + ": "+ fileList[i]);
       }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLocal.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLocal.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLocal.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestMapReduceLocal.java Sat Nov 28 20:26:01 2009
@@ -158,9 +158,14 @@
                                      "REDUCE_INPUT_RECORDS").getValue();
     long mapOut = ctrs.findCounter(COUNTER_GROUP, 
                                    "MAP_OUTPUT_RECORDS").getValue();
+    long reduceOut = ctrs.findCounter(COUNTER_GROUP,
+                                      "REDUCE_OUTPUT_RECORDS").getValue();
+    long reduceGrps = ctrs.findCounter(COUNTER_GROUP,
+                                       "REDUCE_INPUT_GROUPS").getValue();
     assertEquals("map out = combine in", mapOut, combineIn);
     assertEquals("combine out = reduce in", combineOut, reduceIn);
     assertTrue("combine in > combine out", combineIn > combineOut);
+    assertEquals("reduce groups = reduce out", reduceGrps, reduceOut);
     String group = "Random Group";
     CounterGroup ctrGrp = ctrs.getGroup(group);
     assertEquals(0, ctrGrp.size());

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestNoJobSetupCleanup.java Sat Nov 28 20:26:01 2009
@@ -25,11 +25,12 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.mapred.FileAlreadyExistsException;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.HadoopTestCase;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 
 public class TestNoJobSetupCleanup extends HadoopTestCase {
   private static String TEST_ROOT_DIR =
@@ -91,7 +92,7 @@
       super.checkOutputSpecs(job);
       // creating dummy TaskAttemptID
       TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.JOB_SETUP, 0, 0);
-      getOutputCommitter(new TaskAttemptContext(job.getConfiguration(), tid)).
+      getOutputCommitter(new TaskAttemptContextImpl(job.getConfiguration(), tid)).
         setupJob(job);
     }
   }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/TestValueIterReset.java Sat Nov 28 20:26:01 2009
@@ -38,7 +38,7 @@
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
@@ -530,8 +530,8 @@
       job.setMapOutputValueClass(IntWritable.class);
       job.setOutputKeyClass(IntWritable.class);
       job.setOutputValueClass(IntWritable.class);
-      job.getConfiguration().setInt("mapred.job.reduce.markreset.buffer.size",
-                                    128);  
+      job.getConfiguration().
+        setInt(JobContext.REDUCE_MARKRESET_BUFFER_SIZE,128);  
       job.setInputFormatClass(TextInputFormat.class);
       job.setOutputFormatClass(TextOutputFormat.class);
       FileInputFormat.addInputPath(job,
@@ -551,7 +551,7 @@
   private void validateOutput() throws IOException {
     Path[] outputFiles = FileUtil.stat2Paths(
         localFs.listStatus(new Path(TEST_ROOT_DIR + "/out"),
-            new OutputLogFilter()));
+            new Utils.OutputFileUtils.OutputFilesFilter()));
     if (outputFiles.length > 0) {
       InputStream is = localFs.open(outputFiles[0]);
       BufferedReader reader = new BufferedReader(new InputStreamReader(is));

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/filecache/TestTrackerDistributedCacheManager.java Sat Nov 28 20:26:01 2009
@@ -24,36 +24,69 @@
 import java.io.IOException;
 import java.util.Random;
 
+import javax.security.auth.login.LoginException;
+
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.DefaultTaskController;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.TaskController;
+import org.apache.hadoop.mapred.TaskTracker;
+import org.apache.hadoop.mapred.TaskController.InitializationContext;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.mapreduce.filecache.TaskDistributedCacheManager;
 import org.apache.hadoop.mapreduce.filecache.TrackerDistributedCacheManager;
+import org.apache.hadoop.mapreduce.server.tasktracker.TTConfig;
+import org.apache.hadoop.security.UserGroupInformation;
 
 public class TestTrackerDistributedCacheManager extends TestCase {
-  private static final String TEST_LOCAL_DIR_PROP = "test.local.dir";
-  private static String TEST_CACHE_BASE_DIR =
-    new Path(System.getProperty("test.build.data","/tmp/cachebasedir"))
-    .toString().replace(' ', '+');
-  private static String TEST_ROOT_DIR =
-    System.getProperty("test.build.data", "/tmp/distributedcache");
+
+  protected String TEST_ROOT_DIR =
+      new File(System.getProperty("test.build.data", "/tmp"),
+          TestTrackerDistributedCacheManager.class.getSimpleName())
+          .getAbsolutePath();
+
+  protected File ROOT_MAPRED_LOCAL_DIR;
+  private static String TEST_CACHE_BASE_DIR = "cachebasedir";
+  protected int numLocalDirs = 6;
+
   private static final int TEST_FILE_SIZE = 4 * 1024; // 4K
   private static final int LOCAL_CACHE_LIMIT = 5 * 1024; //5K
-  private Configuration conf;
-  private Path firstCacheFile;
-  private Path secondCacheFile;
+  protected Configuration conf;
+  protected Path firstCacheFile;
+  protected Path secondCacheFile;
+  private FileSystem fs;
+
+  protected LocalDirAllocator localDirAllocator = 
+    new LocalDirAllocator(JobConf.MAPRED_LOCAL_DIR_PROPERTY);
 
   @Override
   protected void setUp() throws IOException {
+
+    // Prepare the tests' root dir
+    File TEST_ROOT = new File(TEST_ROOT_DIR);
+    if (!TEST_ROOT.exists()) {
+      TEST_ROOT.mkdirs();
+    }
+
+    // Prepare the tests' mapred-local-dir
+    ROOT_MAPRED_LOCAL_DIR = new File(TEST_ROOT_DIR, "mapred/local");
+    ROOT_MAPRED_LOCAL_DIR.mkdirs();
+
     conf = new Configuration();
-    conf.setLong("local.cache.size", LOCAL_CACHE_LIMIT);
-    conf.set(TEST_LOCAL_DIR_PROP, TEST_ROOT_DIR);
+    conf.setLong(TTConfig.TT_LOCAL_CACHE_SIZE, LOCAL_CACHE_LIMIT);
+    conf.set(JobConf.MAPRED_LOCAL_DIR_PROPERTY, ROOT_MAPRED_LOCAL_DIR.toString());
     conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
+    fs = FileSystem.get(conf);
+
+    // Create the temporary cache files to be used in the tests.
     firstCacheFile = new Path(TEST_ROOT_DIR, "firstcachefile");
     secondCacheFile = new Path(TEST_ROOT_DIR, "secondcachefile");
     createTempFile(firstCacheFile);
@@ -62,29 +95,43 @@
 
   /**
    * This is the typical flow for using the DistributedCache classes.
+   * 
+   * @throws IOException
+   * @throws LoginException
    */
-  public void testManagerFlow() throws IOException {
-    TrackerDistributedCacheManager manager = 
-        new TrackerDistributedCacheManager(conf);
-    LocalDirAllocator localDirAllocator = 
-        new LocalDirAllocator(TEST_LOCAL_DIR_PROP);
+  public void testManagerFlow() throws IOException, LoginException {
 
+    // ****** Imitate JobClient code
     // Configures a task/job with both a regular file and a "classpath" file.
     Configuration subConf = new Configuration(conf);
     DistributedCache.addCacheFile(firstCacheFile.toUri(), subConf);
     DistributedCache.addFileToClassPath(secondCacheFile, subConf);
     TrackerDistributedCacheManager.determineTimestamps(subConf);
+    // ****** End of imitating JobClient code
 
     Path jobFile = new Path(TEST_ROOT_DIR, "job.xml");
     FileOutputStream os = new FileOutputStream(new File(jobFile.toString()));
     subConf.writeXml(os);
     os.close();
 
+    String userName = getJobOwnerName();
+
+    // ****** Imitate TaskRunner code.
+    TrackerDistributedCacheManager manager = 
+      new TrackerDistributedCacheManager(conf);
     TaskDistributedCacheManager handle =
       manager.newTaskDistributedCacheManager(subConf);
     assertNull(null, DistributedCache.getLocalCacheFiles(subConf));
-    handle.setup(localDirAllocator, 
-        new File(new Path(TEST_ROOT_DIR, "workdir").toString()), "distcache");
+    File workDir = new File(new Path(TEST_ROOT_DIR, "workdir").toString());
+    handle.setup(localDirAllocator, workDir, TaskTracker
+        .getDistributedCacheDir(userName));
+
+    InitializationContext context = new InitializationContext();
+    context.user = userName;
+    context.workDir = workDir;
+    getTaskController().initializeDistributedCache(context);
+    // ****** End of imitating TaskRunner code
+
     Path[] localCacheFiles = DistributedCache.getLocalCacheFiles(subConf);
     assertNotNull(null, localCacheFiles);
     assertEquals(2, localCacheFiles.length);
@@ -97,35 +144,63 @@
     assertEquals(1, handle.getClassPaths().size());
     assertEquals(cachedSecondFile.toString(), handle.getClassPaths().get(0));
 
+    checkFilePermissions(localCacheFiles);
+
     // Cleanup
     handle.release();
     manager.purgeCache();
     assertFalse(pathToFile(cachedFirstFile).exists());
   }
 
+  /**
+   * Check proper permissions on the cache files
+   * 
+   * @param localCacheFiles
+   * @throws IOException
+   */
+  protected void checkFilePermissions(Path[] localCacheFiles)
+      throws IOException {
+    Path cachedFirstFile = localCacheFiles[0];
+    Path cachedSecondFile = localCacheFiles[1];
+    // Both the files should have executable permissions on them.
+    assertTrue("First cache file is not executable!", new File(cachedFirstFile
+        .toUri().getPath()).canExecute());
+    assertTrue("Second cache file is not executable!", new File(
+        cachedSecondFile.toUri().getPath()).canExecute());
+  }
+
+  protected TaskController getTaskController() {
+    return new DefaultTaskController();
+  }
+
+  protected String getJobOwnerName() throws LoginException {
+    UserGroupInformation ugi = UserGroupInformation.login(conf);
+    return ugi.getUserName();
+  }
 
   /** test delete cache */
   public void testDeleteCache() throws Exception {
     TrackerDistributedCacheManager manager = 
         new TrackerDistributedCacheManager(conf);
     FileSystem localfs = FileSystem.getLocal(conf);
+    long now = System.currentTimeMillis();
 
     manager.getLocalCache(firstCacheFile.toUri(), conf, 
-        new Path(TEST_CACHE_BASE_DIR), null, false, 
-        System.currentTimeMillis(), new Path(TEST_ROOT_DIR), false);
-    manager.releaseCache(firstCacheFile.toUri(), conf);
+        TEST_CACHE_BASE_DIR, fs.getFileStatus(firstCacheFile), false,
+        now, new Path(TEST_ROOT_DIR), false);
+    manager.releaseCache(firstCacheFile.toUri(), conf, now);
     //in above code,localized a file of size 4K and then release the cache 
     // which will cause the cache be deleted when the limit goes out. 
     // The below code localize another cache which's designed to
     //sweep away the first cache.
     manager.getLocalCache(secondCacheFile.toUri(), conf, 
-        new Path(TEST_CACHE_BASE_DIR), null, false, 
+        TEST_CACHE_BASE_DIR, fs.getFileStatus(secondCacheFile), false, 
         System.currentTimeMillis(), new Path(TEST_ROOT_DIR), false);
     FileStatus[] dirStatuses = localfs.listStatus(
-        new Path(TEST_CACHE_BASE_DIR));
+      new Path(ROOT_MAPRED_LOCAL_DIR.toString()));
     assertTrue("DistributedCache failed deleting old" + 
         " cache when the cache store is full.",
-        dirStatuses.length > 1);
+        dirStatuses.length == 1);
   }
   
   public void testFileSystemOtherThanDefault() throws Exception {
@@ -135,7 +210,8 @@
     Path fileToCache = new Path("fakefile:///"
         + firstCacheFile.toUri().getPath());
     Path result = manager.getLocalCache(fileToCache.toUri(), conf,
-        new Path(TEST_CACHE_BASE_DIR), null, false, System.currentTimeMillis(),
+        TEST_CACHE_BASE_DIR, fs.getFileStatus(firstCacheFile), false,
+        System.currentTimeMillis(),
         new Path(TEST_ROOT_DIR), false);
     assertNotNull("DistributedCache cached file on non-default filesystem.",
         result);
@@ -155,15 +231,110 @@
   protected void tearDown() throws IOException {
     new File(firstCacheFile.toString()).delete();
     new File(secondCacheFile.toString()).delete();
+    FileUtil.fullyDelete(new File(TEST_ROOT_DIR));
   }
 
-  private void assertFileLengthEquals(Path a, Path b) 
+  protected void assertFileLengthEquals(Path a, Path b) 
       throws FileNotFoundException {
     assertEquals("File sizes mismatch.", 
        pathToFile(a).length(), pathToFile(b).length());
   }
 
-  private File pathToFile(Path p) {
+  protected File pathToFile(Path p) {
     return new File(p.toString());
   }
+  
+  public static class FakeFileSystem extends RawLocalFileSystem {
+    private long increment = 0;
+    public FakeFileSystem() {
+      super();
+    }
+    
+    public FileStatus getFileStatus(Path p) throws IOException {
+      File f = pathToFile(p);
+      return new FileStatus(f.length(), f.isDirectory(), 1, 128,
+      f.lastModified() + increment, makeQualified(new Path(f.getPath())));
+    }
+    
+    void advanceClock(long millis) {
+      increment += millis;
+    }
+  }
+  
+  public void testFreshness() throws Exception {
+    Configuration myConf = new Configuration(conf);
+    myConf.set("fs.default.name", "refresh:///");
+    myConf.setClass("fs.refresh.impl", FakeFileSystem.class, FileSystem.class);
+    TrackerDistributedCacheManager manager = 
+      new TrackerDistributedCacheManager(myConf);
+    // ****** Imitate JobClient code
+    // Configures a task/job with both a regular file and a "classpath" file.
+    Configuration subConf = new Configuration(myConf);
+    DistributedCache.addCacheFile(firstCacheFile.toUri(), subConf);
+    TrackerDistributedCacheManager.determineTimestamps(subConf);
+    // ****** End of imitating JobClient code
+
+    String userName = getJobOwnerName();
+
+    // ****** Imitate TaskRunner code.
+    TaskDistributedCacheManager handle =
+      manager.newTaskDistributedCacheManager(subConf);
+    assertNull(null, DistributedCache.getLocalCacheFiles(subConf));
+    File workDir = new File(new Path(TEST_ROOT_DIR, "workdir").toString());
+    handle.setup(localDirAllocator, workDir, TaskTracker
+        .getDistributedCacheDir(userName));
+    // ****** End of imitating TaskRunner code
+
+    Path[] localCacheFiles = DistributedCache.getLocalCacheFiles(subConf);
+    assertNotNull(null, localCacheFiles);
+    assertEquals(1, localCacheFiles.length);
+    Path cachedFirstFile = localCacheFiles[0];
+    assertFileLengthEquals(firstCacheFile, cachedFirstFile);
+    assertFalse("Paths should be different.", 
+        firstCacheFile.equals(cachedFirstFile));
+    // release
+    handle.release();
+    
+    // change the file timestamp
+    FileSystem fs = FileSystem.get(myConf);
+    ((FakeFileSystem)fs).advanceClock(1);
+
+    // running a task of the same job
+    Throwable th = null;
+    try {
+      handle.setup(localDirAllocator, workDir, TaskTracker
+          .getDistributedCacheDir(userName));
+    } catch (IOException ie) {
+      th = ie;
+    }
+    assertNotNull("Throwable is null", th);
+    assertTrue("Exception message does not match",
+        th.getMessage().contains("has changed on HDFS since job started"));
+    // release
+    handle.release();
+    
+    // submit another job
+    Configuration subConf2 = new Configuration(myConf);
+    DistributedCache.addCacheFile(firstCacheFile.toUri(), subConf2);
+    TrackerDistributedCacheManager.determineTimestamps(subConf2);
+    
+    handle =
+      manager.newTaskDistributedCacheManager(subConf2);
+    handle.setup(localDirAllocator, workDir, TaskTracker
+        .getDistributedCacheDir(userName));
+    Path[] localCacheFiles2 = DistributedCache.getLocalCacheFiles(subConf2);
+    assertNotNull(null, localCacheFiles2);
+    assertEquals(1, localCacheFiles2.length);
+    Path cachedFirstFile2 = localCacheFiles2[0];
+    assertFileLengthEquals(firstCacheFile, cachedFirstFile2);
+    assertFalse("Paths should be different.", 
+        firstCacheFile.equals(cachedFirstFile2));
+    
+    // assert that two localizations point to different paths
+    assertFalse("two jobs with different timestamps did not localize" +
+        " in different paths", cachedFirstFile.equals(cachedFirstFile2));
+    // release
+    handle.release();
+  }
+
 }

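The "imitate JobClient" half of the flow exercised above, in isolation. A minimal sketch assuming a local default filesystem; the temp file stands in for a real cache artifact:

    import java.io.File;
    import java.io.FileOutputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.filecache.DistributedCache;
    import org.apache.hadoop.mapreduce.filecache.TrackerDistributedCacheManager;

    public class CacheSetupSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
        File f = new File(System.getProperty("java.io.tmpdir"), "firstcachefile");
        new FileOutputStream(f).close();   // the cached file must exist
        Path cacheFile = new Path(f.getAbsolutePath());
        // Register a regular cache file and a "classpath" cache file,
        // then stamp their modification times into the conf.
        DistributedCache.addCacheFile(cacheFile.toUri(), conf);
        DistributedCache.addFileToClassPath(cacheFile, conf);
        TrackerDistributedCacheManager.determineTimestamps(conf);
        System.out.println(DistributedCache.getCacheFiles(conf)[0]);
      }
    }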
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java Sat Nov 28 20:26:01 2009
@@ -21,7 +21,7 @@
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
@@ -84,10 +84,10 @@
     System.out.println("inputData:");
     System.out.println(inputData.toString());
 
-    conf.setInt("aggregator.descriptor.num", 1);
-    conf.set("aggregator.descriptor.0", 
+    conf.setInt(ValueAggregatorJobBase.DESCRIPTOR_NUM, 1);
+    conf.set(ValueAggregatorJobBase.DESCRIPTOR + ".0", 
       "UserDefined,org.apache.hadoop.mapreduce.lib.aggregate.AggregatorTests");
-    conf.setLong("aggregate.max.num.unique.values", 14);
+    conf.setLong(UniqValueCount.MAX_NUM_UNIQUE_VALUES, 14);
     
     Job job = new Job(conf);
     FileInputFormat.setInputPaths(job, INPUT_DIR);
@@ -127,7 +127,7 @@
     FileSystem fs = outDir.getFileSystem(conf);
     StringBuffer result = new StringBuffer();
     Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
-                        new OutputLogFilter()));
+                        new Utils.OutputFileUtils.OutputFilesFilter()));
     for(int i=0; i < fileList.length; ++i) {
       BufferedReader file = 
         new BufferedReader(new InputStreamReader(fs.open(fileList[i])));

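The aggregate-framework constants in isolation, mirroring the conf calls above (assuming the constants live on the 0.21 aggregate classes, as the test's package-local references suggest):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.lib.aggregate.UniqValueCount;
    import org.apache.hadoop.mapreduce.lib.aggregate.ValueAggregatorJobBase;

    public class AggregateConfSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Replaces the raw "aggregator.descriptor.*" strings.
        conf.setInt(ValueAggregatorJobBase.DESCRIPTOR_NUM, 1);
        conf.set(ValueAggregatorJobBase.DESCRIPTOR + ".0",
            "UserDefined,org.apache.hadoop.mapreduce.lib.aggregate.AggregatorTests");
        conf.setLong(UniqValueCount.MAX_NUM_UNIQUE_VALUES, 14);
        System.out.println(conf.get(ValueAggregatorJobBase.DESCRIPTOR + ".0"));
      }
    }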
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/fieldsel/TestMRFieldSelection.java Sat Nov 28 20:26:01 2009
@@ -52,9 +52,10 @@
     StringBuffer expectedOutput = new StringBuffer();
     constructInputOutputData(inputData, expectedOutput, numOfInputLines);
     
-    conf.set("mapred.data.field.separator", "-");
-    conf.set("map.output.key.value.fields.spec", "6,5,1-3:0-");
-    conf.set("reduce.output.key.value.fields.spec", ":4,3,2,1,0,0-");
+    conf.set(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "-");
+    conf.set(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "6,5,1-3:0-");
+    conf.set(
+      FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, ":4,3,2,1,0,0-");
     Job job = MapReduceTestUtil.createJob(conf, inDir, outDir,
       1, 1, inputData.toString());
     job.setMapperClass(FieldSelectionMapper.class);

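The same constant-over-string move for field selection; note that SEPERATOR is the constant's actual (misspelled) name. A minimal sketch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper;

    public class FieldSelSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "-");
        conf.set(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "6,5,1-3:0-");
        conf.set(FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC,
            ":4,3,2,1,0,0-");
        System.out.println(
            conf.get(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC));
      }
    }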
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java Sat Nov 28 20:26:01 2009
@@ -18,9 +18,15 @@
 
 package org.apache.hadoop.mapreduce.lib.input;
 
-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.BitSet;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
 
 import org.apache.commons.logging.*;
 import org.apache.hadoop.conf.Configuration;
@@ -33,19 +39,25 @@
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.task.MapContextImpl;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.ReflectionUtils;
 
-public class TestMRKeyValueTextInputFormat extends TestCase {
+import org.junit.Test;
+import static junit.framework.Assert.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class TestMRKeyValueTextInputFormat {
   private static final Log LOG =
     LogFactory.getLog(TestMRKeyValueTextInputFormat.class.getName());
 
-  private static int MAX_LENGTH = 10000;
-  
   private static Configuration defaultConf = new Configuration();
   private static FileSystem localFs = null; 
   static {
     try {
+      defaultConf.set("fs.default.name", "file:///");
       localFs = FileSystem.getLocal(defaultConf);
     } catch (IOException e) {
       throw new RuntimeException("init failure", e);
@@ -55,8 +67,9 @@
     new Path(new Path(System.getProperty("test.build.data", "."), "data"),
              "TestKeyValueTextInputFormat");
   
+  @Test
   public void testFormat() throws Exception {
-    Job job = new Job(new Configuration());
+    Job job = new Job(new Configuration(defaultConf));
     Path file = new Path(workDir, "test.txt");
 
     int seed = new Random().nextInt();
@@ -66,6 +79,7 @@
     localFs.delete(workDir, true);
     FileInputFormat.setInputPaths(job, workDir);
 
+    final int MAX_LENGTH = 10000;
     // for a variety of lengths
     for (int length = 0; length < MAX_LENGTH;
          length += random.nextInt(MAX_LENGTH / 10) + 1) {
@@ -105,7 +119,7 @@
           assertEquals("reader class is KeyValueLineRecordReader.", 
             KeyValueLineRecordReader.class, clazz);
           MapContext<Text, Text, Text, Text> mcontext = 
-            new MapContext<Text, Text, Text, Text>(job.getConfiguration(), 
+            new MapContextImpl<Text, Text, Text, Text>(job.getConfiguration(), 
             context.getTaskAttemptID(), reader, null, null, 
             MapReduceTestUtil.createDummyReporter(), splits.get(j));
           reader.initialize(splits.get(j), mcontext);
@@ -121,7 +135,10 @@
               value = reader.getCurrentValue();
               clazz = value.getClass();
               assertEquals("Value class is Text.", Text.class, clazz);
-              int v = Integer.parseInt(value.toString());
+              final int k = Integer.parseInt(key.toString());
+              final int v = Integer.parseInt(value.toString());
+              assertEquals("Bad key", 0, k % 2);
+              assertEquals("Mismatched key/value", k / 2, v);
               LOG.debug("read " + v);
               assertFalse("Key in multiple partitions.", bits.get(v));
               bits.set(v);
@@ -137,12 +154,113 @@
 
     }
   }
+
+  @Test
+  public void testSplitableCodecs() throws Exception {
+    final Job job = new Job(defaultConf);
+    final Configuration conf = job.getConfiguration();
+
+    // Create the codec
+    CompressionCodec codec = null;
+    try {
+      codec = (CompressionCodec)
+      ReflectionUtils.newInstance(conf.getClassByName("org.apache.hadoop.io.compress.BZip2Codec"), conf);
+    } catch (ClassNotFoundException cnfe) {
+      throw new IOException("Illegal codec!");
+    }
+    Path file = new Path(workDir, "test"+codec.getDefaultExtension());
+
+    int seed = new Random().nextInt();
+    LOG.info("seed = " + seed);
+    Random random = new Random(seed);
+
+    localFs.delete(workDir, true);
+    FileInputFormat.setInputPaths(job, workDir);
+
+    final int MAX_LENGTH = 500000;
+    FileInputFormat.setMaxInputSplitSize(job, MAX_LENGTH / 20);
+    // for a variety of lengths
+    for (int length = 0; length < MAX_LENGTH;
+         length += random.nextInt(MAX_LENGTH / 4) + 1) {
+
+      LOG.info("creating; entries = " + length);
+
+      // create a file with length entries
+      Writer writer =
+        new OutputStreamWriter(codec.createOutputStream(localFs.create(file)));
+      try {
+        for (int i = 0; i < length; i++) {
+          writer.write(Integer.toString(i * 2));
+          writer.write("\t");
+          writer.write(Integer.toString(i));
+          writer.write("\n");
+        }
+      } finally {
+        writer.close();
+      }
+
+      // try splitting the file in a variety of sizes
+      KeyValueTextInputFormat format = new KeyValueTextInputFormat();
+      assertTrue("KVTIF claims not splittable", format.isSplitable(job, file));
+      for (int i = 0; i < 3; i++) {
+        int numSplits = random.nextInt(MAX_LENGTH / 2000) + 1;
+        LOG.info("splitting: requesting = " + numSplits);
+        List<InputSplit> splits = format.getSplits(job);
+        LOG.info("splitting: got =        " + splits.size());
+
+        // check each split
+        BitSet bits = new BitSet(length);
+        for (int j = 0; j < splits.size(); j++) {
+          LOG.debug("split["+j+"]= " + splits.get(j));
+          TaskAttemptContext context = MapReduceTestUtil.
+            createDummyMapTaskAttemptContext(job.getConfiguration());
+          RecordReader<Text, Text> reader = format.createRecordReader(
+            splits.get(j), context);
+          Class<?> clazz = reader.getClass();
+          MapContext<Text, Text, Text, Text> mcontext =
+            new MapContextImpl<Text, Text, Text, Text>(job.getConfiguration(),
+            context.getTaskAttemptID(), reader, null, null,
+            MapReduceTestUtil.createDummyReporter(), splits.get(j));
+          reader.initialize(splits.get(j), mcontext);
+
+          Text key = null;
+          Text value = null;
+          try {
+            int count = 0;
+            while (reader.nextKeyValue()) {
+              key = reader.getCurrentKey();
+              value = reader.getCurrentValue();
+              final int k = Integer.parseInt(key.toString());
+              final int v = Integer.parseInt(value.toString());
+              assertEquals("Bad key", 0, k % 2);
+              assertEquals("Mismatched key/value", k / 2, v);
+              LOG.debug("read " + k + "," + v);
+              assertFalse(k + "," + v + " in multiple partitions.",bits.get(v));
+              bits.set(v);
+              count++;
+            }
+            if (count > 0) {
+              LOG.info("splits["+j+"]="+splits.get(j)+" count=" + count);
+            } else {
+              LOG.debug("splits["+j+"]="+splits.get(j)+" count=" + count);
+            }
+          } finally {
+            reader.close();
+          }
+        }
+        assertEquals("Some keys in no partition.", length, bits.cardinality());
+      }
+
+    }
+  }
+
   private LineReader makeStream(String str) throws IOException {
     return new LineReader(new ByteArrayInputStream
                                            (str.getBytes("UTF-8")), 
                                            defaultConf);
   }
   
+  @Test
   public void testUTF8() throws Exception {
     LineReader in = makeStream("abcd\u20acbdcd\u20ac");
     Text line = new Text();
@@ -154,6 +272,7 @@
     assertEquals("split on fake newline", "abc\u200axyz", line.toString());
   }
 
+  @Test
   public void testNewLines() throws Exception {
     LineReader in = makeStream("a\nbb\n\nccc\rdddd\r\neeeee");
     Text out = new Text();
@@ -194,7 +313,7 @@
     RecordReader<Text, Text> reader = format.createRecordReader(split, 
       MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));
     MapContext<Text, Text, Text, Text> mcontext = 
-      new MapContext<Text, Text, Text, Text>(conf, 
+      new MapContextImpl<Text, Text, Text, Text>(conf, 
       context.getTaskAttemptID(), reader, null, null,
       MapReduceTestUtil.createDummyReporter(), 
       split);
@@ -208,8 +327,9 @@
   /**
    * Test using the gzip codec for reading
    */
-  public static void testGzip() throws IOException, InterruptedException {
-    Configuration conf = new Configuration();
+  @Test
+  public void testGzip() throws IOException, InterruptedException {
+    Configuration conf = new Configuration(defaultConf);
     CompressionCodec gzip = new GzipCodec();
     ReflectionUtils.setConf(gzip, conf);
     localFs.delete(workDir, true);

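Most hunks in this patch make the same mechanical change: the new-API context classes became interfaces, so test code that drives a RecordReader by hand now instantiates org.apache.hadoop.mapreduce.task.MapContextImpl. A minimal sketch of that pattern, assuming the test-only MapReduceTestUtil helpers used above and an illustrative TextInputFormat split:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.*;
    import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
    import org.apache.hadoop.mapreduce.task.MapContextImpl;

    public class ReaderDriverSketch {
      static void dumpSplit(Configuration conf, InputSplit split)
          throws Exception {
        TaskAttemptContext context =
          MapReduceTestUtil.createDummyMapTaskAttemptContext(conf);
        RecordReader<LongWritable, Text> reader =
          new TextInputFormat().createRecordReader(split, context);
        // writer and committer are null: the test reads but never writes
        MapContext<LongWritable, Text, LongWritable, Text> mcontext =
          new MapContextImpl<LongWritable, Text, LongWritable, Text>(
            conf, context.getTaskAttemptID(), reader, null, null,
            MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);
        try {
          while (reader.nextKeyValue()) {
            // getCurrentKey()/getCurrentValue() replace the old next(k, v)
            System.out.println(reader.getCurrentKey() + "\t"
                + reader.getCurrentValue());
          }
        } finally {
          reader.close();
        }
      }
    }
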
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsBinaryInputFormat.java Sat Nov 28 20:26:01 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.task.MapContextImpl;
 
 import junit.framework.TestCase;
 
@@ -79,7 +80,7 @@
       RecordReader<BytesWritable, BytesWritable> reader =
             bformat.createRecordReader(split, context);
       MapContext<BytesWritable, BytesWritable, BytesWritable, BytesWritable> 
-        mcontext = new MapContext<BytesWritable, BytesWritable,
+        mcontext = new MapContextImpl<BytesWritable, BytesWritable,
           BytesWritable, BytesWritable>(job.getConfiguration(), 
           context.getTaskAttemptID(), reader, null, null, 
           MapReduceTestUtil.createDummyReporter(), 

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileAsTextInputFormat.java Sat Nov 28 20:26:01 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.task.MapContextImpl;
 import org.apache.hadoop.conf.*;
 
 public class TestMRSequenceFileAsTextInputFormat extends TestCase {
@@ -84,7 +85,7 @@
           RecordReader<Text, Text> reader =
             format.createRecordReader(split, context);
           MapContext<Text, Text, Text, Text> mcontext = 
-            new MapContext<Text, Text, Text, Text>(job.getConfiguration(), 
+            new MapContextImpl<Text, Text, Text, Text>(job.getConfiguration(), 
             context.getTaskAttemptID(), reader, null, null, 
             MapReduceTestUtil.createDummyReporter(), 
             split);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMRSequenceFileInputFilter.java Sat Nov 28 20:26:01 2009
@@ -33,6 +33,7 @@
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.task.MapContextImpl;
 import org.apache.hadoop.conf.*;
 
 public class TestMRSequenceFileInputFilter extends TestCase {
@@ -96,7 +97,7 @@
       RecordReader<Text, BytesWritable> reader =
         format.createRecordReader(split, context);
       MapContext<Text, BytesWritable, Text, BytesWritable> mcontext = 
-        new MapContext<Text, BytesWritable, Text, BytesWritable>(
+        new MapContextImpl<Text, BytesWritable, Text, BytesWritable>(
         job.getConfiguration(), 
         context.getTaskAttemptID(), reader, null, null, 
         MapReduceTestUtil.createDummyReporter(), split);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestMultipleInputs.java Sat Nov 28 20:26:01 2009
@@ -17,20 +17,128 @@
  */
 package org.apache.hadoop.mapreduce.lib.input;
 
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.util.Map;
 
-import junit.framework.TestCase;
-
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.HadoopTestCase;
+import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * @see TestDelegatingInputFormat
  */
-public class TestMultipleInputs extends TestCase {
+public class TestMultipleInputs extends HadoopTestCase {
+
+  public TestMultipleInputs() throws IOException {
+    super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
+  }
+
+  private static final Path ROOT_DIR = new Path("testing/mo");
+  private static final Path IN1_DIR = new Path(ROOT_DIR, "input1");
+  private static final Path IN2_DIR = new Path(ROOT_DIR, "input2");
+  private static final Path OUT_DIR = new Path(ROOT_DIR, "output");
+
+  private Path getDir(Path dir) {
+    // Hack for local FS that does not have the concept of a 'mounting point'
+    if (isLocalFS()) {
+      String localPathRoot = System.getProperty("test.build.data", "/tmp")
+          .replace(' ', '+');
+      dir = new Path(localPathRoot, dir);
+    }
+    return dir;
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    Path rootDir = getDir(ROOT_DIR);
+    Path in1Dir = getDir(IN1_DIR);
+    Path in2Dir = getDir(IN2_DIR);
+
+    Configuration conf = createJobConf();
+    FileSystem fs = FileSystem.get(conf);
+    fs.delete(rootDir, true);
+    if (!fs.mkdirs(in1Dir)) {
+      throw new IOException("Mkdirs failed to create " + in1Dir.toString());
+    }
+    if (!fs.mkdirs(in2Dir)) {
+      throw new IOException("Mkdirs failed to create " + in2Dir.toString());
+    }
+  }
+
+  @Test
+  public void testDoMultipleInputs() throws IOException {
+    Path in1Dir = getDir(IN1_DIR);
+    Path in2Dir = getDir(IN2_DIR);
+
+    Path outDir = getDir(OUT_DIR);
+
+    Configuration conf = createJobConf();
+    FileSystem fs = FileSystem.get(conf);
+    fs.delete(outDir, true);
+
+    DataOutputStream file1 = fs.create(new Path(in1Dir, "part-0"));
+    file1.writeBytes("a\nb\nc\nd\ne");
+    file1.close();
+
+    // write tab-delimited lines to the second file because it is read
+    // with KeyValueTextInputFormat
+    DataOutputStream file2 = fs.create(new Path(in2Dir, "part-0"));
+    file2.writeBytes("a\tblah\nb\tblah\nc\tblah\nd\tblah\ne\tblah");
+    file2.close();
+
+    Job job = new Job(conf);
+    job.setJobName("mi");
+
+    MultipleInputs.addInputPath(job, in1Dir, TextInputFormat.class,
+        MapClass.class);
+    MultipleInputs.addInputPath(job, in2Dir, KeyValueTextInputFormat.class,
+        KeyValueMapClass.class);
+
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(Text.class);
+    job.setOutputKeyClass(NullWritable.class);
+    job.setOutputValueClass(Text.class);
+    job.setReducerClass(ReducerClass.class);
+    FileOutputFormat.setOutputPath(job, outDir);
+
+    boolean success = false;
+    try {
+      success = job.waitForCompletion(true);
+    } catch (InterruptedException ie) {
+      throw new RuntimeException(ie);
+    } catch (ClassNotFoundException cnfe) {
+      throw new RuntimeException(cnfe);
+    }
+    if (!success)
+      throw new RuntimeException("Job failed!");
+
+    // read the job output back a line at a time
+    BufferedReader output = new BufferedReader(new InputStreamReader(fs
+        .open(new Path(outDir, "part-r-00000"))));
+    // the reducer should have seen each key twice, once from each input
+    assertTrue(output.readLine().equals("a 2"));
+    assertTrue(output.readLine().equals("b 2"));
+    assertTrue(output.readLine().equals("c 2"));
+    assertTrue(output.readLine().equals("d 2"));
+    assertTrue(output.readLine().equals("e 2"));
+  }
+
   @SuppressWarnings("unchecked")
   public void testAddInputPathWithFormat() throws IOException {
     final Job conf = new Job();
@@ -50,7 +158,7 @@
     MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,
        MapClass.class);
     MultipleInputs.addInputPath(conf, new Path("/bar"),
-       KeyValueTextInputFormat.class, MapClass2.class);
+        KeyValueTextInputFormat.class, KeyValueMapClass.class);
     final Map<Path, InputFormat> inputs = MultipleInputs
        .getInputFormatMap(conf);
     final Map<Path, Class<? extends Mapper>> maps = MultipleInputs
@@ -60,12 +168,42 @@
     assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
        .getClass());
     assertEquals(MapClass.class, maps.get(new Path("/foo")));
-    assertEquals(MapClass2.class, maps.get(new Path("/bar")));
+    assertEquals(KeyValueMapClass.class, maps.get(new Path("/bar")));
   }
 
-  static class MapClass extends Mapper<String, String, String, String> {
+  static final Text blah = new Text("blah");
+
+  // these 3 classes do a reduce side join with 2 different mappers
+  static class MapClass extends Mapper<LongWritable, Text, Text, Text> {
+    // receives "a", "b", "c" as values
+    @Override
+    public void map(LongWritable key, Text value, Context ctx)
+        throws IOException, InterruptedException {
+      ctx.write(value, blah);
+    }
   }
 
-  static class MapClass2 extends MapClass {
+  static class KeyValueMapClass extends Mapper<Text, Text, Text, Text> {
+    // receives "a", "b", "c" as keys
+    @Override
+    public void map(Text key, Text value, Context ctx) throws IOException,
+        InterruptedException {
+      ctx.write(key, blah);
+    }
   }
+
+  static class ReducerClass extends Reducer<Text, Text, NullWritable, Text> {
+    // should receive 2 rows for each key
+    int count = 0;
+
+    @Override
+    public void reduce(Text key, Iterable<Text> values, Context ctx)
+        throws IOException, InterruptedException {
+      count = 0;
+      for (Text value : values)
+        count++;
+      ctx.write(NullWritable.get(), new Text(key.toString() + " " + count));
+    }
+  }
+
 }

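The new testDoMultipleInputs case above is a small recipe for a reduce-side join: MultipleInputs binds each input directory to its own InputFormat and Mapper, and a single reducer sees the merged map output. A condensed sketch of the wiring, reusing the test's mapper and reducer classes (assumed to be in the same package) with illustrative paths:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
    import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
    import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class JoinJobSketch {
      static Job configure(Configuration conf) throws Exception {
        Job job = new Job(conf, "mi");
        // each input path gets its own format and mapper; both mappers
        // must agree on the map output key/value types
        MultipleInputs.addInputPath(job, new Path("input1"),
            TextInputFormat.class, TestMultipleInputs.MapClass.class);
        MultipleInputs.addInputPath(job, new Path("input2"),
            KeyValueTextInputFormat.class,
            TestMultipleInputs.KeyValueMapClass.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(Text.class);
        job.setReducerClass(TestMultipleInputs.ReducerClass.class);
        FileOutputFormat.setOutputPath(job, new Path("output"));
        return job;
      }
    }
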
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/input/TestNLineInputFormat.java Sat Nov 28 20:26:01 2009
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.task.MapContextImpl;
 
 public class TestNLineInputFormat extends TestCase {
   private static int MAX_LENGTH = 200;
@@ -90,7 +91,7 @@
       assertEquals("reader class is LineRecordReader.", 
         LineRecordReader.class, clazz);
       MapContext<LongWritable, Text, LongWritable, Text> mcontext = 
-        new MapContext<LongWritable, Text, LongWritable, Text>(
+        new MapContextImpl<LongWritable, Text, LongWritable, Text>(
           job.getConfiguration(), context.getTaskAttemptID(), reader, null,
           null, MapReduceTestUtil.createDummyReporter(), splits.get(i));
       reader.initialize(splits.get(i), mcontext);

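For context on the class under test: NLineInputFormat splits its input so that each mapper receives a fixed number of lines rather than a byte range, which suits inputs where every line is an expensive work item. A sketch of typical job wiring, assuming the static setNumLinesPerSplit helper on the new-API class:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;

    public class NLineSketch {
      static Job configure(Configuration conf) throws Exception {
        Job job = new Job(conf, "nline");   // illustrative job name
        job.setInputFormatClass(NLineInputFormat.class);
        // each map task will receive at most five input lines
        NLineInputFormat.setNumLinesPerSplit(job, 5);
        return job;
      }
    }
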
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestJoinDatamerge.java Sat Nov 28 20:26:01 2009
@@ -32,6 +32,7 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
@@ -282,7 +283,8 @@
   private static void checkOuterConsistency(Job job, Path[] src) 
       throws IOException {
     Path outf = FileOutputFormat.getOutputPath(job);
-    FileStatus[] outlist = cluster.getFileSystem().listStatus(outf);
+    FileStatus[] outlist = cluster.getFileSystem().listStatus(outf, new 
+                             Utils.OutputFileUtils.OutputFilesFilter());
     assertEquals("number of part files is more than 1. It is" + outlist.length,
       1, outlist.length);
     assertTrue("output file with zero length" + outlist[0].getLen(),
@@ -388,7 +390,8 @@
     job.waitForCompletion(true);
     assertTrue("Job failed", job.isSuccessful());
 
-    FileStatus[] outlist = cluster.getFileSystem().listStatus(outf);
+    FileStatus[] outlist = cluster.getFileSystem().listStatus(outf, 
+                             new Utils.OutputFileUtils.OutputFilesFilter());
     assertEquals(1, outlist.length);
     assertTrue(0 < outlist[0].getLen());
     SequenceFile.Reader r =

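The listStatus changes above matter because a job output directory can hold more than part files (a _logs subdirectory, for instance), so an unfiltered listing makes part-count assertions flaky. A sketch of the filtered listing:

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.Utils;

    public class ListOutputsSketch {
      static FileStatus[] partFiles(FileSystem fs, Path outDir)
          throws java.io.IOException {
        // skips side entries such as _logs; only real part files remain
        return fs.listStatus(outDir,
            new Utils.OutputFileUtils.OutputFilesFilter());
      }
    }
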
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestJoinProperties.java Sat Nov 28 20:26:01 2009
@@ -35,6 +35,7 @@
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+import org.apache.hadoop.mapreduce.task.MapContextImpl;
 
 public class TestJoinProperties extends TestCase {
 
@@ -374,7 +375,7 @@
         RecordReader reader = format.createRecordReader(
 	            split, context);
       MapContext mcontext = 
-        new MapContext(conf, 
+        new MapContextImpl(conf, 
         context.getTaskAttemptID(), reader, null, null, 
         MapReduceTestUtil.createDummyReporter(), split);
       reader.initialize(split, mcontext);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java Sat Nov 28 20:26:01 2009
@@ -25,6 +25,7 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil.Fake_RR;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 
 public class TestWrappedRRClassloader extends TestCase {
   /**
@@ -52,9 +53,9 @@
       new CompositeInputFormat<NullWritable>();
     // create dummy TaskAttemptID
     TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
-    conf.set("mapred.task.id", tid.toString());
+    conf.set(JobContext.TASK_ATTEMPT_ID, tid.toString());
     inputFormat.createRecordReader(inputFormat.getSplits(new Job(conf)).get(0), 
-      new TaskAttemptContext(conf, tid));
+      new TaskAttemptContextImpl(conf, tid));
   }
 
   public static class Fake_ClassLoader extends ClassLoader {

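This is the same migration as the MapContext hunks: TaskAttemptContext is now an interface, so code that needs a context outside a live task constructs TaskAttemptContextImpl, and the JobContext.TASK_ATTEMPT_ID constant replaces the bare "mapred.task.id" string. A sketch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.TaskType;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class ContextSketch {
      static TaskAttemptContext dummyContext(Configuration conf) {
        TaskAttemptID tid = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);
        // the constant resolves to the key formerly written out by hand
        conf.set(JobContext.TASK_ATTEMPT_ID, tid.toString());
        return new TaskAttemptContextImpl(conf, tid);
      }
    }
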
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java Sat Nov 28 20:26:01 2009
@@ -37,6 +37,7 @@
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+import org.apache.hadoop.mapreduce.task.MapContextImpl;
 
 import junit.framework.TestCase;
 import org.apache.commons.logging.*;
@@ -100,7 +101,7 @@
       writer.close(context);
     }
     committer.commitTask(context);
-    committer.cleanupJob(job);
+    committer.commitJob(job);
 
     InputFormat<IntWritable, DoubleWritable> iformat =
       new SequenceFileInputFormat<IntWritable, DoubleWritable>();
@@ -112,7 +113,7 @@
       RecordReader<IntWritable, DoubleWritable> reader =
         iformat.createRecordReader(split, context);
       MapContext<IntWritable, DoubleWritable, BytesWritable, BytesWritable> 
-        mcontext = new MapContext<IntWritable, DoubleWritable,
+        mcontext = new MapContextImpl<IntWritable, DoubleWritable,
           BytesWritable, BytesWritable>(job.getConfiguration(), 
           context.getTaskAttemptID(), reader, null, null, 
           MapReduceTestUtil.createDummyReporter(), 

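The cleanupJob-to-commitJob switch above tracks the OutputCommitter API, where cleanupJob was deprecated so that the success and failure paths are explicit. A sketch of the success-path lifecycle a test drives by hand, assuming the standard new-API OutputCommitter methods:

    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.OutputCommitter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    public class CommitSketch {
      static void runSuccessPath(OutputCommitter committer,
          JobContext job, TaskAttemptContext task) throws Exception {
        committer.setupJob(job);           // once, before any task runs
        committer.setupTask(task);         // per task attempt
        // ... the task writes its records here ...
        if (committer.needsTaskCommit(task)) {
          committer.commitTask(task);      // promote the attempt's output
        }
        committer.commitJob(job);          // finalize; replaces cleanupJob
      }
    }
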
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedComparator.java Sat Nov 28 20:26:01 2009
@@ -26,8 +26,9 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.HadoopTestCase;
-import org.apache.hadoop.mapred.OutputLogFilter;
+import org.apache.hadoop.mapred.Utils;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
@@ -42,7 +43,7 @@
   public TestMRKeyFieldBasedComparator() throws IOException {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
     conf = createJobConf();
-    conf.set("map.output.key.field.separator", " ");
+    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
   }
   
   private void testComparator(String keySpec, int expect) 
@@ -51,9 +52,9 @@
     Path inDir = new Path(root, "test_cmp/in");
     Path outDir = new Path(root, "test_cmp/out");
     
-    conf.set("mapred.text.key.comparator.options", keySpec);
-    conf.set("mapred.text.key.partitioner.options", "-k1.1,1.1");
-    conf.set("map.output.key.field.separator", " ");
+    conf.set("mapreduce.partition.keycomparator.options", keySpec);
+    conf.set("mapreduce.partition.keypartitioner.options", "-k1.1,1.1");
+    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
 
     Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 2,
                 line1 +"\n" + line2 + "\n"); 
@@ -69,7 +70,7 @@
 
     // validate output
     Path[] outputFiles = FileUtil.stat2Paths(getFileSystem().listStatus(outDir,
-        new OutputLogFilter()));
+        new Utils.OutputFileUtils.OutputFilesFilter()));
     if (outputFiles.length > 0) {
       InputStream is = getFileSystem().open(outputFiles[0]);
       BufferedReader reader = new BufferedReader(new InputStreamReader(is));
@@ -119,7 +120,7 @@
   public void testWithoutMRJob(String keySpec, int expect) throws Exception {
     KeyFieldBasedComparator<Void, Void> keyFieldCmp = 
       new KeyFieldBasedComparator<Void, Void>();
-    conf.set("mapred.text.key.comparator.options", keySpec);
+    conf.set("mapreduce.partition.keycomparator.options", keySpec);
     keyFieldCmp.setConf(conf);
     int result = keyFieldCmp.compare(line1_bytes, 0, line1_bytes.length,
         line2_bytes, 0, line2_bytes.length);

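The renamed keys above configure KeyFieldBasedComparator, which interprets Unix sort style -k field specifications over the raw key bytes. A standalone sketch with space-separated keys; the option string and inputs are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator;

    public class ComparatorSketch {
      static int compareLines(String left, String right) {
        Configuration conf = new Configuration();
        conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
        // sort on the second field, numeric, reversed
        conf.set("mapreduce.partition.keycomparator.options", "-k2,2nr");
        KeyFieldBasedComparator<Void, Void> cmp =
            new KeyFieldBasedComparator<Void, Void>();
        cmp.setConf(conf);
        byte[] l = left.getBytes();
        byte[] r = right.getBytes();
        return cmp.compare(l, 0, l.length, r, 0, r.length);
      }
    }
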
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestMRKeyFieldBasedPartitioner.java Sat Nov 28 20:26:01 2009
@@ -50,7 +50,7 @@
     // check if the hashcode is correct with specified keyspec
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2,2");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,2");
     kfbp.setConf(conf);
     String expectedOutput = "def";
     byte[] eBytes = expectedOutput.getBytes();
@@ -62,7 +62,7 @@
     // test with invalid end index in keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2,5");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,5");
     kfbp.setConf(conf);
     expectedOutput = "def\txyz";
     eBytes = expectedOutput.getBytes();
@@ -74,7 +74,7 @@
     // test with 0 end index in keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2");
     kfbp.setConf(conf);
     expectedOutput = "def\txyz";
     eBytes = expectedOutput.getBytes();
@@ -86,7 +86,7 @@
     // test with invalid keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k10");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k10");
     kfbp.setConf(conf);
     assertEquals("Partitioner doesnt work as expected", 0, 
                  kfbp.getPartition(new Text(input), new Text(), numReducers));
@@ -94,7 +94,7 @@
     // test with multiple keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2,2 -k4,4");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,2 -k4,4");
     kfbp.setConf(conf);
     input = "abc\tdef\tpqr\txyz";
     expectedOutput = "def";
@@ -110,7 +110,7 @@
     // test with invalid start index in keyspecs
     kfbp = new KeyFieldBasedPartitioner<Text, Text>();
     conf = new Configuration();
-    conf.set("mapred.text.key.partitioner.options", "-k2,2 -k30,21 -k4,4 -k5");
+    conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,2 -k30,21 -k4,4 -k5");
     kfbp.setConf(conf);
     expectedOutput = "def";
     eBytes = expectedOutput.getBytes();

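For reference, KeyFieldBasedPartitioner.PARTITIONER_OPTIONS is the constant behind the old "mapred.text.key.partitioner.options" key; the option grammar is unchanged. A small sketch of partitioning on the second tab-separated field:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedPartitioner;

    public class PartitionerSketch {
      static int partitionOf(String key, int numReducers) {
        Configuration conf = new Configuration();
        conf.set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, "-k2,2");
        KeyFieldBasedPartitioner<Text, Text> kfbp =
            new KeyFieldBasedPartitioner<Text, Text>();
        kfbp.setConf(conf);
        // keys sharing the second field hash to the same partition
        return kfbp.getPartition(new Text(key), new Text(), numReducers);
      }
    }
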
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java Sat Nov 28 20:26:01 2009
@@ -34,6 +34,7 @@
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.mapreduce.JobContext;
 
 public class TestTotalOrderPartitioner extends TestCase {
 
@@ -82,7 +83,7 @@
                                  ).makeQualified(fs);
     Path p = new Path(testdir, testname + "/_partition.lst");
     TotalOrderPartitioner.setPartitionFile(conf, p);
-    conf.setInt("mapred.reduce.tasks", splits.length + 1);
+    conf.setInt(JobContext.NUM_REDUCES, splits.length + 1);
     SequenceFile.Writer w = null;
     try {
       w = SequenceFile.createWriter(fs, conf, p,
@@ -104,7 +105,7 @@
     Configuration conf = new Configuration();
     Path p = TestTotalOrderPartitioner.<Text>writePartitionFile(
         "totalordermemcmp", conf, splitStrings);
-    conf.setClass("mapred.mapoutput.key.class", Text.class, Object.class);
+    conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
     try {
       partitioner.setConf(conf);
       NullWritable nw = NullWritable.get();
@@ -123,8 +124,8 @@
     Configuration conf = new Configuration();
     Path p = TestTotalOrderPartitioner.<Text>writePartitionFile(
         "totalorderbinarysearch", conf, splitStrings);
-    conf.setBoolean("total.order.partitioner.natural.order", false);
-    conf.setClass("mapred.mapoutput.key.class", Text.class, Object.class);
+    conf.setBoolean(TotalOrderPartitioner.NATURAL_ORDER, false);
+    conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
     try {
       partitioner.setConf(conf);
       NullWritable nw = NullWritable.get();
@@ -157,9 +158,9 @@
     Arrays.sort(revSplitStrings, new ReverseStringComparator());
     Path p = TestTotalOrderPartitioner.<Text>writePartitionFile(
         "totalordercustomcomparator", conf, revSplitStrings);
-    conf.setBoolean("total.order.partitioner.natural.order", false);
-    conf.setClass("mapred.mapoutput.key.class", Text.class, Object.class);
-    conf.setClass("mapred.output.key.comparator.class",
+    conf.setBoolean(TotalOrderPartitioner.NATURAL_ORDER, false);
+    conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS, Text.class, Object.class);
+    conf.setClass(JobContext.KEY_COMPARATOR,
       ReverseStringComparator.class, RawComparator.class);
     ArrayList<Check<Text>> revCheck = new ArrayList<Check<Text>>();
     revCheck.add(new Check<Text>(new Text("aaaaa"), 9));

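The constants adopted above replace three hand-written keys; behaviorally, TotalOrderPartitioner still loads a partition file of numReduces - 1 sorted split keys and maps each incoming key to a range. A configuration sketch with a hypothetical partition file path, which must already contain the sorted split keys:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;

    public class TotalOrderSketch {
      static int rangeOf(Text key, int numReduces) {
        Configuration conf = new Configuration();
        TotalOrderPartitioner.setPartitionFile(conf,
            new Path("/tmp/_partition.lst"));   // hypothetical path
        conf.setInt(JobContext.NUM_REDUCES, numReduces);
        conf.setClass(JobContext.MAP_OUTPUT_KEY_CLASS,
            Text.class, Object.class);
        TotalOrderPartitioner<Text, NullWritable> p =
            new TotalOrderPartitioner<Text, NullWritable>();
        p.setConf(conf);   // reads the split keys from the partition file
        return p.getPartition(key, NullWritable.get(), numReduces);
      }
    }
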
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java?rev=885145&r1=885144&r2=885145&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java Sat Nov 28 20:26:01 2009
@@ -28,8 +28,18 @@
 import org.apache.hadoop.mapred.TestSequenceFileInputFormat;
 import org.apache.hadoop.mapred.TestTextInputFormat;
 import org.apache.hadoop.mapred.ThreadedMapBenchmark;
+import org.apache.hadoop.mapreduce.FailJob;
+import org.apache.hadoop.mapreduce.SleepJob;
 import org.apache.hadoop.util.ProgramDriver;
 
+import org.apache.hadoop.hdfs.NNBench;
+import org.apache.hadoop.fs.TestFileSystem;
+import org.apache.hadoop.fs.TestDFSIO;
+import org.apache.hadoop.fs.DFSCIOTest;
+import org.apache.hadoop.fs.DistributedFSCheck;
+import org.apache.hadoop.io.FileBench;
+import org.apache.hadoop.fs.JHLogAnalyzer;
+
 /**
  * Driver for Map-reduce tests.
  *
@@ -69,6 +79,25 @@
       pgd.addClass("MRReliabilityTest", ReliabilityTest.class,
           "A program that tests the reliability of the MR framework by " +
           "injecting faults/failures");
+      pgd.addClass("fail", FailJob.class, "a job that always fails");
+      pgd.addClass("sleep", SleepJob.class, 
+                   "A job that sleeps at each map and reduce task.");
+      pgd.addClass("nnbench", NNBench.class, 
+          "A benchmark that stresses the namenode.");
+      pgd.addClass("testfilesystem", TestFileSystem.class, 
+          "A test for FileSystem read/write.");
+      pgd.addClass("TestDFSIO", TestDFSIO.class, 
+          "Distributed i/o benchmark.");
+      pgd.addClass("DFSCIOTest", DFSCIOTest.class,
+          "Distributed i/o benchmark of libhdfs.");
+      pgd.addClass("DistributedFSCheck", DistributedFSCheck.class, 
+          "Distributed checkup of the file system consistency.");
+      pgd.addClass("filebench", FileBench.class, 
+          "Benchmark SequenceFile(Input|Output)Format " +
+          "(block,record compressed and uncompressed), " +
+          "Text(Input|Output)Format (compressed and uncompressed)");
+      pgd.addClass(JHLogAnalyzer.class.getSimpleName(), JHLogAnalyzer.class, 
+          "Job History Log analyzer.");
     } catch(Throwable e) {
       e.printStackTrace();
     }

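The additions above all follow ProgramDriver's registration pattern: a command name, a class with a main method, and a one-line description, with driver() dispatching on the first argument. A stripped-down sketch using one of the classes registered above:

    import org.apache.hadoop.mapreduce.SleepJob;
    import org.apache.hadoop.util.ProgramDriver;

    public class DriverSketch {
      public static void main(String[] argv) {
        ProgramDriver pgd = new ProgramDriver();
        try {
          // invoked as: hadoop jar <tests.jar> sleep <args...>
          pgd.addClass("sleep", SleepJob.class,
              "A job that sleeps at each map and reduce task.");
          pgd.driver(argv);   // looks up argv[0], runs that class's main()
        } catch (Throwable e) {
          e.printStackTrace();
        }
      }
    }
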

