hadoop-mapreduce-commits mailing list archives

From: tomwh...@apache.org
Subject: svn commit: r895914 - in /hadoop/mapreduce/trunk: ./ src/contrib/streaming/src/java/org/apache/hadoop/streaming/ src/contrib/streaming/src/test/org/apache/hadoop/streaming/
Date: Tue, 05 Jan 2010 06:09:59 GMT
Author: tomwhite
Date: Tue Jan  5 06:09:51 2010
New Revision: 895914

URL: http://svn.apache.org/viewvc?rev=895914&view=rev
Log:
MAPREDUCE-1155. Streaming tests swallow exceptions. Contributed by Todd Lipcon.
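
[Editorial note for readers of the archive: the anti-pattern this commit removes is a catch block that prints a stack trace and lets the test method return normally, so the test passes even when the job fails. A minimal sketch of the before/after pattern; the class, method, and helper names below are illustrative, not from the patch:]

    import org.junit.Test;
    import static org.junit.Assert.assertEquals;

    // Illustrative names only; not part of the patch.
    public class ExceptionPropagationExample {

      private String runJob() throws Exception {
        return "expected";  // stand-in for launching a streaming job
      }

      // Before (the anti-pattern being removed): the catch block
      // swallows the failure, so the runner reports a pass even when
      // runJob() throws.
      public void testJobSwallowed() {
        try {
          assertEquals("expected", runJob());
        } catch (Exception e) {
          e.printStackTrace();  // logged, but the test still "passes"
        }
      }

      // After (the pattern applied throughout this commit): declare
      // the exception and let it propagate, so JUnit records a real
      // failure with the full stack trace.
      @Test
      public void testJobPropagates() throws Exception {
        assertEquals("expected", runJob());
      }
    }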

Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestGzipInput.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamJob.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingTaskLog.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Tue Jan  5 06:09:51 2010
@@ -201,6 +201,9 @@
     MAPREDUCE-1131. Using profilers other than hprof can cause JobClient to
     report job failure. (Aaron Kimball via tomwhite)
 
+    MAPREDUCE-1155. Streaming tests swallow exceptions.
+    (Todd Lipcon via tomwhite)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Tue Jan  5 06:09:51 2010
@@ -124,13 +124,13 @@
       postProcessArgs();
   
       setJobConf();
-      return submitAndMonitorJob();
     }catch (IllegalArgumentException ex) {
       //ignore, since log will already be printed
       // print the log in debug mode.
       LOG.debug("Error in streaming job", ex);
       return 1;
     }
+    return submitAndMonitorJob();
   }
   
   /**
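
[Editorial note: this hunk is subtle but central to the fix. submitAndMonitorJob() moves out of the try block, so the catch now handles only an IllegalArgumentException raised while processing arguments; a failure during submission or monitoring propagates to the caller (and fails the calling test) instead of being folded into the usage-error return code. The resulting shape of go(), reconstructed from the diff context above with elided parts marked:]

    try {
      // ... argument parsing and validation ...
      postProcessArgs();
      setJobConf();
    } catch (IllegalArgumentException ex) {
      // Bad arguments: the usage message was already printed, so log
      // at debug level and exit with the usage-error code.
      LOG.debug("Error in streaming job", ex);
      return 1;
    }
    // Now outside the try: an exception thrown while submitting or
    // monitoring the job propagates instead of being reported with
    // the same exit code as a usage error.
    return submitAndMonitorJob();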

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java Tue Jan  5 06:09:51 2010
@@ -36,9 +36,10 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.streaming.AutoInputFormat;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestAutoInputFormat extends TestCase {
+public class TestAutoInputFormat {
 
   private static Configuration conf = new Configuration();
 
@@ -49,6 +50,7 @@
   private static final int SPLITS_COUNT = 2;
 
   @SuppressWarnings( { "unchecked", "deprecation" })
+  @Test
   public void testFormat() throws IOException {
     JobConf job = new JobConf(conf);
     FileSystem fs = FileSystem.getLocal(conf);
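
[Editorial note: this file shows the mechanical conversion repeated in every test below: drop the junit.framework.TestCase superclass, import org.junit.Test plus the static Assert methods, and annotate each test method. Because JUnit 4 treats any exception thrown from a @Test method as a test failure, the later hunks can delete the try/catch/failTrace scaffolding outright. A minimal before/after sketch; the class name is illustrative:]

    // Before: JUnit 3 finds tests by the "test" name prefix on
    // subclasses of TestCase, and assertions come from the superclass.
    //
    //   import junit.framework.TestCase;
    //
    //   public class TestExample extends TestCase {
    //     public void testSomething() { assertTrue(true); }
    //   }

    // After: JUnit 4 finds tests by annotation; no superclass is
    // needed, and assertions are statically imported.
    import org.junit.Test;
    import static org.junit.Assert.assertTrue;

    public class TestExample {
      @Test
      public void testSomething() {
        assertTrue(true);
      }
    }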

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java Tue Jan  5 06:09:51 2010
@@ -31,10 +31,12 @@
 import org.apache.hadoop.streaming.DumpTypedBytes;
 import org.apache.hadoop.typedbytes.TypedBytesInput;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestDumpTypedBytes extends TestCase {
+public class TestDumpTypedBytes {
 
+  @Test
   public void testDumping() throws Exception {
     Configuration conf = new Configuration();
     MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestGzipInput.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestGzipInput.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestGzipInput.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestGzipInput.java Tue Jan  5 06:09:51 2010
@@ -30,13 +30,13 @@
 {
 
   public TestGzipInput() throws IOException {
-    INPUT_FILE = new File("input.txt.gz");
+    INPUT_FILE = new File(TEST_DIR, "input.txt.gz");
   }
   
   protected void createInput() throws IOException
   {
     GZIPOutputStream out = new GZIPOutputStream(
-                                                new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
+      new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
     out.write(input.getBytes("UTF-8"));
     out.close();
   }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java Tue Jan  5 06:09:51 2010
@@ -31,10 +31,12 @@
 import org.apache.hadoop.typedbytes.TypedBytesOutput;
 import org.apache.hadoop.typedbytes.TypedBytesWritable;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestLoadTypedBytes extends TestCase {
+public class TestLoadTypedBytes {
 
+  @Test
   public void testLoading() throws Exception {
     Configuration conf = new Configuration();
     MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java Tue Jan  5 06:09:51 2010
@@ -24,9 +24,14 @@
 import java.io.DataOutputStream;
 import java.io.InputStreamReader;
 import java.io.BufferedReader;
+import java.util.Arrays;
 import java.util.zip.ZipEntry;
 import java.util.jar.JarOutputStream;
 import java.util.zip.ZipOutputStream;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -34,6 +39,10 @@
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.util.StringUtils;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 /**
  * This class tests cacheArchive option of streaming 
@@ -42,12 +51,14 @@
  */
 public class TestMultipleArchiveFiles extends TestStreaming
 {
+  private static final Log LOG = LogFactory.getLog(TestMultipleArchiveFiles.class);
 
   private StreamJob job;
-  private String INPUT_FILE = "input.txt";
-  private String CACHE_ARCHIVE_1 = "cacheArchive1.zip";
+  private String INPUT_DIR = "multiple-archive-files/";
+  private String INPUT_FILE = INPUT_DIR + "input.txt";
+  private String CACHE_ARCHIVE_1 = INPUT_DIR + "cacheArchive1.zip";
   private File CACHE_FILE_1 = null;
-  private String CACHE_ARCHIVE_2 = "cacheArchive2.zip";
+  private String CACHE_ARCHIVE_2 = INPUT_DIR + "cacheArchive2.zip";
   private File CACHE_FILE_2 = null;
   private String expectedOutput = null;
   private String OUTPUT_DIR = "out";
@@ -59,27 +70,23 @@
   private String strNamenode = null;
   private String namenode = null;
 
-  public TestMultipleArchiveFiles() throws IOException {
+  public TestMultipleArchiveFiles() throws Exception {
     CACHE_FILE_1 = new File("cacheArchive1");
     CACHE_FILE_2 = new File("cacheArchive2");
     input = "HADOOP";
     expectedOutput = "HADOOP\t\nHADOOP\t\n";
-    try {
-      conf = new Configuration();      
-      dfs = new MiniDFSCluster(conf, 1, true, null);      
-      fileSys = dfs.getFileSystem();
-      namenode = fileSys.getUri().getAuthority();
-      mr  = new MiniMRCluster(1, namenode, 3);
-      strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
-      strNamenode = "fs.default.name=" + namenode;
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+    conf = new Configuration();      
+    dfs = new MiniDFSCluster(conf, 1, true, null);      
+    fileSys = dfs.getFileSystem();
+    namenode = fileSys.getUri().getAuthority();
+    mr  = new MiniMRCluster(1, namenode, 3);
+    strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
+    strNamenode = "fs.default.name=" + namenode;
   }
   
   protected void createInput() throws IOException
   {
-
+    fileSys.delete(new Path(INPUT_DIR), true);
     DataOutputStream dos = fileSys.create(new Path(INPUT_FILE));
     String inputFileString = "symlink1/cacheArchive1\nsymlink2/cacheArchive2";
     dos.write(inputFileString.getBytes("UTF-8"));
@@ -103,14 +110,9 @@
   }
 
   protected String[] genArgs() {
-    String cacheArchiveString1 = null;
-    String cacheArchiveString2 = null;
-    try {
-      cacheArchiveString1 = fileSys.getUri().toString()+fileSys.getWorkingDirectory().toString()+"/"+CACHE_ARCHIVE_1+"#symlink1";
-      cacheArchiveString2 = fileSys.getUri().toString()+fileSys.getWorkingDirectory().toString()+"/"+CACHE_ARCHIVE_2+"#symlink2";
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+    String workDir = fileSys.getWorkingDirectory().toString() + "/";
+    String cache1 = workDir + CACHE_ARCHIVE_1 + "#symlink1";
+    String cache2 = workDir + CACHE_ARCHIVE_2 + "#symlink2";
 
     return new String[] {
       "-input", INPUT_FILE.toString(),
@@ -118,39 +120,32 @@
       "-mapper", "xargs cat", 
       "-reducer", "cat",
       "-jobconf", "mapreduce.job.reduces=1",
-      "-cacheArchive", cacheArchiveString1, 
-      "-cacheArchive", cacheArchiveString2,
+      "-cacheArchive", cache1,
+      "-cacheArchive", cache2,
       "-jobconf", strNamenode,
       "-jobconf", strJobTracker,
       "-jobconf", "stream.tmpdir=" + System.getProperty("test.build.data","/tmp")
     };
   }
 
-  public void testCommandLine() {
-    try {
-      createInput();
-      job = new StreamJob(genArgs(), true);
-      if(job.go() != 0) {
-        throw new Exception("Job Failed");
-      }
-      StringBuffer output = new StringBuffer(256);
-      Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
+  //@Test
+  public void testCommandLine() throws Exception {
+    createInput();
+    String args[] = genArgs();
+    LOG.info("Testing streaming command line:\n" +
+             StringUtils.join(" ", Arrays.asList(args)));
+    job = new StreamJob(genArgs(), true);
+    if(job.go() != 0) {
+      throw new Exception("Job Failed");
+    }
+    StringBuffer output = new StringBuffer(256);
+    Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
                                             new Path(OUTPUT_DIR)));
-      for (int i = 0; i < fileList.length; i++){
-        BufferedReader bread =
-          new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
-        output.append(bread.readLine());
-        output.append("\n");
-        output.append(bread.readLine());
-        output.append("\n");
-      }
-      assertEquals(expectedOutput, output.toString());
-    } catch (Exception e) {
-      e.printStackTrace();
-    } finally {
-      CACHE_FILE_1.delete();
-      CACHE_FILE_2.delete();
+    for (int i = 0; i < fileList.length; i++){
+      LOG.info("Adding output from file: " + fileList[i]);
+      output.append(StreamUtil.slurpHadoop(fileList[i], fileSys));
     }
+    assertEquals(expectedOutput, output.toString());
   }
 
   public static void main(String[]args) throws Exception

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java Tue Jan  5 06:09:51 2010
@@ -25,7 +25,8 @@
 import java.io.PrintWriter;
 import java.io.StringWriter;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,7 +41,7 @@
  * This test case tests the symlink creation
  * utility provided by distributed caching 
  */
-public class TestMultipleCachefiles extends TestCase
+public class TestMultipleCachefiles
 {
   String INPUT_FILE = "/testing-streaming/input.txt";
   String OUTPUT_DIR = "/testing-streaming/out";
@@ -59,100 +60,89 @@
   {
   }
 
-  public void testMultipleCachefiles()
+  @Test
+  public void testMultipleCachefiles() throws Exception
   {
-    try {
-      boolean mayExit = false;
-      MiniMRCluster mr = null;
-      MiniDFSCluster dfs = null; 
-      try{
-        Configuration conf = new Configuration();
-        dfs = new MiniDFSCluster(conf, 1, true, null);
-        FileSystem fileSys = dfs.getFileSystem();
-        String namenode = fileSys.getUri().toString();
-
-        mr  = new MiniMRCluster(1, namenode, 3);
-        // During tests, the default Configuration will use a local mapred
-        // So don't specify -config or -cluster
-        String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
-        String strNamenode = "fs.default.name=" + namenode;
-        String argv[] = new String[] {
-          "-input", INPUT_FILE,
-          "-output", OUTPUT_DIR,
-          "-mapper", map,
-          "-reducer", reduce,
-          //"-verbose",
-          //"-jobconf", "stream.debug=set"
-          "-jobconf", strNamenode,
-          "-jobconf", strJobtracker,
-          "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
-          "-jobconf", 
-            JobConf.MAPRED_MAP_TASK_JAVA_OPTS + "=" +
-              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-              "-Dbuild.test=" + System.getProperty("build.test") + " " +
-              conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
-                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
-          "-jobconf", 
-            JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + "=" +
-              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-              "-Dbuild.test=" + System.getProperty("build.test") + " " +
-              conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
-                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
-          "-cacheFile", fileSys.getUri() + CACHE_FILE + "#" + mapString,
-          "-cacheFile", fileSys.getUri() + CACHE_FILE_2 + "#" + mapString2
-        };
+    boolean mayExit = false;
+    MiniMRCluster mr = null;
+    MiniDFSCluster dfs = null; 
+    try{
+      Configuration conf = new Configuration();
+      dfs = new MiniDFSCluster(conf, 1, true, null);
+      FileSystem fileSys = dfs.getFileSystem();
+      String namenode = fileSys.getUri().toString();
+
+      mr  = new MiniMRCluster(1, namenode, 3);
+      // During tests, the default Configuration will use a local mapred
+      // So don't specify -config or -cluster
+      String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
+      String strNamenode = "fs.default.name=" + namenode;
+      String argv[] = new String[] {
+        "-input", INPUT_FILE,
+        "-output", OUTPUT_DIR,
+        "-mapper", map,
+        "-reducer", reduce,
+        //"-verbose",
+        //"-jobconf", "stream.debug=set"
+        "-jobconf", strNamenode,
+        "-jobconf", strJobtracker,
+        "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
+        "-jobconf", 
+          JobConf.MAPRED_MAP_TASK_JAVA_OPTS + "=" +
+            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+            "-Dbuild.test=" + System.getProperty("build.test") + " " +
+            conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
+                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+        "-jobconf", 
+          JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + "=" +
+            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+            "-Dbuild.test=" + System.getProperty("build.test") + " " +
+            conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
+                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+        "-cacheFile", fileSys.getUri() + CACHE_FILE + "#" + mapString,
+        "-cacheFile", fileSys.getUri() + CACHE_FILE_2 + "#" + mapString2
+      };
 
-        fileSys.delete(new Path(OUTPUT_DIR), true);
+      fileSys.delete(new Path(OUTPUT_DIR), true);
+      
+      DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
+      file.writeBytes(mapString + "\n");
+      file.writeBytes(mapString2 + "\n");
+      file.close();
+      file = fileSys.create(new Path(CACHE_FILE));
+      file.writeBytes(cacheString + "\n");
+      file.close();
+      file = fileSys.create(new Path(CACHE_FILE_2));
+      file.writeBytes(cacheString2 + "\n");
+      file.close();
         
-        DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
-        file.writeBytes(mapString + "\n");
-        file.writeBytes(mapString2 + "\n");
-        file.close();
-        file = fileSys.create(new Path(CACHE_FILE));
-        file.writeBytes(cacheString + "\n");
-        file.close();
-        file = fileSys.create(new Path(CACHE_FILE_2));
-        file.writeBytes(cacheString2 + "\n");
-        file.close();
-          
-        job = new StreamJob(argv, mayExit);     
-        job.go();
-
-        fileSys = dfs.getFileSystem();
-        String line = null;
-        String line2 = null;
-        Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
-                                     new Path(OUTPUT_DIR),
-                                     new Utils.OutputFileUtils
-                                              .OutputFilesFilter()));
-        for (int i = 0; i < fileList.length; i++){
-          System.out.println(fileList[i].toString());
-          BufferedReader bread =
-            new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
-          line = bread.readLine();
-          System.out.println(line);
-          line2 = bread.readLine();
-          System.out.println(line2);
-        }
-        assertEquals(cacheString + "\t", line);
-        assertEquals(cacheString2 + "\t", line2);
-      } finally{
-        if (dfs != null) { dfs.shutdown(); }
-        if (mr != null) { mr.shutdown();}
+      job = new StreamJob(argv, mayExit);     
+      job.go();
+
+      fileSys = dfs.getFileSystem();
+      String line = null;
+      String line2 = null;
+      Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
+                                   new Path(OUTPUT_DIR),
+                                   new Utils.OutputFileUtils
+                                            .OutputFilesFilter()));
+      for (int i = 0; i < fileList.length; i++){
+        System.out.println(fileList[i].toString());
+        BufferedReader bread =
+          new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
+        line = bread.readLine();
+        System.out.println(line);
+        line2 = bread.readLine();
+        System.out.println(line2);
       }
-      
-    } catch(Exception e) {
-      failTrace(e);
+      assertEquals(cacheString + "\t", line);
+      assertEquals(cacheString2 + "\t", line2);
+    } finally{
+      if (dfs != null) { dfs.shutdown(); }
+      if (mr != null) { mr.shutdown();}
     }
   }
 
-  void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestMultipleCachefiles().testMultipleCachefiles();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java Tue Jan  5 06:09:51 2010
@@ -26,9 +26,10 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestRawBytesStreaming extends TestCase {
+public class TestRawBytesStreaming {
 
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("out");
@@ -62,7 +63,8 @@
       "-verbose"
     };
   }
-  
+
+  @Test
   public void testCommandLine() throws Exception {
     try {
       try {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java Tue Jan  5 06:09:51 2010
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 import java.io.*;
 
 import org.apache.hadoop.fs.FileUtil;
@@ -29,7 +30,7 @@
  * It uses Hadoop Aggregate to count the numbers of word occurrences 
  * in the input.
  */
-public class TestStreamAggregate extends TestCase
+public class TestStreamAggregate
 {
   protected File INPUT_FILE = new File("stream_aggregate_input.txt");
   protected File OUTPUT_DIR = new File("stream_aggregate_out");
@@ -69,8 +70,8 @@
     };
   }
   
-  public void testCommandLine()
-  {
+  @Test
+  public void testCommandLine() throws Exception {
     try {
       try {
         FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
@@ -90,25 +91,12 @@
       System.err.println("outEx1=" + outputExpect);
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreaming().testCommandLine();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java Tue Jan  5 06:09:51 2010
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -27,10 +26,13 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
  */
-public class TestStreamDataProtocol extends TestCase
+public class TestStreamDataProtocol
 {
 
   // "map" command: grep -E (red|green|blue)
@@ -78,15 +80,16 @@
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }
-  
-  public void testCommandLine()
+
+  @Test
+  public void testCommandLine() throws Exception
   {
     try {
-      try {
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (Exception e) {
-      }
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+    } catch (Exception e) {
+    }
 
+    try {
       createInput();
       boolean mayExit = false;
 
@@ -101,25 +104,12 @@
       System.err.println("  out1=" + output);
       System.err.println("  equals=" + outputExpect.compareTo(output));
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreamDataProtocol().testCommandLine();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamJob.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamJob.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamJob.java Tue Jan  5 06:09:51 2010
@@ -25,13 +25,15 @@
 import org.apache.hadoop.mapred.KeyValueTextInputFormat;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 /**
  * This class tests hadoop Streaming's StreamJob class.
  */
-public class TestStreamJob extends TestCase {
+public class TestStreamJob {
 
+  @Test
   public void testCreateJob() throws IOException {
     JobConf job;
     ArrayList<String> dummyArgs = new ArrayList<String>();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java Tue Jan  5 06:09:51 2010
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -26,13 +25,16 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 
+import static org.junit.Assert.*;
+import org.junit.Test;
+
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
  * It tests the case where number of reducers is set to 0.
    In this case, the mappers are expected to write out outputs directly.
    No reducer/combiner will be activated.
  */
-public class TestStreamReduceNone extends TestCase
+public class TestStreamReduceNone
 {
   protected File INPUT_FILE = new File("stream_reduce_none_input.txt");
   protected File OUTPUT_DIR = new File("stream_reduce_none_out");
@@ -69,8 +71,9 @@
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }
-  
-  public void testCommandLine()
+
+  @Test
+  public void testCommandLine() throws Exception
   {
     String outFileName = "part-00000";
     File outFile = null;
@@ -92,25 +95,12 @@
       System.err.println("outEx1=" + outputExpect);
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreamReduceNone().testCommandLine();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java Tue Jan  5 06:09:51 2010
@@ -24,6 +24,9 @@
 
 import org.apache.hadoop.fs.FileUtil;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 /**
  * This class tests StreamXmlRecordReader
  * The test creates an XML file, uses StreamXmlRecordReader and compares
@@ -60,7 +63,8 @@
     };
   }
 
-  public void testCommandLine() {
+  @Test
+  public void testCommandLine() throws Exception {
     try {
       try {
         FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
@@ -73,8 +77,6 @@
       String output = StreamUtil.slurp(outFile);
       outFile.delete();
       assertEquals(input, output);
-    } catch (Exception e) {
-      e.printStackTrace();
     } finally {
       try {
         INPUT_FILE.delete();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java Tue Jan  5 06:09:51 2010
@@ -28,7 +28,8 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -51,7 +52,7 @@
  *     test-unix 
  * </pre>
  */
-public class TestStreamedMerge extends TestCase {
+public class TestStreamedMerge {
 
   public TestStreamedMerge() throws IOException {
     UtilTest utilTest = new UtilTest(getClass().getName());
@@ -104,24 +105,16 @@
     return c;
   }
 
-  void lsr() {
-    try {
-      System.out.println("lsr /");
-      ToolRunner.run(conf_, new FsShell(), new String[]{ "-lsr", "/" });
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+  void lsr() throws Exception {
+    System.out.println("lsr /");
+    ToolRunner.run(conf_, new FsShell(), new String[]{ "-lsr", "/" });
   }
 
-  void printSampleInput() {
-    try {
-      System.out.println("cat /input/part-00");
-      String content = StreamUtil.slurpHadoop(new Path("/input/part-00"), fs_);
-      System.out.println(content);
-      System.out.println("cat done.");
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+  void printSampleInput() throws IOException {
+    System.out.println("cat /input/part-00");
+    String content = StreamUtil.slurpHadoop(new Path("/input/part-00"), fs_);
+    System.out.println(content);
+    System.out.println("cat done.");
   }
 
   void callStreaming(String argSideOutput, boolean inputTagged) throws IOException {
@@ -210,7 +203,8 @@
     StringBuffer buf_;
   }
 
-  public void testMain() throws IOException {
+  @Test
+  public void testMain() throws Exception {
     boolean success = false;
     String base = new File(".").getAbsolutePath();
     System.setProperty("hadoop.log.dir", base + "/logs");
@@ -228,8 +222,6 @@
       }
       doAllTestJobs();
       success = true;
-    } catch (IOException io) {
-      io.printStackTrace();
     } finally {
       try {
         fs_.close();
@@ -243,14 +235,14 @@
     }
   }
 
-  void doAllTestJobs() throws IOException
+  void doAllTestJobs() throws Exception
   {
     goSocketTagged(true, false);
     goSocketTagged(false, false);
     goSocketTagged(true, true);
   }
   
-  void goSocketTagged(boolean socket, boolean inputTagged) throws IOException {
+  void goSocketTagged(boolean socket, boolean inputTagged) throws Exception {
     System.out.println("***** goSocketTagged: " + socket + ", " + inputTagged);
     String expect = createInputs(inputTagged);
     lsr();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java Tue Jan  5 06:09:51 2010
@@ -18,9 +18,11 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
 import java.io.*;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -30,13 +32,14 @@
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
  */
-public class TestStreaming extends TestCase
+public class TestStreaming
 {
 
   // "map" command: grep -E (red|green|blue)
   // reduce command: uniq
-  protected File INPUT_FILE = new File("input.txt");
-  protected File OUTPUT_DIR = new File("out");
+  protected File TEST_DIR;
+  protected File INPUT_FILE;
+  protected File OUTPUT_DIR;
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
   // map behaves like "/usr/bin/tr . \\n"; (split words into lines)
   protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
@@ -52,6 +55,9 @@
     UtilTest utilTest = new UtilTest(getClass().getName());
     utilTest.checkUserDir();
     utilTest.redirectIfAntJunit();
+    TEST_DIR = new File(getClass().getName()).getAbsoluteFile();
+    OUTPUT_DIR = new File(TEST_DIR, "out");
+    INPUT_FILE = new File(TEST_DIR, "input.txt");
   }
 
   protected String getInputData() {
@@ -101,31 +107,20 @@
     assertEquals(getExpectedOutput(), output);
   }
 
-  public void testCommandLine() throws IOException
+  @Test
+  public void testCommandLine() throws Exception
   {
-    try {
-      try {
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (Exception e) {
-      }
-
-      createInput();
-      boolean mayExit = false;
-
-      // During tests, the default Configuration will use a local mapred
-      // So don't specify -config or -cluster
-      job = new StreamJob(genArgs(), mayExit);
-      int ret = job.go();
-      assertEquals(0, ret);
-      checkOutput();
-    } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        e.printStackTrace();
-      }
-    }
+    UtilTest.recursiveDelete(TEST_DIR);
+    assertTrue("Creating " + TEST_DIR, TEST_DIR.mkdirs());
+    createInput();
+    boolean mayExit = false;
+
+    // During tests, the default Configuration will use a local mapred
+    // So don't specify -config or -cluster
+    job = new StreamJob(genArgs(), mayExit);
+    int ret = job.go();
+    assertEquals(0, ret);
+    checkOutput();
   }
 
   public static void main(String[]args) throws Exception

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java Tue Jan  5 06:09:51 2010
@@ -22,6 +22,9 @@
 
 import org.apache.hadoop.mapred.Counters;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 public class TestStreamingCombiner extends TestStreaming {
 
   protected String combine = StreamUtil.makeJavaCommand(UniqApp.class, new String[]{""});
@@ -41,7 +44,8 @@
     };
   }
 
-  public void testCommandLine() throws IOException {
+  @Test
+  public void testCommandLine() throws Exception  {
     super.testCommandLine();
     // validate combiner counters
     String counterGrp = "org.apache.hadoop.mapred.Task$Counter";

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java Tue Jan  5 06:09:51 2010
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.streaming;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import java.io.File;
 import java.io.IOException;
 
@@ -30,11 +33,11 @@
  * This class tests streaming counters in MapReduce local mode.
  */
 public class TestStreamingCounters extends TestStreaming {
-
   public TestStreamingCounters() throws IOException {
     super();
   }
 
+  @Test
   public void testCommandLine() throws IOException
   {
     try {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java Tue Jan  5 06:09:51 2010
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+
 import java.io.*;
 
 import org.apache.hadoop.fs.FileUtil;
@@ -30,7 +31,7 @@
  * was hanging forever. Now this issue is solved. Similarly reducer is also
  * checked for task completion with empty input and nonempty output.
  */
-public class TestStreamingEmptyInpNonemptyOut extends TestCase
+public class TestStreamingEmptyInpNonemptyOut
 {
 
   protected File INPUT_FILE = new File("emptyInputFile.txt");
@@ -74,7 +75,8 @@
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }
-  
+
+  @Test
   public void testEmptyInputNonemptyOutput() throws IOException
   {
     try {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java Tue Jan  5 06:09:51 2010
@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import org.junit.Before;
+import static org.junit.Assert.*;
+
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -30,10 +33,12 @@
  * reducers have non-zero exit status and the
  * stream.non.zero.exit.status.is.failure jobconf is set.
  */
-public class TestStreamingExitStatus extends TestCase
+public class TestStreamingExitStatus
 {
-  protected File INPUT_FILE = new File("input.txt");
-  protected File OUTPUT_DIR = new File("out");  
+  protected File TEST_DIR =
+    new File("TestStreamingExitStatus").getAbsoluteFile();
+  protected File INPUT_FILE = new File(TEST_DIR, "input.txt");
+  protected File OUTPUT_DIR = new File(TEST_DIR, "out");
 
   protected String failingTask = StreamUtil.makeJavaCommand(FailApp.class, new String[]{"true"});
   protected String echoTask = StreamUtil.makeJavaCommand(FailApp.class, new String[]{"false"});
@@ -57,52 +62,48 @@
     };
   }
 
+  @Before
   public void setUp() throws IOException {
-    UtilTest.recursiveDelete(INPUT_FILE);
-    UtilTest.recursiveDelete(OUTPUT_DIR);
-    
+    UtilTest.recursiveDelete(TEST_DIR);
+    assertTrue(TEST_DIR.mkdirs());
+
     FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
     out.write("hello\n".getBytes());
     out.close();
   }
 
-  public void runStreamJob(boolean exitStatusIsFailure, boolean failMap) {
-    try {
-      boolean mayExit = false;
-      int returnStatus = 0;
-
-      StreamJob job = new StreamJob(genArgs(exitStatusIsFailure, failMap), mayExit);
-      returnStatus = job.go();
-      
-      if (exitStatusIsFailure) {
-        assertEquals("Streaming Job failure code expected", /*job not successful:*/1, returnStatus);
-      } else {
-        assertEquals("Streaming Job expected to succeed", 0, returnStatus);
-      }
-    } catch (Exception e) {
-      failTrace(e);
+  public void runStreamJob(boolean exitStatusIsFailure, boolean failMap) throws Exception {
+    boolean mayExit = false;
+    int returnStatus = 0;
+
+    StreamJob job = new StreamJob(genArgs(exitStatusIsFailure, failMap), mayExit);
+    returnStatus = job.go();
+    
+    if (exitStatusIsFailure) {
+      assertEquals("Streaming Job failure code expected", /*job not successful:*/1, returnStatus);
+    } else {
+      assertEquals("Streaming Job expected to succeed", 0, returnStatus);
     }
   }
-  
-  public void testMapFailOk() {
+
+  @Test
+  public void testMapFailOk() throws Exception {
     runStreamJob(false, true);
   }
-  
-  public void testMapFailNotOk() {
+
+  @Test
+  public void testMapFailNotOk() throws Exception {
     runStreamJob(true, true);
   }
-  
-  public void testReduceFailOk() {
+
+  @Test
+  public void testReduceFailOk() throws Exception {
     runStreamJob(false, false);
   }
   
-  public void testReduceFailNotOk() {
+  @Test
+  public void testReduceFailNotOk() throws Exception {
     runStreamJob(true, false);
   }  
   
-  protected void failTrace(Exception e) {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
 }
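
[Editorial note: one detail of the conversion is visible in this file. JUnit 3 invoked setUp() by name, but JUnit 4 runs fixture methods only when they are annotated, so the hunk above must add @Before explicitly or the directory setup would be silently skipped. A minimal sketch with illustrative names:]

    import org.junit.Before;
    import org.junit.Test;
    import static org.junit.Assert.assertTrue;

    public class TestLifecycleExample {
      private boolean ready;

      // Without @Before, JUnit 4 would never call this method and
      // every test would see ready == false.
      @Before
      public void setUp() {
        ready = true;
      }

      @Test
      public void testFixtureRan() {
        assertTrue(ready);
      }
    }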

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java Tue Jan  5 06:09:51 2010
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -56,6 +58,7 @@
     };
   }
 
+  @Test
   public void testCommandLine()
   {
     try {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java Tue Jan  5 06:09:51 2010
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import java.io.*;
 
 import org.apache.hadoop.fs.FileUtil;
@@ -27,7 +29,7 @@
  * This class tests hadoopStreaming in MapReduce local mode.
  * This testcase looks at different cases of tab position in input. 
  */
-public class TestStreamingKeyValue extends TestCase
+public class TestStreamingKeyValue
 {
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("stream_out");
@@ -74,7 +76,8 @@
     };
   }
   
-  public void testCommandLine()
+  @Test
+  public void testCommandLine() throws Exception
   {
     String outFileName = "part-00000";
     File outFile = null;
@@ -96,25 +99,12 @@
       System.err.println("outEx1=" + outputExpect);
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreamingKeyValue().testCommandLine();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java Tue Jan  5 06:09:51 2010
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -29,7 +31,7 @@
 /**
  * This class tests hadoopStreaming with customized separator in MapReduce local mode.
  */
-public class TestStreamingSeparator extends TestCase
+public class TestStreamingSeparator
 {
 
   // "map" command: grep -E (red|green|blue)
@@ -86,7 +88,8 @@
     };
   }
   
-  public void testCommandLine()
+  @Test
+  public void testCommandLine() throws Exception
   {
     try {
       try {
@@ -107,25 +110,12 @@
       System.err.println("outEx1=" + outputExpect);
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreamingSeparator().testCommandLine();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java Tue Jan  5 06:09:51 2010
@@ -22,7 +22,8 @@
 import java.io.IOException;
 import java.io.File;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,12 +33,13 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
+
 /**
  * Tests for the ability of a streaming task to set the status
  * by writing "reporter:status:" lines to stderr. Uses MiniMR
  * since the local jobtracker doesn't track status.
  */
-public class TestStreamingStatus extends TestCase {
+public class TestStreamingStatus {
   private static String TEST_ROOT_DIR =
     new File(System.getProperty("test.build.data","/tmp"))
     .toURI().toString().replace(' ', '+');
@@ -78,6 +80,7 @@
     } catch (Exception e) {}
   }
   
+  @Test
   public void testStreamingStatus() throws Exception {
     MiniMRCluster mr = null;
     FileSystem fs = null;

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java Tue Jan  5 06:09:51 2010
@@ -18,19 +18,21 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 /**
  * Test that streaming consumes stderr from the streaming process
  * (before, during, and after the main processing of mapred input),
  * and that stderr messages count as task progress.
  */
-public class TestStreamingStderr extends TestCase
+public class TestStreamingStderr
 {
   public TestStreamingStderr() throws IOException {
     UtilTest utilTest = new UtilTest(getClass().getName());
@@ -71,43 +73,38 @@
   }
 
   public void runStreamJob(String baseName, boolean hasInput,
-                           int preLines, int duringLines, int postLines) {
-    try {
-      File input = setupInput(baseName, hasInput);
-      File output = setupOutput(baseName);
-      boolean mayExit = false;
-      int returnStatus = 0;
-
-      StreamJob job = new StreamJob(genArgs(input, output, preLines, duringLines, postLines), mayExit);
-      returnStatus = job.go();
-      assertEquals("StreamJob success", 0, returnStatus);
-    } catch (Exception e) {
-      failTrace(e);
-    }
+                           int preLines, int duringLines, int postLines)
+    throws Exception {
+    File input = setupInput(baseName, hasInput);
+    File output = setupOutput(baseName);
+    boolean mayExit = false;
+    int returnStatus = 0;
+
+    StreamJob job = new StreamJob(genArgs(input, output, preLines, duringLines, postLines), mayExit);
+    returnStatus = job.go();
+    assertEquals("StreamJob success", 0, returnStatus);
   }
 
   // This test will fail by blocking forever if the stderr isn't
   // consumed by Hadoop for tasks that don't have any input.
-  public void testStderrNoInput() throws IOException {
+  @Test
+  public void testStderrNoInput() throws Exception {
     runStreamJob("stderr-pre", false, 10000, 0, 0);
   }
 
   // Streaming should continue to read stderr even after all input has
   // been consumed.
-  public void testStderrAfterOutput() throws IOException {
+  @Test
+  public void testStderrAfterOutput() throws Exception {
     runStreamJob("stderr-post", false, 0, 0, 10000);
   }
 
   // This test should produce a task timeout if stderr lines aren't
   // counted as progress. This won't actually work until
   // LocalJobRunner supports timeouts.
-  public void testStderrCountsAsProgress() throws IOException {
+  @Test
+  public void testStderrCountsAsProgress() throws Exception {
     runStreamJob("stderr-progress", true, 10, 1000, 0);
   }
   
-  protected void failTrace(Exception e) {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
 }

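Here the shared helper runStreamJob previously wrapped its body in try/catch and called failTrace; it now declares throws Exception, and the three @Test methods widen their own signatures from throws IOException to throws Exception so the helper's failures propagate to the runner. A sketch of that shape, assuming a launcher that returns an exit status (the int parameters mirror the real test's pre/during/post stderr line counts; names are illustrative):

  import org.junit.Test;
  import static org.junit.Assert.*;

  public class HelperPropagationSketch {

    // Shared helper: no try/catch, just declare what may be thrown.
    private void runStreamJob(String baseName, int preLines,
                              int duringLines, int postLines) throws Exception {
      int returnStatus = launch(baseName, preLines, duringLines, postLines);
      assertEquals("StreamJob success", 0, returnStatus);
    }

    @Test
    public void stderrBeforeInput() throws Exception {
      runStreamJob("stderr-pre", 10000, 0, 0);
    }

    @Test
    public void stderrAfterOutput() throws Exception {
      runStreamJob("stderr-post", 0, 0, 10000);
    }

    private int launch(String baseName, int pre, int during, int post)
        throws Exception {
      return 0;  // stand-in for StreamJob.go()
    }
  }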
Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingTaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingTaskLog.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingTaskLog.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingTaskLog.java Tue Jan  5 06:09:51 2010
@@ -28,13 +28,15 @@
 import org.apache.hadoop.mapred.TestMiniMRWithDFS;
 import org.apache.hadoop.util.Shell;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 
 /**
  * This tests the environment set by TT for the child of task jvm.
  * This will launch a streaming job with a shell script as mapper.
  */
-public class TestStreamingTaskLog extends TestCase {
+public class TestStreamingTaskLog {
   String input = "the dummy input";
   Path inputPath = new Path("inDir");
   Path outputPath = new Path("outDir");
@@ -65,6 +67,7 @@
    *  (b) hadoop.tasklog.totalLogFileSize
    * for the children of java tasks in streaming jobs.
    */
+  @Test
   public void testStreamingTaskLogWithHadoopCmd() {
     try {
       final int numSlaves = 1;

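This file's hunks change only the imports and the discovery annotation; whatever handling the try block visible in the context lines performs is untouched here. Worth noting for similar conversions: when a test's catch block exists only to assert that an exception occurred, JUnit 4 can express that directly via the annotation's expected element. A hedged sketch of that idiom (not applied by this commit; the helper is hypothetical):

  import org.junit.Test;

  public class ExpectedExceptionSketch {

    // JUnit 4 fails this test unless an IllegalArgumentException is thrown,
    // replacing a hand-written try { ... fail(); } catch (...) {} block.
    @Test(expected = IllegalArgumentException.class)
    public void rejectsNegativeLogSize() {
      setTotalLogFileSize(-1);  // illustrative helper, not from the commit
    }

    private void setTotalLogFileSize(long bytes) {
      if (bytes < 0) {
        throw new IllegalArgumentException("log size must be non-negative");
      }
    }
  }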
Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java Tue Jan  5 06:09:51 2010
@@ -25,7 +25,8 @@
 import java.io.PrintWriter;
 import java.io.StringWriter;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,7 +41,7 @@
  * This test case tests the symlink creation
  * utility provided by distributed caching 
  */
-public class TestSymLink extends TestCase
+public class TestSymLink
 {
   String INPUT_FILE = "/testing-streaming/input.txt";
   String OUTPUT_DIR = "/testing-streaming/out";
@@ -52,92 +53,78 @@
   String cacheString = "This is just the cache string";
   StreamJob job;
 
-  public TestSymLink() throws IOException
-  {
-  }
-
-  public void testSymLink()
+  @Test
+  public void testSymLink() throws Exception
   {
+    boolean mayExit = false;
+    MiniMRCluster mr = null;
+    MiniDFSCluster dfs = null; 
     try {
-      boolean mayExit = false;
-      MiniMRCluster mr = null;
-      MiniDFSCluster dfs = null; 
-      try{
-        Configuration conf = new Configuration();
-        dfs = new MiniDFSCluster(conf, 1, true, null);
-        FileSystem fileSys = dfs.getFileSystem();
-        String namenode = fileSys.getUri().toString();
-        mr  = new MiniMRCluster(1, namenode, 3);
-        // During tests, the default Configuration will use a local mapred
-        // So don't specify -config or -cluster
-        String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
-        String strNamenode = "fs.default.name=" + namenode;
-        String argv[] = new String[] {
-          "-input", INPUT_FILE,
-          "-output", OUTPUT_DIR,
-          "-mapper", map,
-          "-reducer", reduce,
-          //"-verbose",
-          //"-jobconf", "stream.debug=set"
-          "-jobconf", strNamenode,
-          "-jobconf", strJobtracker,
-          "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
-          "-jobconf", 
-            JobConf.MAPRED_MAP_TASK_JAVA_OPTS+ "=" +
-              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-              "-Dbuild.test=" + System.getProperty("build.test") + " " +
-              conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
-                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
-          "-jobconf", 
-            JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS+ "=" +
-              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-              "-Dbuild.test=" + System.getProperty("build.test") + " " +
-              conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
-                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
-          "-cacheFile", fileSys.getUri() + CACHE_FILE + "#testlink"
-        };
+      Configuration conf = new Configuration();
+      dfs = new MiniDFSCluster(conf, 1, true, null);
+      FileSystem fileSys = dfs.getFileSystem();
+      String namenode = fileSys.getUri().toString();
+      mr  = new MiniMRCluster(1, namenode, 3);
+      // During tests, the default Configuration will use a local mapred
+      // So don't specify -config or -cluster
+      String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
+      String strNamenode = "fs.default.name=" + namenode;
+      String argv[] = new String[] {
+        "-input", INPUT_FILE,
+        "-output", OUTPUT_DIR,
+        "-mapper", map,
+        "-reducer", reduce,
+        //"-verbose",
+        //"-jobconf", "stream.debug=set"
+        "-jobconf", strNamenode,
+        "-jobconf", strJobtracker,
+        "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
+        "-jobconf", 
+          JobConf.MAPRED_MAP_TASK_JAVA_OPTS+ "=" +
+            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+            "-Dbuild.test=" + System.getProperty("build.test") + " " +
+            conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
+                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+        "-jobconf", 
+          JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS+ "=" +
+            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+            "-Dbuild.test=" + System.getProperty("build.test") + " " +
+            conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
+                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+        "-cacheFile", fileSys.getUri() + CACHE_FILE + "#testlink"
+      };
 
-        fileSys.delete(new Path(OUTPUT_DIR), true);
+      fileSys.delete(new Path(OUTPUT_DIR), true);
+      
+      DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
+      file.writeBytes(mapString);
+      file.close();
+      file = fileSys.create(new Path(CACHE_FILE));
+      file.writeBytes(cacheString);
+      file.close();
         
-        DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
-        file.writeBytes(mapString);
-        file.close();
-        file = fileSys.create(new Path(CACHE_FILE));
-        file.writeBytes(cacheString);
-        file.close();
-          
-        job = new StreamJob(argv, mayExit);      
-        job.go();
-
-        fileSys = dfs.getFileSystem();
-        String line = null;
-        Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
-                                                new Path(OUTPUT_DIR),
-                                                new Utils.OutputFileUtils
-                                                         .OutputFilesFilter()));
-        for (int i = 0; i < fileList.length; i++){
-          System.out.println(fileList[i].toString());
-          BufferedReader bread =
-            new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
-          line = bread.readLine();
-          System.out.println(line);
-        }
-        assertEquals(cacheString + "\t", line);
-      } finally{
-        if (dfs != null) { dfs.shutdown(); }
-        if (mr != null) { mr.shutdown();}
+      job = new StreamJob(argv, mayExit);      
+      job.go();
+
+      fileSys = dfs.getFileSystem();
+      String line = null;
+      Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
+                                              new Path(OUTPUT_DIR),
+                                              new Utils.OutputFileUtils
+                                                       .OutputFilesFilter()));
+      for (int i = 0; i < fileList.length; i++){
+        System.out.println(fileList[i].toString());
+        BufferedReader bread =
+          new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
+        line = bread.readLine();
+        System.out.println(line);
       }
-      
-    } catch(Exception e) {
-      failTrace(e);
+      assertEquals(cacheString + "\t", line);
+    } finally{
+      if (dfs != null) { dfs.shutdown(); }
+      if (mr != null) { mr.shutdown();}
     }
-  }
-
-  void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
+    
   }
 
   public static void main(String[]args) throws Exception

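The restructuring above removes the nested try blocks: the MiniDFSCluster and MiniMRCluster references are declared null before a single try, so the finally clause can shut down whichever clusters actually started, and any exception from the job or the assertions reaches JUnit intact. The essential lifecycle shape, sketched with a stand-in resource since the mini-cluster classes need a running Hadoop build:

  import org.junit.Test;
  import static org.junit.Assert.*;

  public class ClusterLifecycleSketch {

    // Stand-in for MiniDFSCluster / MiniMRCluster: started in the try,
    // stopped in finally only if startup succeeded.
    static class FakeCluster {
      boolean running = true;
      void shutdown() { running = false; }
    }

    @Test
    public void resourcesShutDownEvenOnFailure() throws Exception {
      FakeCluster dfs = null;
      FakeCluster mr = null;
      try {
        dfs = new FakeCluster();
        mr = new FakeCluster();
        // ... run the streaming job and assert on its output here ...
        assertTrue(dfs.running);
      } finally {
        // Null checks matter: if a constructor above had thrown,
        // the corresponding reference would still be null.
        if (dfs != null) { dfs.shutdown(); }
        if (mr != null) { mr.shutdown(); }
      }
    }
  }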
Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java Tue Jan  5 06:09:51 2010
@@ -26,9 +26,12 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 
-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestTypedBytesStreaming extends TestCase {
+public class TestTypedBytesStreaming {
 
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("out");
@@ -60,33 +63,29 @@
       "-io", "typedbytes"
     };
   }
-  
+
+  @Before
+  @After
+  public void cleanupOutput() throws Exception {
+    FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+    INPUT_FILE.delete();
+    createInput();
+  }
+
+  @Test
   public void testCommandLine() throws Exception {
-    try {
-      try {
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (Exception e) {
-      }
-
-      createInput();
-      OUTPUT_DIR.delete();
-
-      // During tests, the default Configuration will use a local mapred
-      // So don't specify -config or -cluster
-      StreamJob job = new StreamJob();
-      job.setConf(new Configuration());
-      job.run(genArgs());
-      File outFile = new File(OUTPUT_DIR, "part-00000").getAbsoluteFile();
-      String output = StreamUtil.slurp(outFile);
-      outFile.delete();
-      System.out.println("   map=" + map);
-      System.out.println("reduce=" + reduce);
-      System.err.println("outEx1=" + outputExpect);
-      System.err.println("  out1=" + output);
-      assertEquals(outputExpect, output);
-    } finally {
-      INPUT_FILE.delete();
-      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-    }
+    // During tests, the default Configuration will use a local mapred
+    // So don't specify -config or -cluster
+    StreamJob job = new StreamJob();
+    job.setConf(new Configuration());
+    job.run(genArgs());
+    File outFile = new File(OUTPUT_DIR, "part-00000").getAbsoluteFile();
+    String output = StreamUtil.slurp(outFile);
+    outFile.delete();
+    System.out.println("   map=" + map);
+    System.out.println("reduce=" + reduce);
+    System.err.println("outEx1=" + outputExpect);
+    System.err.println("  out1=" + output);
+    assertEquals(outputExpect, output);
   }
 }

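A detail worth calling out above: cleanupOutput carries both @Before and @After, so the same delete-and-recreate logic runs before and after every test, replacing the per-test try/finally cleanup. A minimal sketch of the double-annotated fixture (file name and contents are illustrative):

  import org.junit.After;
  import org.junit.Before;
  import org.junit.Test;
  import static org.junit.Assert.*;

  import java.io.File;
  import java.io.FileWriter;

  public class DoubleFixtureSketch {

    private final File inputFile = new File("input.txt");  // illustrative

    // One method, two annotations: JUnit 4 invokes it both before and
    // after each @Test, so tests start and finish with a clean slate.
    @Before
    @After
    public void resetInput() throws Exception {
      inputFile.delete();
      FileWriter w = new FileWriter(inputFile);
      w.write("roses are red\nviolets are blue\n");  // illustrative content
      w.close();
    }

    @Test
    public void inputExistsWhenTestRuns() {
      assertTrue(inputFile.exists());
    }
  }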
Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java?rev=895914&r1=895913&r2=895914&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java Tue Jan  5 06:09:51 2010
@@ -31,7 +31,8 @@
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.StringUtils;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 /**
  * This tests the setting of memory limit for streaming processes.
@@ -40,7 +41,7 @@
  * it to succeed. Then program is launched with insufficient memory and 
  * is expected to be a failure.  
  */
-public class TestUlimit extends TestCase {
+public class TestUlimit {
   String input = "the dummy input";
   Path inputPath = new Path("/testing/in");
   Path outputPath = new Path("/testing/out");
@@ -75,6 +76,7 @@
    * it to succeed. Then program is launched with insufficient memory and 
    * is expected to be a failure.  
    */
+  @Test
   public void testCommandLine() {
     if (StreamUtil.isCygwin()) {
       return;

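One conversion artifact remains visible here: testCommandLine still skips Cygwin with a bare return, which JUnit reports as a pass rather than a skip. JUnit 4's Assume (available since 4.4) marks such tests as ignored instead; a hedged sketch of that alternative, not applied by this commit (the platform check is a stand-in for StreamUtil.isCygwin()):

  import org.junit.Test;
  import static org.junit.Assume.assumeTrue;

  public class PlatformSkipSketch {

    // Stand-in for StreamUtil.isCygwin().
    private static boolean isCygwin() {
      return System.getProperty("os.name", "").toLowerCase().contains("windows");
    }

    @Test
    public void memoryLimitEnforced() {
      // When the assumption fails, JUnit 4 aborts the test as ignored
      // rather than silently passing via an early return.
      assumeTrue(!isCygwin());
      // ... launch the job with restricted memory and assert failure ...
    }
  }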

