hadoop-mapreduce-commits mailing list archives

From: ste...@apache.org
Subject: svn commit: r903227 [7/16] - in /hadoop/mapreduce/branches/MAPREDUCE-233: ./ .eclipse.templates/ conf/ ivy/ src/benchmarks/gridmix/ src/benchmarks/gridmix/javasort/ src/benchmarks/gridmix/maxent/ src/benchmarks/gridmix/monsterQuery/ src/benchmarks/grid...
Date: Tue, 26 Jan 2010 14:03:09 GMT
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/normalImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/normalImport.q?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/normalImport.q (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/normalImport.q Tue Jan 26 14:02:53 2010
@@ -1,2 +1,2 @@
-CREATE TABLE NORMAL_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
+CREATE TABLE NORMAL_HIVE_IMPORT ( DATA_COL0 STRING, DATA_COL1 INT, DATA_COL2 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\001' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/NORMAL_HIVE_IMPORT' INTO TABLE NORMAL_HIVE_IMPORT;

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/numericImport.q
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/numericImport.q?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/numericImport.q (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/sqoop/testdata/hive/scripts/numericImport.q Tue Jan 26 14:02:53 2010
@@ -1,2 +1,2 @@
-CREATE TABLE NUMERIC_HIVE_IMPORT ( DATA_COL0 DOUBLE, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\01' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
+CREATE TABLE NUMERIC_HIVE_IMPORT ( DATA_COL0 DOUBLE, DATA_COL1 STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\001' LINES TERMINATED BY '\012' STORED AS TEXTFILE;
 LOAD DATA INPATH 'file:BASEPATH/sqoop/warehouse/NUMERIC_HIVE_IMPORT' INTO TABLE NUMERIC_HIVE_IMPORT;

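Both Hive test scripts above switch the field-delimiter escape from the two-digit '\01' to the three-digit octal form '\001', matching the '\012' already used for the line delimiter and, presumably, the exact text Sqoop now generates for Hive imports; the character itself is Ctrl-A (octal 001). A minimal sketch of the two escapes, assuming nothing beyond the JDK:

// Sketch only (not part of the commit): both escapes denote control characters,
// written here with Java's octal char escapes.
public class DelimiterSketch {
  public static void main(String[] args) {
    char fieldDelim = '\001';   // Ctrl-A, the field delimiter in the scripts above
    char lineDelim  = '\012';   // newline (LF), the line delimiter
    System.out.println((int) fieldDelim);   // prints 1
    System.out.println((int) lineDelim);    // prints 10
  }
}
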
Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Jan 26 14:02:53 2010
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/streaming:713112
 /hadoop/core/trunk/src/contrib/streaming:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/streaming:804974-885774
+/hadoop/mapreduce/trunk/src/contrib/streaming:804974-903221

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/ivy.xml?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/ivy.xml (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/ivy.xml Tue Jan 26 14:02:53 2010
@@ -1,4 +1,20 @@
 <?xml version="1.0" ?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
 <ivy-module version="1.0">
   <info organisation="org.apache.hadoop" module="${ant.project.name}">
     <license name="Apache 2.0"/>
@@ -27,11 +43,11 @@
     <dependency org="org.apache.hadoop" name="hadoop-core" 
                 rev="${hadoop-core.version}" conf="common->default"/>
     <dependency org="org.apache.hadoop" name="hadoop-core-test" 
-                rev="${hadoop-core.version}" conf="common->default"/>
+                rev="${hadoop-core.version}" conf="test->default"/>
     <dependency org="org.apache.hadoop" name="hadoop-hdfs" 
                 rev="${hadoop-hdfs.version}" conf="common->default"/>
     <dependency org="org.apache.hadoop" name="hadoop-hdfs-test"
-                rev="${hadoop-hdfs.version}" conf="common->default"/>
+                rev="${hadoop-hdfs.version}" conf="test->default"/>
     <dependency org="commons-cli" name="commons-cli" 
                 rev="${commons-cli.version}" conf="common->default"/>
     <dependency org="commons-logging" name="commons-logging" 
@@ -56,7 +72,5 @@
                 rev="${avro.version}" conf="common->default"/>
     <dependency org="org.slf4j" name="slf4j-api" 
                 rev="${slf4j-api.version}" conf="common->master"/>
-    <dependency org="org.slf4j" name="slf4j-log4j12" 
-                rev="${slf4j-log4j12.version}" conf="common->master"/>
   </dependencies>
 </ivy-module>

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/ivy/libraries.properties?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/ivy/libraries.properties (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/ivy/libraries.properties Tue Jan 26 14:02:53 2010
@@ -1,3 +1,15 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
 #This properties file lists the versions of the various artifacts used by streaming.
 #It drives ivy and the generation of a maven POM
 

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/AutoInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/AutoInputFormat.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/AutoInputFormat.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/AutoInputFormat.java Tue Jan 26 14:02:53 2010
@@ -52,7 +52,7 @@
   public RecordReader getRecordReader(InputSplit split, JobConf job,
     Reporter reporter) throws IOException {
     FileSplit fileSplit = (FileSplit) split;
-    FileSystem fs = FileSystem.get(job);
+    FileSystem fs = FileSystem.get(fileSplit.getPath().toUri(), job);
     FSDataInputStream is = fs.open(fileSplit.getPath());
     byte[] header = new byte[3];
     RecordReader reader = null;

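The one-line change in AutoInputFormat.getRecordReader resolves the filesystem from the split's own URI instead of always taking the default filesystem from the job configuration, so a split that lives outside fs.default.name (for example a local file: path while the default is HDFS) can still be opened. A minimal sketch of the difference, using a hypothetical local path:

// Sketch only: contrast the default-FS lookup with the URI-aware lookup
// used by the patched AutoInputFormat. The path below is hypothetical.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FsLookupSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path splitPath = new Path("file:///tmp/data/part-00000");

    FileSystem defaultFs = FileSystem.get(conf);                    // whatever fs.default.name points at
    FileSystem splitFs   = FileSystem.get(splitPath.toUri(), conf); // the filesystem that owns the split

    System.out.println("default fs: " + defaultFs.getUri());
    System.out.println("split fs:   " + splitFs.getUri());          // file:/// regardless of the default
  }
}
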
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Tue Jan 26 14:02:53 2010
@@ -25,10 +25,9 @@
 import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Iterator;
 import java.util.List;
-import java.util.ListIterator;
 import java.util.Map;
+import java.util.regex.Pattern;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
@@ -123,13 +122,13 @@
       postProcessArgs();
   
       setJobConf();
-      return submitAndMonitorJob();
     }catch (IllegalArgumentException ex) {
       //ignore, since log will already be printed
       // print the log in debug mode.
       LOG.debug("Error in streaming job", ex);
       return 1;
     }
+    return submitAndMonitorJob();
   }
   
   /**
@@ -191,9 +190,8 @@
     }
     msg("addTaskEnvironment=" + addTaskEnvironment_);
 
-    Iterator it = packageFiles_.iterator();
-    while (it.hasNext()) {
-      File f = new File((String) it.next());
+    for (final String packageFile : packageFiles_) {
+      File f = new File(packageFile);
       if (f.isFile()) {
         shippedCanonFiles_.add(f.getCanonicalPath());
       }
@@ -262,37 +260,44 @@
           inputSpecs_.add(input);
         }
       }
-      output_ = (String) cmdLine.getOptionValue("output"); 
+      output_ =  cmdLine.getOptionValue("output"); 
       
-      mapCmd_ = (String)cmdLine.getOptionValue("mapper"); 
-      comCmd_ = (String)cmdLine.getOptionValue("combiner"); 
-      redCmd_ = (String)cmdLine.getOptionValue("reducer"); 
+      mapCmd_ = cmdLine.getOptionValue("mapper"); 
+      comCmd_ = cmdLine.getOptionValue("combiner"); 
+      redCmd_ = cmdLine.getOptionValue("reducer"); 
       
       lazyOutput_ = cmdLine.hasOption("lazyOutput");
       
       values = cmdLine.getOptionValues("file");
       if (values != null && values.length > 0) {
+        StringBuilder unpackRegex = new StringBuilder(
+          config_.getPattern(JobContext.JAR_UNPACK_PATTERN,
+                             JobConf.UNPACK_JAR_PATTERN_DEFAULT).pattern());
         for (String file : values) {
           packageFiles_.add(file);
+          String fname = new File(file).getName();
+          unpackRegex.append("|(?:").append(Pattern.quote(fname)).append(")");
         }
+        config_.setPattern(JobContext.JAR_UNPACK_PATTERN,
+                           Pattern.compile(unpackRegex.toString()));
         validate(packageFiles_);
       }
          
-      String fsName = (String)cmdLine.getOptionValue("dfs");
+      String fsName = cmdLine.getOptionValue("dfs");
       if (null != fsName){
         LOG.warn("-dfs option is deprecated, please use -fs instead.");
         config_.set("fs.default.name", fsName);
       }
       
-      additionalConfSpec_ = (String)cmdLine.getOptionValue("additionalconfspec"); 
-      inputFormatSpec_ = (String)cmdLine.getOptionValue("inputformat"); 
-      outputFormatSpec_ = (String)cmdLine.getOptionValue("outputformat");
-      numReduceTasksSpec_ = (String)cmdLine.getOptionValue("numReduceTasks"); 
-      partitionerSpec_ = (String)cmdLine.getOptionValue("partitioner");
-      inReaderSpec_ = (String)cmdLine.getOptionValue("inputreader"); 
-      mapDebugSpec_ = (String)cmdLine.getOptionValue("mapdebug");    
-      reduceDebugSpec_ = (String)cmdLine.getOptionValue("reducedebug");
-      ioSpec_ = (String)cmdLine.getOptionValue("io");
+      additionalConfSpec_ = cmdLine.getOptionValue("additionalconfspec"); 
+      inputFormatSpec_ = cmdLine.getOptionValue("inputformat"); 
+      outputFormatSpec_ = cmdLine.getOptionValue("outputformat");
+      numReduceTasksSpec_ = cmdLine.getOptionValue("numReduceTasks"); 
+      partitionerSpec_ = cmdLine.getOptionValue("partitioner");
+      inReaderSpec_ = cmdLine.getOptionValue("inputreader"); 
+      mapDebugSpec_ = cmdLine.getOptionValue("mapdebug");    
+      reduceDebugSpec_ = cmdLine.getOptionValue("reducedebug");
+      ioSpec_ = cmdLine.getOptionValue("io");
       
       String[] car = cmdLine.getOptionValues("cacheArchive"); 
       if (null != car && car.length > 0){
@@ -582,7 +587,7 @@
   /** @return path to the created Jar file or null if no files are necessary.
    */
   protected String packageJobJar() throws IOException {
-    ArrayList unjarFiles = new ArrayList();
+    ArrayList<String> unjarFiles = new ArrayList<String>();
 
     // Runtime code: ship same version of code as self (job submitter code)
     // usually found in: build/contrib or build/hadoop-<version>-dev-streaming.jar
@@ -884,15 +889,11 @@
   protected void listJobConfProperties()
   {
     msg("==== JobConf properties:");
-    Iterator it = jobConf_.iterator();
-    TreeMap sorted = new TreeMap();
-    while(it.hasNext()) {
-      Map.Entry en = (Map.Entry)it.next();
+    TreeMap<String,String> sorted = new TreeMap<String,String>();
+    for (final Map.Entry<String, String> en : jobConf_)  {
       sorted.put(en.getKey(), en.getValue());
     }
-    it = sorted.entrySet().iterator();
-    while(it.hasNext()) {
-      Map.Entry en = (Map.Entry)it.next();
+    for (final Map.Entry<String,String> en: sorted.entrySet()) {
       msg(en.getKey() + "=" + en.getValue());
     }
     msg("====");
@@ -997,11 +998,11 @@
   protected JobClient jc_;
 
   // command-line arguments
-  protected ArrayList inputSpecs_ = new ArrayList(); // <String>
-  protected TreeSet seenPrimary_ = new TreeSet(); // <String>
+  protected ArrayList<String> inputSpecs_ = new ArrayList<String>();
+  protected TreeSet<String> seenPrimary_ = new TreeSet<String>();
   protected boolean hasSimpleInputSpecs_;
-  protected ArrayList packageFiles_ = new ArrayList(); // <String>
-  protected ArrayList shippedCanonFiles_ = new ArrayList(); // <String>
+  protected ArrayList<String> packageFiles_ = new ArrayList<String>(); 
+  protected ArrayList<String> shippedCanonFiles_ = new ArrayList<String>();
   //protected TreeMap<String, String> userJobConfProps_ = new TreeMap<String, String>(); 
   protected String output_;
   protected String mapCmd_;

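Besides dropping redundant String casts and raw collections, the -file handling above now appends each shipped file's name to the job's jar-unpack pattern (JobContext.JAR_UNPACK_PATTERN), so files packaged into the job jar are unpacked again on the task nodes. A minimal sketch of that regex accumulation, with hypothetical file names and an assumed base pattern:

// Sketch only: accumulate an unpack regex the way the patched parseArgv() does.
// The base pattern and file names below are assumptions for illustration.
import java.util.regex.Pattern;

public class UnpackRegexSketch {
  public static void main(String[] args) {
    StringBuilder unpackRegex = new StringBuilder("(?:classes/|lib/).*");  // assumed default pattern
    for (String file : new String[] { "mapper.py", "reducer.py" }) {       // hypothetical -file arguments
      String fname = new java.io.File(file).getName();
      unpackRegex.append("|(?:").append(Pattern.quote(fname)).append(")");
    }
    Pattern p = Pattern.compile(unpackRegex.toString());
    System.out.println(p.matcher("mapper.py").matches());    // true: shipped file is unpacked
    System.out.println(p.matcher("lib/foo.jar").matches());  // true: base pattern still applies
  }
}
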
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestAutoInputFormat.java Tue Jan 26 14:02:53 2010
@@ -36,9 +36,10 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.streaming.AutoInputFormat;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestAutoInputFormat extends TestCase {
+public class TestAutoInputFormat {
 
   private static Configuration conf = new Configuration();
 
@@ -49,6 +50,7 @@
   private static final int SPLITS_COUNT = 2;
 
   @SuppressWarnings( { "unchecked", "deprecation" })
+  @Test
   public void testFormat() throws IOException {
     JobConf job = new JobConf(conf);
     FileSystem fs = FileSystem.getLocal(conf);

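This and the following test classes move from JUnit 3 (extending junit.framework.TestCase) to JUnit 4: the superclass goes away, assertions come from a static import of org.junit.Assert, and each test method carries @Test since discovery is driven by the annotation rather than the test* naming convention. A minimal sketch of the resulting shape, using a hypothetical test class:

// Sketch only: the JUnit 3 -> JUnit 4 shape used across these tests.
import org.junit.Test;
import static org.junit.Assert.*;

public class ExampleStreamingTest {              // no longer extends junit.framework.TestCase
  @Test                                          // the annotation marks the test method
  public void testSomething() throws Exception {
    assertEquals(4, 2 + 2);
  }
}
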
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestDumpTypedBytes.java Tue Jan 26 14:02:53 2010
@@ -31,10 +31,12 @@
 import org.apache.hadoop.streaming.DumpTypedBytes;
 import org.apache.hadoop.typedbytes.TypedBytesInput;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestDumpTypedBytes extends TestCase {
+public class TestDumpTypedBytes {
 
+  @Test
   public void testDumping() throws Exception {
     Configuration conf = new Configuration();
     MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestGzipInput.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestGzipInput.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestGzipInput.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestGzipInput.java Tue Jan 26 14:02:53 2010
@@ -30,13 +30,13 @@
 {
 
   public TestGzipInput() throws IOException {
-    INPUT_FILE = new File("input.txt.gz");
+    INPUT_FILE = new File(TEST_DIR, "input.txt.gz");
   }
   
   protected void createInput() throws IOException
   {
     GZIPOutputStream out = new GZIPOutputStream(
-                                                new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
+      new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
     out.write(input.getBytes("UTF-8"));
     out.close();
   }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestLoadTypedBytes.java Tue Jan 26 14:02:53 2010
@@ -31,10 +31,12 @@
 import org.apache.hadoop.typedbytes.TypedBytesOutput;
 import org.apache.hadoop.typedbytes.TypedBytesWritable;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestLoadTypedBytes extends TestCase {
+public class TestLoadTypedBytes {
 
+  @Test
   public void testLoading() throws Exception {
     Configuration conf = new Configuration();
     MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java Tue Jan 26 14:02:53 2010
@@ -24,9 +24,14 @@
 import java.io.DataOutputStream;
 import java.io.InputStreamReader;
 import java.io.BufferedReader;
+import java.util.Arrays;
 import java.util.zip.ZipEntry;
 import java.util.jar.JarOutputStream;
 import java.util.zip.ZipOutputStream;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -34,6 +39,10 @@
 import org.apache.hadoop.mapred.*;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.util.StringUtils;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 /**
  * This class tests cacheArchive option of streaming 
@@ -42,12 +51,14 @@
  */
 public class TestMultipleArchiveFiles extends TestStreaming
 {
+  private static final Log LOG = LogFactory.getLog(TestMultipleArchiveFiles.class);
 
   private StreamJob job;
-  private String INPUT_FILE = "input.txt";
-  private String CACHE_ARCHIVE_1 = "cacheArchive1.zip";
+  private String INPUT_DIR = "multiple-archive-files/";
+  private String INPUT_FILE = INPUT_DIR + "input.txt";
+  private String CACHE_ARCHIVE_1 = INPUT_DIR + "cacheArchive1.zip";
   private File CACHE_FILE_1 = null;
-  private String CACHE_ARCHIVE_2 = "cacheArchive2.zip";
+  private String CACHE_ARCHIVE_2 = INPUT_DIR + "cacheArchive2.zip";
   private File CACHE_FILE_2 = null;
   private String expectedOutput = null;
   private String OUTPUT_DIR = "out";
@@ -59,27 +70,23 @@
   private String strNamenode = null;
   private String namenode = null;
 
-  public TestMultipleArchiveFiles() throws IOException {
+  public TestMultipleArchiveFiles() throws Exception {
     CACHE_FILE_1 = new File("cacheArchive1");
     CACHE_FILE_2 = new File("cacheArchive2");
     input = "HADOOP";
     expectedOutput = "HADOOP\t\nHADOOP\t\n";
-    try {
-      conf = new Configuration();      
-      dfs = new MiniDFSCluster(conf, 1, true, null);      
-      fileSys = dfs.getFileSystem();
-      namenode = fileSys.getUri().getAuthority();
-      mr  = new MiniMRCluster(1, namenode, 3);
-      strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
-      strNamenode = "fs.default.name=" + namenode;
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+    conf = new Configuration();      
+    dfs = new MiniDFSCluster(conf, 1, true, null);      
+    fileSys = dfs.getFileSystem();
+    namenode = fileSys.getUri().getAuthority();
+    mr  = new MiniMRCluster(1, namenode, 3);
+    strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
+    strNamenode = "fs.default.name=" + namenode;
   }
   
   protected void createInput() throws IOException
   {
-
+    fileSys.delete(new Path(INPUT_DIR), true);
     DataOutputStream dos = fileSys.create(new Path(INPUT_FILE));
     String inputFileString = "symlink1/cacheArchive1\nsymlink2/cacheArchive2";
     dos.write(inputFileString.getBytes("UTF-8"));
@@ -103,14 +110,9 @@
   }
 
   protected String[] genArgs() {
-    String cacheArchiveString1 = null;
-    String cacheArchiveString2 = null;
-    try {
-      cacheArchiveString1 = fileSys.getUri().toString()+fileSys.getWorkingDirectory().toString()+"/"+CACHE_ARCHIVE_1+"#symlink1";
-      cacheArchiveString2 = fileSys.getUri().toString()+fileSys.getWorkingDirectory().toString()+"/"+CACHE_ARCHIVE_2+"#symlink2";
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+    String workDir = fileSys.getWorkingDirectory().toString() + "/";
+    String cache1 = workDir + CACHE_ARCHIVE_1 + "#symlink1";
+    String cache2 = workDir + CACHE_ARCHIVE_2 + "#symlink2";
 
     return new String[] {
       "-input", INPUT_FILE.toString(),
@@ -118,39 +120,32 @@
       "-mapper", "xargs cat", 
       "-reducer", "cat",
       "-jobconf", "mapreduce.job.reduces=1",
-      "-cacheArchive", cacheArchiveString1, 
-      "-cacheArchive", cacheArchiveString2,
+      "-cacheArchive", cache1,
+      "-cacheArchive", cache2,
       "-jobconf", strNamenode,
       "-jobconf", strJobTracker,
       "-jobconf", "stream.tmpdir=" + System.getProperty("test.build.data","/tmp")
     };
   }
 
-  public void testCommandLine() {
-    try {
-      createInput();
-      job = new StreamJob(genArgs(), true);
-      if(job.go() != 0) {
-        throw new Exception("Job Failed");
-      }
-      StringBuffer output = new StringBuffer(256);
-      Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
+  //@Test
+  public void testCommandLine() throws Exception {
+    createInput();
+    String args[] = genArgs();
+    LOG.info("Testing streaming command line:\n" +
+             StringUtils.join(" ", Arrays.asList(args)));
+    job = new StreamJob(genArgs(), true);
+    if(job.go() != 0) {
+      throw new Exception("Job Failed");
+    }
+    StringBuffer output = new StringBuffer(256);
+    Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
                                             new Path(OUTPUT_DIR)));
-      for (int i = 0; i < fileList.length; i++){
-        BufferedReader bread =
-          new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
-        output.append(bread.readLine());
-        output.append("\n");
-        output.append(bread.readLine());
-        output.append("\n");
-      }
-      assertEquals(expectedOutput, output.toString());
-    } catch (Exception e) {
-      e.printStackTrace();
-    } finally {
-      CACHE_FILE_1.delete();
-      CACHE_FILE_2.delete();
+    for (int i = 0; i < fileList.length; i++){
+      LOG.info("Adding output from file: " + fileList[i]);
+      output.append(StreamUtil.slurpHadoop(fileList[i], fileSys));
     }
+    assertEquals(expectedOutput, output.toString());
   }
 
   public static void main(String[]args) throws Exception

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java Tue Jan 26 14:02:53 2010
@@ -25,7 +25,8 @@
 import java.io.PrintWriter;
 import java.io.StringWriter;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,7 +41,7 @@
  * This test case tests the symlink creation
  * utility provided by distributed caching 
  */
-public class TestMultipleCachefiles extends TestCase
+public class TestMultipleCachefiles
 {
   String INPUT_FILE = "/testing-streaming/input.txt";
   String OUTPUT_DIR = "/testing-streaming/out";
@@ -59,100 +60,89 @@
   {
   }
 
-  public void testMultipleCachefiles()
+  @Test
+  public void testMultipleCachefiles() throws Exception
   {
-    try {
-      boolean mayExit = false;
-      MiniMRCluster mr = null;
-      MiniDFSCluster dfs = null; 
-      try{
-        Configuration conf = new Configuration();
-        dfs = new MiniDFSCluster(conf, 1, true, null);
-        FileSystem fileSys = dfs.getFileSystem();
-        String namenode = fileSys.getUri().toString();
-
-        mr  = new MiniMRCluster(1, namenode, 3);
-        // During tests, the default Configuration will use a local mapred
-        // So don't specify -config or -cluster
-        String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
-        String strNamenode = "fs.default.name=" + namenode;
-        String argv[] = new String[] {
-          "-input", INPUT_FILE,
-          "-output", OUTPUT_DIR,
-          "-mapper", map,
-          "-reducer", reduce,
-          //"-verbose",
-          //"-jobconf", "stream.debug=set"
-          "-jobconf", strNamenode,
-          "-jobconf", strJobtracker,
-          "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
-          "-jobconf", 
-            JobConf.MAPRED_MAP_TASK_JAVA_OPTS + "=" +
-              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-              "-Dbuild.test=" + System.getProperty("build.test") + " " +
-              conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
-                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
-          "-jobconf", 
-            JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + "=" +
-              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-              "-Dbuild.test=" + System.getProperty("build.test") + " " +
-              conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
-                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
-          "-cacheFile", fileSys.getUri() + CACHE_FILE + "#" + mapString,
-          "-cacheFile", fileSys.getUri() + CACHE_FILE_2 + "#" + mapString2
-        };
+    boolean mayExit = false;
+    MiniMRCluster mr = null;
+    MiniDFSCluster dfs = null; 
+    try{
+      Configuration conf = new Configuration();
+      dfs = new MiniDFSCluster(conf, 1, true, null);
+      FileSystem fileSys = dfs.getFileSystem();
+      String namenode = fileSys.getUri().toString();
+
+      mr  = new MiniMRCluster(1, namenode, 3);
+      // During tests, the default Configuration will use a local mapred
+      // So don't specify -config or -cluster
+      String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
+      String strNamenode = "fs.default.name=" + namenode;
+      String argv[] = new String[] {
+        "-input", INPUT_FILE,
+        "-output", OUTPUT_DIR,
+        "-mapper", map,
+        "-reducer", reduce,
+        //"-verbose",
+        //"-jobconf", "stream.debug=set"
+        "-jobconf", strNamenode,
+        "-jobconf", strJobtracker,
+        "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
+        "-jobconf", 
+          JobConf.MAPRED_MAP_TASK_JAVA_OPTS + "=" +
+            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+            "-Dbuild.test=" + System.getProperty("build.test") + " " +
+            conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
+                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+        "-jobconf", 
+          JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + "=" +
+            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+            "-Dbuild.test=" + System.getProperty("build.test") + " " +
+            conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
+                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+        "-cacheFile", fileSys.getUri() + CACHE_FILE + "#" + mapString,
+        "-cacheFile", fileSys.getUri() + CACHE_FILE_2 + "#" + mapString2
+      };
 
-        fileSys.delete(new Path(OUTPUT_DIR), true);
+      fileSys.delete(new Path(OUTPUT_DIR), true);
+      
+      DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
+      file.writeBytes(mapString + "\n");
+      file.writeBytes(mapString2 + "\n");
+      file.close();
+      file = fileSys.create(new Path(CACHE_FILE));
+      file.writeBytes(cacheString + "\n");
+      file.close();
+      file = fileSys.create(new Path(CACHE_FILE_2));
+      file.writeBytes(cacheString2 + "\n");
+      file.close();
         
-        DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
-        file.writeBytes(mapString + "\n");
-        file.writeBytes(mapString2 + "\n");
-        file.close();
-        file = fileSys.create(new Path(CACHE_FILE));
-        file.writeBytes(cacheString + "\n");
-        file.close();
-        file = fileSys.create(new Path(CACHE_FILE_2));
-        file.writeBytes(cacheString2 + "\n");
-        file.close();
-          
-        job = new StreamJob(argv, mayExit);     
-        job.go();
-
-        fileSys = dfs.getFileSystem();
-        String line = null;
-        String line2 = null;
-        Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
-                                     new Path(OUTPUT_DIR),
-                                     new Utils.OutputFileUtils
-                                              .OutputFilesFilter()));
-        for (int i = 0; i < fileList.length; i++){
-          System.out.println(fileList[i].toString());
-          BufferedReader bread =
-            new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
-          line = bread.readLine();
-          System.out.println(line);
-          line2 = bread.readLine();
-          System.out.println(line2);
-        }
-        assertEquals(cacheString + "\t", line);
-        assertEquals(cacheString2 + "\t", line2);
-      } finally{
-        if (dfs != null) { dfs.shutdown(); }
-        if (mr != null) { mr.shutdown();}
+      job = new StreamJob(argv, mayExit);     
+      job.go();
+
+      fileSys = dfs.getFileSystem();
+      String line = null;
+      String line2 = null;
+      Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
+                                   new Path(OUTPUT_DIR),
+                                   new Utils.OutputFileUtils
+                                            .OutputFilesFilter()));
+      for (int i = 0; i < fileList.length; i++){
+        System.out.println(fileList[i].toString());
+        BufferedReader bread =
+          new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
+        line = bread.readLine();
+        System.out.println(line);
+        line2 = bread.readLine();
+        System.out.println(line2);
       }
-      
-    } catch(Exception e) {
-      failTrace(e);
+      assertEquals(cacheString + "\t", line);
+      assertEquals(cacheString2 + "\t", line2);
+    } finally{
+      if (dfs != null) { dfs.shutdown(); }
+      if (mr != null) { mr.shutdown();}
     }
   }
 
-  void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestMultipleCachefiles().testMultipleCachefiles();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java Tue Jan 26 14:02:53 2010
@@ -26,9 +26,10 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestRawBytesStreaming extends TestCase {
+public class TestRawBytesStreaming {
 
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("out");
@@ -62,7 +63,8 @@
       "-verbose"
     };
   }
-  
+
+  @Test
   public void testCommandLine() throws Exception {
     try {
       try {

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java Tue Jan 26 14:02:53 2010
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 import java.io.*;
 
 import org.apache.hadoop.fs.FileUtil;
@@ -29,7 +30,7 @@
  * It uses Hadoop Aggregate to count the numbers of word occurrences 
  * in the input.
  */
-public class TestStreamAggregate extends TestCase
+public class TestStreamAggregate
 {
   protected File INPUT_FILE = new File("stream_aggregate_input.txt");
   protected File OUTPUT_DIR = new File("stream_aggregate_out");
@@ -69,8 +70,8 @@
     };
   }
   
-  public void testCommandLine()
-  {
+  @Test
+  public void testCommandLine() throws Exception {
     try {
       try {
         FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
@@ -90,25 +91,12 @@
       System.err.println("outEx1=" + outputExpect);
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreaming().testCommandLine();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java Tue Jan 26 14:02:53 2010
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -27,10 +26,13 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
  */
-public class TestStreamDataProtocol extends TestCase
+public class TestStreamDataProtocol
 {
 
   // "map" command: grep -E (red|green|blue)
@@ -71,22 +73,23 @@
       //"-verbose",
       "-jobconf", "stream.map.output.field.separator=.",
       "-jobconf", "stream.num.map.output.key.fields=2",
-      "-jobconf", "mapreduce.mapreduce.mapreduce.map.output.key.field.separator=.",
+      "-jobconf", "mapreduce.map.output.key.field.separator=.",
       "-jobconf", "num.key.fields.for.partition=1",
       "-jobconf", "mapreduce.job.reduces=2",
       "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }
-  
-  public void testCommandLine()
+
+  @Test
+  public void testCommandLine() throws Exception
   {
     try {
-      try {
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (Exception e) {
-      }
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+    } catch (Exception e) {
+    }
 
+    try {
       createInput();
       boolean mayExit = false;
 
@@ -101,25 +104,12 @@
       System.err.println("  out1=" + output);
       System.err.println("  equals=" + outputExpect.compareTo(output));
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreamDataProtocol().testCommandLine();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamJob.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamJob.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamJob.java Tue Jan 26 14:02:53 2010
@@ -25,13 +25,15 @@
 import org.apache.hadoop.mapred.KeyValueTextInputFormat;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 /**
  * This class tests hadoop Streaming's StreamJob class.
  */
-public class TestStreamJob extends TestCase {
+public class TestStreamJob {
 
+  @Test
   public void testCreateJob() throws IOException {
     JobConf job;
     ArrayList<String> dummyArgs = new ArrayList<String>();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java Tue Jan 26 14:02:53 2010
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -26,13 +25,16 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 
+import static org.junit.Assert.*;
+import org.junit.Test;
+
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
  * It tests the case where number of reducers is set to 0.
    In this case, the mappers are expected to write out outputs directly.
    No reducer/combiner will be activated.
  */
-public class TestStreamReduceNone extends TestCase
+public class TestStreamReduceNone
 {
   protected File INPUT_FILE = new File("stream_reduce_none_input.txt");
   protected File OUTPUT_DIR = new File("stream_reduce_none_out");
@@ -69,8 +71,9 @@
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }
-  
-  public void testCommandLine()
+
+  @Test
+  public void testCommandLine() throws Exception
   {
     String outFileName = "part-00000";
     File outFile = null;
@@ -92,25 +95,12 @@
       System.err.println("outEx1=" + outputExpect);
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreamReduceNone().testCommandLine();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java Tue Jan 26 14:02:53 2010
@@ -24,6 +24,9 @@
 
 import org.apache.hadoop.fs.FileUtil;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 /**
  * This class tests StreamXmlRecordReader
  * The test creates an XML file, uses StreamXmlRecordReader and compares
@@ -60,7 +63,8 @@
     };
   }
 
-  public void testCommandLine() {
+  @Test
+  public void testCommandLine() throws Exception {
     try {
       try {
         FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
@@ -73,8 +77,6 @@
       String output = StreamUtil.slurp(outFile);
       outFile.delete();
       assertEquals(input, output);
-    } catch (Exception e) {
-      e.printStackTrace();
     } finally {
       try {
         INPUT_FILE.delete();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java Tue Jan 26 14:02:53 2010
@@ -28,7 +28,8 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -51,7 +52,7 @@
  *     test-unix 
  * </pre>
  */
-public class TestStreamedMerge extends TestCase {
+public class TestStreamedMerge {
 
   public TestStreamedMerge() throws IOException {
     UtilTest utilTest = new UtilTest(getClass().getName());
@@ -104,24 +105,16 @@
     return c;
   }
 
-  void lsr() {
-    try {
-      System.out.println("lsr /");
-      ToolRunner.run(conf_, new FsShell(), new String[]{ "-lsr", "/" });
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+  void lsr() throws Exception {
+    System.out.println("lsr /");
+    ToolRunner.run(conf_, new FsShell(), new String[]{ "-lsr", "/" });
   }
 
-  void printSampleInput() {
-    try {
-      System.out.println("cat /input/part-00");
-      String content = StreamUtil.slurpHadoop(new Path("/input/part-00"), fs_);
-      System.out.println(content);
-      System.out.println("cat done.");
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+  void printSampleInput() throws IOException {
+    System.out.println("cat /input/part-00");
+    String content = StreamUtil.slurpHadoop(new Path("/input/part-00"), fs_);
+    System.out.println(content);
+    System.out.println("cat done.");
   }
 
   void callStreaming(String argSideOutput, boolean inputTagged) throws IOException {
@@ -210,7 +203,8 @@
     StringBuffer buf_;
   }
 
-  public void testMain() throws IOException {
+  @Test
+  public void testMain() throws Exception {
     boolean success = false;
     String base = new File(".").getAbsolutePath();
     System.setProperty("hadoop.log.dir", base + "/logs");
@@ -228,8 +222,6 @@
       }
       doAllTestJobs();
       success = true;
-    } catch (IOException io) {
-      io.printStackTrace();
     } finally {
       try {
         fs_.close();
@@ -243,14 +235,14 @@
     }
   }
 
-  void doAllTestJobs() throws IOException
+  void doAllTestJobs() throws Exception
   {
     goSocketTagged(true, false);
     goSocketTagged(false, false);
     goSocketTagged(true, true);
   }
   
-  void goSocketTagged(boolean socket, boolean inputTagged) throws IOException {
+  void goSocketTagged(boolean socket, boolean inputTagged) throws Exception {
     System.out.println("***** goSocketTagged: " + socket + ", " + inputTagged);
     String expect = createInputs(inputTagged);
     lsr();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java Tue Jan 26 14:02:53 2010
@@ -18,21 +18,28 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
 import java.io.*;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
+
 
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
  */
-public class TestStreaming extends TestCase
+public class TestStreaming
 {
 
   // "map" command: grep -E (red|green|blue)
   // reduce command: uniq
-  protected File INPUT_FILE = new File("input.txt");
-  protected File OUTPUT_DIR = new File("out");
+  protected File TEST_DIR;
+  protected File INPUT_FILE;
+  protected File OUTPUT_DIR;
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
   // map behaves like "/usr/bin/tr . \\n"; (split words into lines)
   protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
@@ -48,13 +55,20 @@
     UtilTest utilTest = new UtilTest(getClass().getName());
     utilTest.checkUserDir();
     utilTest.redirectIfAntJunit();
+    TEST_DIR = new File(getClass().getName()).getAbsoluteFile();
+    OUTPUT_DIR = new File(TEST_DIR, "out");
+    INPUT_FILE = new File(TEST_DIR, "input.txt");
+  }
+
+  protected String getInputData() {
+    return input;
   }
 
   protected void createInput() throws IOException
   {
-    DataOutputStream out = new DataOutputStream(
-                                                new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
-    out.write(input.getBytes("UTF-8"));
+    DataOutputStream out = getFileSystem().create(
+      new Path(INPUT_FILE.getAbsolutePath()));
+    out.write(getInputData().getBytes("UTF-8"));
     out.close();
   }
 
@@ -70,36 +84,43 @@
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }
-  
-  public void testCommandLine() throws IOException
+
+  protected Configuration getConf() {
+    return new Configuration();
+  }
+
+  protected FileSystem getFileSystem() throws IOException {
+    return FileSystem.get(getConf());
+  }
+
+  protected String getExpectedOutput() {
+    return outputExpect;
+  }
+
+  protected void checkOutput() throws IOException {
+    Path outPath = new Path(OUTPUT_DIR.getAbsolutePath(), "part-00000");
+    FileSystem fs = getFileSystem();
+    String output = StreamUtil.slurpHadoop(outPath, fs);
+    fs.delete(outPath, true);
+    System.err.println("outEx1=" + getExpectedOutput());
+    System.err.println("  out1=" + output);
+    assertEquals(getExpectedOutput(), output);
+  }
+
+  @Test
+  public void testCommandLine() throws Exception
   {
-    try {
-      try {
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (Exception e) {
-      }
-
-      createInput();
-      boolean mayExit = false;
-
-      // During tests, the default Configuration will use a local mapred
-      // So don't specify -config or -cluster
-      job = new StreamJob(genArgs(), mayExit);      
-      job.go();
-      File outFile = new File(OUTPUT_DIR, "part-00000").getAbsoluteFile();
-      String output = StreamUtil.slurp(outFile);
-      outFile.delete();
-      System.err.println("outEx1=" + outputExpect);
-      System.err.println("  out1=" + output);
-      assertEquals(outputExpect, output);
-    } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        e.printStackTrace();
-      }
-    }
+    UtilTest.recursiveDelete(TEST_DIR);
+    assertTrue("Creating " + TEST_DIR, TEST_DIR.mkdirs());
+    createInput();
+    boolean mayExit = false;
+
+    // During tests, the default Configuration will use a local mapred
+    // So don't specify -config or -cluster
+    job = new StreamJob(genArgs(), mayExit);
+    int ret = job.go();
+    assertEquals(0, ret);
+    checkOutput();
   }
 
   public static void main(String[]args) throws Exception

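The TestStreaming rewrite turns it into a reusable base class: input and output now live under a per-test TEST_DIR (which TestGzipInput above already relies on), data is written through the Hadoop FileSystem API, and new protected hooks (getConf, getFileSystem, getInputData, getExpectedOutput, checkOutput) let subclasses swap the configuration, input data, and verification without duplicating the test body. A minimal sketch of a hypothetical subclass using one of those hooks:

// Sketch only: the class name and configuration tweak are illustrative,
// not part of the commit.
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;

public class ExampleStreamingVariant extends TestStreaming {

  public ExampleStreamingVariant() throws IOException {
    super();
  }

  @Override
  protected Configuration getConf() {
    // A subclass can hand back a cluster-backed Configuration here; the base
    // class then derives its FileSystem from it via getFileSystem().
    return new Configuration();
  }
}
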
Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java Tue Jan 26 14:02:53 2010
@@ -22,6 +22,9 @@
 
 import org.apache.hadoop.mapred.Counters;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 public class TestStreamingCombiner extends TestStreaming {
 
   protected String combine = StreamUtil.makeJavaCommand(UniqApp.class, new String[]{""});
@@ -41,7 +44,8 @@
     };
   }
 
-  public void testCommandLine() throws IOException {
+  @Test
+  public void testCommandLine() throws Exception  {
     super.testCommandLine();
     // validate combiner counters
     String counterGrp = "org.apache.hadoop.mapred.Task$Counter";

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCounters.java Tue Jan 26 14:02:53 2010
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.streaming;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import java.io.File;
 import java.io.IOException;
 
@@ -30,11 +33,11 @@
  * This class tests streaming counters in MapReduce local mode.
  */
 public class TestStreamingCounters extends TestStreaming {
-
   public TestStreamingCounters() throws IOException {
     super();
   }
 
+  @Test
   public void testCommandLine() throws IOException
   {
     try {

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingEmptyInpNonemptyOut.java Tue Jan 26 14:02:53 2010
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+
 import java.io.*;
 
 import org.apache.hadoop.fs.FileUtil;
@@ -30,7 +31,7 @@
  * was hanging forever. Now this issue is solved. Similarly reducer is also
  * checked for task completion with empty input and nonempty output.
  */
-public class TestStreamingEmptyInpNonemptyOut extends TestCase
+public class TestStreamingEmptyInpNonemptyOut
 {
 
   protected File INPUT_FILE = new File("emptyInputFile.txt");
@@ -74,7 +75,8 @@
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }
-  
+
+  @Test
   public void testEmptyInputNonemptyOutput() throws IOException
   {
     try {

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java Tue Jan 26 14:02:53 2010
@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import org.junit.Before;
+import static org.junit.Assert.*;
+
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -30,10 +33,12 @@
  * reducers have non-zero exit status and the
  * stream.non.zero.exit.status.is.failure jobconf is set.
  */
-public class TestStreamingExitStatus extends TestCase
+public class TestStreamingExitStatus
 {
-  protected File INPUT_FILE = new File("input.txt");
-  protected File OUTPUT_DIR = new File("out");  
+  protected File TEST_DIR =
+    new File("TestStreamingExitStatus").getAbsoluteFile();
+  protected File INPUT_FILE = new File(TEST_DIR, "input.txt");
+  protected File OUTPUT_DIR = new File(TEST_DIR, "out");
 
   protected String failingTask = StreamUtil.makeJavaCommand(FailApp.class, new String[]{"true"});
   protected String echoTask = StreamUtil.makeJavaCommand(FailApp.class, new String[]{"false"});
@@ -57,52 +62,48 @@
     };
   }
 
+  @Before
   public void setUp() throws IOException {
-    UtilTest.recursiveDelete(INPUT_FILE);
-    UtilTest.recursiveDelete(OUTPUT_DIR);
-    
+    UtilTest.recursiveDelete(TEST_DIR);
+    assertTrue(TEST_DIR.mkdirs());
+
     FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
     out.write("hello\n".getBytes());
     out.close();
   }
 
-  public void runStreamJob(boolean exitStatusIsFailure, boolean failMap) {
-    try {
-      boolean mayExit = false;
-      int returnStatus = 0;
-
-      StreamJob job = new StreamJob(genArgs(exitStatusIsFailure, failMap), mayExit);
-      returnStatus = job.go();
-      
-      if (exitStatusIsFailure) {
-        assertEquals("Streaming Job failure code expected", /*job not successful:*/1, returnStatus);
-      } else {
-        assertEquals("Streaming Job expected to succeed", 0, returnStatus);
-      }
-    } catch (Exception e) {
-      failTrace(e);
+  public void runStreamJob(boolean exitStatusIsFailure, boolean failMap) throws Exception {
+    boolean mayExit = false;
+    int returnStatus = 0;
+
+    StreamJob job = new StreamJob(genArgs(exitStatusIsFailure, failMap), mayExit);
+    returnStatus = job.go();
+    
+    if (exitStatusIsFailure) {
+      assertEquals("Streaming Job failure code expected", /*job not successful:*/1, returnStatus);
+    } else {
+      assertEquals("Streaming Job expected to succeed", 0, returnStatus);
     }
   }
-  
-  public void testMapFailOk() {
+
+  @Test
+  public void testMapFailOk() throws Exception {
     runStreamJob(false, true);
   }
-  
-  public void testMapFailNotOk() {
+
+  @Test
+  public void testMapFailNotOk() throws Exception {
     runStreamJob(true, true);
   }
-  
-  public void testReduceFailOk() {
+
+  @Test
+  public void testReduceFailOk() throws Exception {
     runStreamJob(false, false);
   }
   
-  public void testReduceFailNotOk() {
+  @Test
+  public void testReduceFailNotOk() throws Exception {
     runStreamJob(true, false);
   }  
   
-  protected void failTrace(Exception e) {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
 }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingFailure.java Tue Jan 26 14:02:53 2010
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -56,6 +58,7 @@
     };
   }
 
+  @Test
   public void testCommandLine()
   {
     try {

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingKeyValue.java Tue Jan 26 14:02:53 2010
@@ -18,16 +18,19 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import java.io.*;
 
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.mapreduce.JobContext;
 
 /**
  * This class tests hadoopStreaming in MapReduce local mode.
  * This testcase looks at different cases of tab position in input. 
  */
-public class TestStreamingKeyValue extends TestCase
+public class TestStreamingKeyValue
 {
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("stream_out");
@@ -68,13 +71,14 @@
       "-input", INPUT_FILE.getAbsolutePath(),
       "-output", OUTPUT_DIR.getAbsolutePath(),
       "-mapper", "cat",
-      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
+      "-jobconf", JobContext.PRESERVE_FAILED_TASK_FILES + "=true", 
       "-jobconf", "stream.non.zero.exit.is.failure=true",
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
     };
   }
   
-  public void testCommandLine()
+  @Test
+  public void testCommandLine() throws Exception
   {
     String outFileName = "part-00000";
     File outFile = null;
@@ -96,25 +100,12 @@
       System.err.println("outEx1=" + outputExpect);
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreamingKeyValue().testCommandLine();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java Tue Jan 26 14:02:53 2010
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -29,7 +31,7 @@
 /**
  * This class tests hadoopStreaming with customized separator in MapReduce local mode.
  */
-public class TestStreamingSeparator extends TestCase
+public class TestStreamingSeparator
 {
 
   // "map" command: grep -E (red|green|blue)
@@ -86,7 +88,8 @@
     };
   }
   
-  public void testCommandLine()
+  @Test
+  public void testCommandLine() throws Exception
   {
     try {
       try {
@@ -107,25 +110,12 @@
       System.err.println("outEx1=" + outputExpect);
       System.err.println("  out1=" + output);
       assertEquals(outputExpect, output);
-    } catch(Exception e) {
-      failTrace(e);
     } finally {
-      try {
-        INPUT_FILE.delete();
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (IOException e) {
-        failTrace(e);
-      }
+      INPUT_FILE.delete();
+      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
     }
   }
 
-  private void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
-
   public static void main(String[]args) throws Exception
   {
     new TestStreamingSeparator().testCommandLine();

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStatus.java Tue Jan 26 14:02:53 2010
@@ -22,7 +22,8 @@
 import java.io.IOException;
 import java.io.File;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,12 +33,13 @@
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
+
 /**
  * Tests for the ability of a streaming task to set the status
  * by writing "reporter:status:" lines to stderr. Uses MiniMR
  * since the local jobtracker doesn't track status.
  */
-public class TestStreamingStatus extends TestCase {
+public class TestStreamingStatus {
   private static String TEST_ROOT_DIR =
     new File(System.getProperty("test.build.data","/tmp"))
     .toURI().toString().replace(' ', '+');
@@ -78,6 +80,7 @@
     } catch (Exception e) {}
   }
   
+  @Test
   public void testStreamingStatus() throws Exception {
     MiniMRCluster mr = null;
     FileSystem fs = null;

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java Tue Jan 26 14:02:53 2010
@@ -18,19 +18,21 @@
 
 package org.apache.hadoop.streaming;
 
-import junit.framework.TestCase;
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 /**
  * Test that streaming consumes stderr from the streaming process
  * (before, during, and after the main processing of mapred input),
  * and that stderr messages count as task progress.
  */
-public class TestStreamingStderr extends TestCase
+public class TestStreamingStderr
 {
   public TestStreamingStderr() throws IOException {
     UtilTest utilTest = new UtilTest(getClass().getName());
@@ -71,43 +73,38 @@
   }
 
   public void runStreamJob(String baseName, boolean hasInput,
-                           int preLines, int duringLines, int postLines) {
-    try {
-      File input = setupInput(baseName, hasInput);
-      File output = setupOutput(baseName);
-      boolean mayExit = false;
-      int returnStatus = 0;
-
-      StreamJob job = new StreamJob(genArgs(input, output, preLines, duringLines, postLines), mayExit);
-      returnStatus = job.go();
-      assertEquals("StreamJob success", 0, returnStatus);
-    } catch (Exception e) {
-      failTrace(e);
-    }
+                           int preLines, int duringLines, int postLines)
+    throws Exception {
+    File input = setupInput(baseName, hasInput);
+    File output = setupOutput(baseName);
+    boolean mayExit = false;
+    int returnStatus = 0;
+
+    StreamJob job = new StreamJob(genArgs(input, output, preLines, duringLines, postLines), mayExit);
+    returnStatus = job.go();
+    assertEquals("StreamJob success", 0, returnStatus);
   }
 
   // This test will fail by blocking forever if the stderr isn't
   // consumed by Hadoop for tasks that don't have any input.
-  public void testStderrNoInput() throws IOException {
+  @Test
+  public void testStderrNoInput() throws Exception {
     runStreamJob("stderr-pre", false, 10000, 0, 0);
   }
 
   // Streaming should continue to read stderr even after all input has
   // been consumed.
-  public void testStderrAfterOutput() throws IOException {
+  @Test
+  public void testStderrAfterOutput() throws Exception {
     runStreamJob("stderr-post", false, 0, 0, 10000);
   }
 
   // This test should produce a task timeout if stderr lines aren't
   // counted as progress. This won't actually work until
   // LocalJobRunner supports timeouts.
-  public void testStderrCountsAsProgress() throws IOException {
+  @Test
+  public void testStderrCountsAsProgress() throws Exception {
     runStreamJob("stderr-progress", true, 10, 1000, 0);
   }
   
-  protected void failTrace(Exception e) {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
-  }
 }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingTaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingTaskLog.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingTaskLog.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingTaskLog.java Tue Jan 26 14:02:53 2010
@@ -28,13 +28,15 @@
 import org.apache.hadoop.mapred.TestMiniMRWithDFS;
 import org.apache.hadoop.util.Shell;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
 
 /**
  * This tests the environment set by TT for the child of task jvm.
  * This will launch a streaming job with a shell script as mapper.
  */
-public class TestStreamingTaskLog extends TestCase {
+public class TestStreamingTaskLog {
   String input = "the dummy input";
   Path inputPath = new Path("inDir");
   Path outputPath = new Path("outDir");
@@ -65,6 +67,7 @@
    *  (b) hadoop.tasklog.totalLogFileSize
    * for the children of java tasks in streaming jobs.
    */
+  @Test
   public void testStreamingTaskLogWithHadoopCmd() {
     try {
       final int numSlaves = 1;

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java Tue Jan 26 14:02:53 2010
@@ -25,7 +25,8 @@
 import java.io.PrintWriter;
 import java.io.StringWriter;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,7 +41,7 @@
  * This test case tests the symlink creation
  * utility provided by distributed caching 
  */
-public class TestSymLink extends TestCase
+public class TestSymLink
 {
   String INPUT_FILE = "/testing-streaming/input.txt";
   String OUTPUT_DIR = "/testing-streaming/out";
@@ -52,92 +53,78 @@
   String cacheString = "This is just the cache string";
   StreamJob job;
 
-  public TestSymLink() throws IOException
-  {
-  }
-
-  public void testSymLink()
+  @Test
+  public void testSymLink() throws Exception
   {
+    boolean mayExit = false;
+    MiniMRCluster mr = null;
+    MiniDFSCluster dfs = null; 
     try {
-      boolean mayExit = false;
-      MiniMRCluster mr = null;
-      MiniDFSCluster dfs = null; 
-      try{
-        Configuration conf = new Configuration();
-        dfs = new MiniDFSCluster(conf, 1, true, null);
-        FileSystem fileSys = dfs.getFileSystem();
-        String namenode = fileSys.getUri().toString();
-        mr  = new MiniMRCluster(1, namenode, 3);
-        // During tests, the default Configuration will use a local mapred
-        // So don't specify -config or -cluster
-        String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
-        String strNamenode = "fs.default.name=" + namenode;
-        String argv[] = new String[] {
-          "-input", INPUT_FILE,
-          "-output", OUTPUT_DIR,
-          "-mapper", map,
-          "-reducer", reduce,
-          //"-verbose",
-          //"-jobconf", "stream.debug=set"
-          "-jobconf", strNamenode,
-          "-jobconf", strJobtracker,
-          "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
-          "-jobconf", 
-            JobConf.MAPRED_MAP_TASK_JAVA_OPTS+ "=" +
-              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-              "-Dbuild.test=" + System.getProperty("build.test") + " " +
-              conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
-                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
-          "-jobconf", 
-            JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS+ "=" +
-              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-              "-Dbuild.test=" + System.getProperty("build.test") + " " +
-              conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
-                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
-          "-cacheFile", fileSys.getUri() + CACHE_FILE + "#testlink"
-        };
+      Configuration conf = new Configuration();
+      dfs = new MiniDFSCluster(conf, 1, true, null);
+      FileSystem fileSys = dfs.getFileSystem();
+      String namenode = fileSys.getUri().toString();
+      mr  = new MiniMRCluster(1, namenode, 3);
+      // During tests, the default Configuration will use a local mapred
+      // So don't specify -config or -cluster
+      String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.getJobTrackerPort();
+      String strNamenode = "fs.default.name=" + namenode;
+      String argv[] = new String[] {
+        "-input", INPUT_FILE,
+        "-output", OUTPUT_DIR,
+        "-mapper", map,
+        "-reducer", reduce,
+        //"-verbose",
+        //"-jobconf", "stream.debug=set"
+        "-jobconf", strNamenode,
+        "-jobconf", strJobtracker,
+        "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
+        "-jobconf", 
+          JobConf.MAPRED_MAP_TASK_JAVA_OPTS+ "=" +
+            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+            "-Dbuild.test=" + System.getProperty("build.test") + " " +
+            conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
+                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+        "-jobconf", 
+          JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS+ "=" +
+            "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+            "-Dbuild.test=" + System.getProperty("build.test") + " " +
+            conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
+                     conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+        "-cacheFile", fileSys.getUri() + CACHE_FILE + "#testlink"
+      };
 
-        fileSys.delete(new Path(OUTPUT_DIR), true);
+      fileSys.delete(new Path(OUTPUT_DIR), true);
+      
+      DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
+      file.writeBytes(mapString);
+      file.close();
+      file = fileSys.create(new Path(CACHE_FILE));
+      file.writeBytes(cacheString);
+      file.close();
         
-        DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
-        file.writeBytes(mapString);
-        file.close();
-        file = fileSys.create(new Path(CACHE_FILE));
-        file.writeBytes(cacheString);
-        file.close();
-          
-        job = new StreamJob(argv, mayExit);      
-        job.go();
-
-        fileSys = dfs.getFileSystem();
-        String line = null;
-        Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
-                                                new Path(OUTPUT_DIR),
-                                                new Utils.OutputFileUtils
-                                                         .OutputFilesFilter()));
-        for (int i = 0; i < fileList.length; i++){
-          System.out.println(fileList[i].toString());
-          BufferedReader bread =
-            new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
-          line = bread.readLine();
-          System.out.println(line);
-        }
-        assertEquals(cacheString + "\t", line);
-      } finally{
-        if (dfs != null) { dfs.shutdown(); }
-        if (mr != null) { mr.shutdown();}
+      job = new StreamJob(argv, mayExit);      
+      job.go();
+
+      fileSys = dfs.getFileSystem();
+      String line = null;
+      Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus(
+                                              new Path(OUTPUT_DIR),
+                                              new Utils.OutputFileUtils
+                                                       .OutputFilesFilter()));
+      for (int i = 0; i < fileList.length; i++){
+        System.out.println(fileList[i].toString());
+        BufferedReader bread =
+          new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
+        line = bread.readLine();
+        System.out.println(line);
       }
-      
-    } catch(Exception e) {
-      failTrace(e);
+      assertEquals(cacheString + "\t", line);
+    } finally{
+      if (dfs != null) { dfs.shutdown(); }
+      if (mr != null) { mr.shutdown();}
     }
-  }
-
-  void failTrace(Exception e)
-  {
-    StringWriter sw = new StringWriter();
-    e.printStackTrace(new PrintWriter(sw));
-    fail(sw.toString());
+    
   }
 
   public static void main(String[]args) throws Exception

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java Tue Jan 26 14:02:53 2010
@@ -26,9 +26,12 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 
-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestTypedBytesStreaming extends TestCase {
+public class TestTypedBytesStreaming {
 
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("out");
@@ -60,33 +63,29 @@
       "-io", "typedbytes"
     };
   }
-  
+
+  @Before
+  @After
+  public void cleanupOutput() throws Exception {
+    FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
+    INPUT_FILE.delete();
+    createInput();
+  }
+
+  @Test
   public void testCommandLine() throws Exception {
-    try {
-      try {
-        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-      } catch (Exception e) {
-      }
-
-      createInput();
-      OUTPUT_DIR.delete();
-
-      // During tests, the default Configuration will use a local mapred
-      // So don't specify -config or -cluster
-      StreamJob job = new StreamJob();
-      job.setConf(new Configuration());
-      job.run(genArgs());
-      File outFile = new File(OUTPUT_DIR, "part-00000").getAbsoluteFile();
-      String output = StreamUtil.slurp(outFile);
-      outFile.delete();
-      System.out.println("   map=" + map);
-      System.out.println("reduce=" + reduce);
-      System.err.println("outEx1=" + outputExpect);
-      System.err.println("  out1=" + output);
-      assertEquals(outputExpect, output);
-    } finally {
-      INPUT_FILE.delete();
-      FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
-    }
+    // During tests, the default Configuration will use a local mapred
+    // So don't specify -config or -cluster
+    StreamJob job = new StreamJob();
+    job.setConf(new Configuration());
+    job.run(genArgs());
+    File outFile = new File(OUTPUT_DIR, "part-00000").getAbsoluteFile();
+    String output = StreamUtil.slurp(outFile);
+    outFile.delete();
+    System.out.println("   map=" + map);
+    System.out.println("reduce=" + reduce);
+    System.err.println("outEx1=" + outputExpect);
+    System.err.println("  out1=" + output);
+    assertEquals(outputExpect, output);
   }
 }

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java Tue Jan 26 14:02:53 2010
@@ -31,7 +31,8 @@
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.StringUtils;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
 /**
  * This tests the setting of memory limit for streaming processes.
@@ -40,7 +41,7 @@
  * it to succeed. Then program is launched with insufficient memory and 
  * is expected to be a failure.  
  */
-public class TestUlimit extends TestCase {
+public class TestUlimit {
   String input = "the dummy input";
   Path inputPath = new Path("/testing/in");
   Path outputPath = new Path("/testing/out");
@@ -75,6 +76,7 @@
    * it to succeed. Then program is launched with insufficient memory and 
    * is expected to be a failure.  
    */
+  @Test
   public void testCommandLine() {
     if (StreamUtil.isCygwin()) {
       return;

Modified: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java?rev=903227&r1=903226&r2=903227&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java (original)
+++ hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java Tue Jan 26 14:02:53 2010
@@ -49,21 +49,34 @@
 import org.apache.hadoop.record.RecRecord0;
 import org.apache.hadoop.record.RecRecord1;
 
-import junit.framework.TestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
-public class TestIO extends TestCase {
+public class TestIO {
 
   private File tmpfile;
+  private File tmpdir;
 
-  protected void setUp() throws Exception {
-    this.tmpfile = new File(System.getProperty("test.build.data", "/tmp"),
-      "typedbytes.bin");
+  @Before
+  public void setUp() throws Exception {
+    this.tmpdir = new File(System.getProperty("test.build.data", "/tmp"));
+    if(this.tmpdir.exists() || this.tmpdir.mkdirs()) {
+      this.tmpfile = new File(this.tmpdir, 
+        "typedbytes.bin");
+    } else {
+      throw new IOException("Failed to create directory " + tmpdir.getAbsolutePath());	
+    }
   }
 
-  protected void tearDown() throws Exception {
+  @After
+  public void tearDown() throws Exception {
     tmpfile.delete();
   }
 
+  @Test
   public void testIO() throws IOException {
     ArrayList<Object> vector = new ArrayList<Object>();
     vector.add("test");
@@ -121,6 +134,7 @@
     istream.close();
   }
 
+  @Test
   public void testCustomTypesIO() throws IOException {
     byte[] rawBytes = new byte[] { 100, 0, 0, 0, 3, 1, 2, 3 };
     
@@ -139,6 +153,7 @@
     istream.close();
   }
   
+  @Test
   public void testRecordIO() throws IOException {
     RecRecord1 r1 = new RecRecord1();
     r1.setBoolVal(true);
@@ -172,6 +187,7 @@
     assertEquals(r1, r2);
   }
 
+  @Test
   public void testWritableIO() throws IOException {
     Writable[] vectorValues = new Writable[] {
       new Text("test1"), new Text("test2"), new Text("test3")

Propchange: hadoop/mapreduce/branches/MAPREDUCE-233/src/contrib/vaidya/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Jan 26 14:02:53 2010
@@ -1,3 +1,3 @@
 /hadoop/core/branches/branch-0.19/mapred/src/contrib/vaidya:713112
 /hadoop/core/trunk/src/contrib/vaidya:776175-786373
-/hadoop/mapreduce/trunk/src/contrib/vaidya:804974-885774
+/hadoop/mapreduce/trunk/src/contrib/vaidya:804974-903221