hadoop-mapreduce-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tomwh...@apache.org
Subject svn commit: r1027685 - in /hadoop/mapreduce/trunk: ./ src/contrib/streaming/src/java/org/apache/hadoop/streaming/ src/contrib/streaming/src/test/org/apache/hadoop/streaming/
Date Tue, 26 Oct 2010 19:04:24 GMT
Author: tomwhite
Date: Tue Oct 26 19:04:23 2010
New Revision: 1027685

URL: http://svn.apache.org/viewvc?rev=1027685&view=rev
Log:
MAPREDUCE-1867. Remove unused methods in org.apache.hadoop.streaming.StreamUtil. Contributed
by Amareshwari Sriramadasu.

Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBackground.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Tue Oct 26 19:04:23 2010
@@ -347,6 +347,9 @@ Trunk (unreleased changes)
     MAPREDUCE-2143.  HarFileSystem is able to handle spaces in pathnames.
     (Ramkumar Vadali via dhruba)
 
+    MAPREDUCE-1867.  Remove unused methods in
+    org.apache.hadoop.streaming.StreamUtil.  (amareshwari via tomwhite)
+
 Release 0.21.1 - Unreleased
 
   NEW FEATURES

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java
Tue Oct 26 19:04:23 2010
@@ -22,14 +22,19 @@ import java.io.*;
 import java.net.InetAddress;
 import java.util.*;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * This is a class used to get the current environment
  * on the host machines running the map/reduce. This class
  * assumes that setting the environment in streaming is 
  * allowed on windows/ix/linuz/freebsd/sunos/solaris/hp-ux
  */
+@InterfaceAudience.Private
 public class Environment extends Properties {
 
+  private static final long serialVersionUID = 1L;
+
   public Environment() throws IOException {
     // Extend this code to fit all operating
     // environments that you expect to run in
@@ -78,7 +83,7 @@ public class Environment extends Propert
   // to be used with Runtime.exec(String[] cmdarray, String[] envp) 
   String[] toArray() {
     String[] arr = new String[super.size()];
-    Enumeration it = super.keys();
+    Enumeration<Object> it = super.keys();
     int i = -1;
     while (it.hasMoreElements()) {
       String key = (String) it.nextElement();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PathFinder.java
Tue Oct 26 19:04:23 2010
@@ -19,36 +19,32 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
-import java.util.*;
-import java.util.Map.Entry;
+
+import org.apache.hadoop.classification.InterfaceAudience;
 
 /**
- * Maps a relative pathname to an absolute pathname using the
- * PATH enviroment.
+ * Maps a relative pathname to an absolute pathname using the PATH environment.
  */
-public class PathFinder
-{
-  String pathenv;        // a string of pathnames
-  String pathSep;        // the path seperator
-  String fileSep;        // the file seperator in a directory
+@InterfaceAudience.Private
+public class PathFinder {
+  String pathenv; // a string of pathnames
+  String pathSep; // the path separator
+  String fileSep; // the file separator in a directory
 
   /**
-   * Construct a PathFinder object using the path from
-   * java.class.path
+   * Construct a PathFinder object using the path from java.class.path
    */
-  public PathFinder()
-  {
+  public PathFinder() {
     pathenv = System.getProperty("java.class.path");
     pathSep = System.getProperty("path.separator");
     fileSep = System.getProperty("file.separator");
   }
 
   /**
-   * Construct a PathFinder object using the path from
-   * the specified system environment variable.
+   * Construct a PathFinder object using the path from the specified system
+   * environment variable.
    */
-  public PathFinder(String envpath)
-  {
+  public PathFinder(String envpath) {
     pathenv = System.getenv(envpath);
     pathSep = System.getProperty("path.separator");
     fileSep = System.getProperty("file.separator");
@@ -57,85 +53,45 @@ public class PathFinder
   /**
    * Appends the specified component to the path list
    */
-  public void prependPathComponent(String str)
-  {
+  public void prependPathComponent(String str) {
     pathenv = str + pathSep + pathenv;
   }
 
   /**
-   * Returns the full path name of this file if it is listed in the
-   * path
+   * Returns the full path name of this file if it is listed in the path
    */
-  public File getAbsolutePath(String filename)
-  {
-    if (pathenv == null || pathSep == null  || fileSep == null)
-      {
-        return null;
-      }
-    int     val = -1;
-    String    classvalue = pathenv + pathSep;
+  public File getAbsolutePath(String filename) {
+    if (pathenv == null || pathSep == null || fileSep == null) {
+      return null;
+    }
+    int val = -1;
+    String classvalue = pathenv + pathSep;
 
-    while (((val = classvalue.indexOf(pathSep)) >= 0) &&
-           classvalue.length() > 0) {
-      //
+    while (((val = classvalue.indexOf(pathSep)) >= 0)
+        && classvalue.length() > 0) {
       // Extract each entry from the pathenv
-      //
       String entry = classvalue.substring(0, val).trim();
       File f = new File(entry);
 
-      try {
-        if (f.isDirectory()) {
-          //
-          // this entry in the pathenv is a directory.
-          // see if the required file is in this directory
-          //
-          f = new File(entry + fileSep + filename);
-        }
-        //
-        // see if the filename matches and  we can read it
-        //
-        if (f.isFile() && f.canRead()) {
-          return f;
-        }
-      } catch (Exception exp){ }
-      classvalue = classvalue.substring(val+1).trim();
+      if (f.isDirectory()) {
+        // this entry in the pathenv is a directory.
+        // see if the required file is in this directory
+        f = new File(entry + fileSep + filename);
+      }
+      // see if the filename matches and we can read it
+      if (f.isFile() && f.canRead()) {
+        return f;
+      }
+      classvalue = classvalue.substring(val + 1).trim();
     }
     return null;
   }
 
-  /**
-   * prints all environment variables for this process
-   */
-  private static void printEnvVariables() {
-    System.out.println("Environment Variables: ");
-    Map<String,String> map = System.getenv();
-    Set<Entry<String, String>> entrySet = map.entrySet();
-    for(Entry<String, String> entry : entrySet) {
-      System.out.println(entry.getKey() + " = " + entry.getValue());
-    }
-  }
-
-  /**
-   * prints all system properties for this process
-   */
-  private static void printSystemProperties() {
-    System.out.println("System properties: ");
-    java.util.Properties p = System.getProperties();
-    java.util.Enumeration keys = p.keys();
-    while(keys.hasMoreElements()) {
-      String thiskey = (String)keys.nextElement();
-      String value = p.getProperty(thiskey);
-      System.out.println(thiskey + " = " + value);
-    }
-  }
-
   public static void main(String args[]) throws IOException {
-
     if (args.length < 1) {
       System.out.println("Usage: java PathFinder <filename>");
       System.exit(1);
     }
-
     PathFinder finder = new PathFinder("PATH");
     File file = finder.getAbsolutePath(args[0]);
     if (file != null) {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
Tue Oct 26 19:04:23 2010
@@ -233,7 +233,6 @@ public abstract class PipeMapRed {
       LOG.info("JobConf set minRecWrittenToEnableSkip_ ="
           + minRecWrittenToEnableSkip_);
     }
-    taskId_ = StreamUtil.getTaskInfo(job_);
   }
 
   void addJobConfToEnvironment(JobConf conf, Properties env) {
@@ -611,7 +610,6 @@ public abstract class PipeMapRed {
 
   // set in PipeMapper/PipeReducer subclasses
   int numExceptions_;
-  StreamUtil.TaskId taskId_;
 
   protected volatile Throwable outerrThreadsThrowable;
 

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
Tue Oct 26 19:04:23 2010
@@ -66,6 +66,7 @@ import org.apache.hadoop.streaming.io.In
 import org.apache.hadoop.streaming.io.OutputReader;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.RunJar;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 
@@ -991,7 +992,7 @@ public class StreamJob implements Tool {
     if (jar_ != null && isLocalHadoop()) {
       // getAbs became required when shell and subvm have different working dirs...
       File wd = new File(".").getAbsoluteFile();
-      StreamUtil.unJar(new File(jar_), wd);
+      RunJar.unJar(new File(jar_), wd);
     }
 
     // if jobConf_ changes must recreate a JobClient

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
Tue Oct 26 19:04:23 2010
@@ -18,35 +18,23 @@
 
 package org.apache.hadoop.streaming;
 
-import java.text.DecimalFormat;
 import java.io.*;
 import java.net.*;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.Iterator;
-import java.util.List;
-import java.util.jar.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 
-/** Utilities not available elsewhere in Hadoop.
- *  
+/** 
+ * Utilities used in streaming
  */
+@InterfaceAudience.Private
 public class StreamUtil {
 
-  private static final Log LOG = 
-    LogFactory.getLog(StreamUtil.class.getName());
-  
   /** It may seem strange to silently switch behaviour when a String
    * is not a classname; the reason is simplified Usage:<pre>
    * -mapper [classname | program ]
@@ -114,166 +102,6 @@ public class StreamUtil {
     return codePath;
   }
 
-  // copied from TaskRunner  
-  static void unJar(File jarFile, File toDir) throws IOException {
-    JarFile jar = new JarFile(jarFile);
-    try {
-      Enumeration entries = jar.entries();
-      while (entries.hasMoreElements()) {
-        JarEntry entry = (JarEntry) entries.nextElement();
-        if (!entry.isDirectory()) {
-          InputStream in = jar.getInputStream(entry);
-          try {
-            File file = new File(toDir, entry.getName());
-            boolean b = file.getParentFile().mkdirs();
-            if (!b) { LOG.warn("Ignoring failure of mkdirs"); }
-            OutputStream out = new FileOutputStream(file);
-            try {
-              byte[] buffer = new byte[8192];
-              int i;
-              while ((i = in.read(buffer)) != -1) {
-                out.write(buffer, 0, i);
-              }
-            } finally {
-              out.close();
-            }
-          } finally {
-            in.close();
-          }
-        }
-      }
-    } finally {
-      jar.close();
-    }
-  }
-
-  final static long KB = 1024L * 1;
-  final static long MB = 1024L * KB;
-  final static long GB = 1024L * MB;
-  final static long TB = 1024L * GB;
-  final static long PB = 1024L * TB;
-
-  static DecimalFormat dfm = new DecimalFormat("####.000");
-  static DecimalFormat ifm = new DecimalFormat("###,###,###,###,###");
-
-  public static String dfmt(double d) {
-    return dfm.format(d);
-  }
-
-  public static String ifmt(double d) {
-    return ifm.format(d);
-  }
-
-  public static String formatBytes(long numBytes) {
-    StringBuffer buf = new StringBuffer();
-    boolean bDetails = true;
-    double num = numBytes;
-
-    if (numBytes < KB) {
-      buf.append(numBytes).append(" B");
-      bDetails = false;
-    } else if (numBytes < MB) {
-      buf.append(dfmt(num / KB)).append(" KB");
-    } else if (numBytes < GB) {
-      buf.append(dfmt(num / MB)).append(" MB");
-    } else if (numBytes < TB) {
-      buf.append(dfmt(num / GB)).append(" GB");
-    } else if (numBytes < PB) {
-      buf.append(dfmt(num / TB)).append(" TB");
-    } else {
-      buf.append(dfmt(num / PB)).append(" PB");
-    }
-    if (bDetails) {
-      buf.append(" (").append(ifmt(numBytes)).append(" bytes)");
-    }
-    return buf.toString();
-  }
-
-  public static String formatBytes2(long numBytes) {
-    StringBuffer buf = new StringBuffer();
-    long u = 0;
-    if (numBytes >= TB) {
-      u = numBytes / TB;
-      numBytes -= u * TB;
-      buf.append(u).append(" TB ");
-    }
-    if (numBytes >= GB) {
-      u = numBytes / GB;
-      numBytes -= u * GB;
-      buf.append(u).append(" GB ");
-    }
-    if (numBytes >= MB) {
-      u = numBytes / MB;
-      numBytes -= u * MB;
-      buf.append(u).append(" MB ");
-    }
-    if (numBytes >= KB) {
-      u = numBytes / KB;
-      buf.append(u).append(" KB ");
-    }
-    buf.append(u).append(" B"); //even if zero
-    return buf.toString();
-  }
-
-  static Environment env;
-  static String HOST;
-
-  static {
-    try {
-      env = new Environment();
-      HOST = env.getHost();
-    } catch (IOException io) {
-      io.printStackTrace();
-    }
-  }
-
-  static class StreamConsumer extends Thread {
-
-    StreamConsumer(InputStream in, OutputStream out) {
-      this.bin = new LineNumberReader(new BufferedReader(new InputStreamReader(in)));
-      if (out != null) {
-        this.bout = new DataOutputStream(out);
-      }
-    }
-
-    public void run() {
-      try {
-        String line;
-        while ((line = bin.readLine()) != null) {
-          if (bout != null) {
-            bout.writeUTF(line); //writeChars
-            bout.writeChar('\n');
-          }
-        }
-        bout.flush();
-      } catch (IOException io) {
-      }
-    }
-
-    LineNumberReader bin;
-    DataOutputStream bout;
-  }
-
-  static void exec(String arg, PrintStream log) {
-    exec(new String[] { arg }, log);
-  }
-
-  static void exec(String[] args, PrintStream log) {
-    try {
-      log.println("Exec: start: " + Arrays.asList(args));
-      Process proc = Runtime.getRuntime().exec(args);
-      new StreamConsumer(proc.getErrorStream(), log).start();
-      new StreamConsumer(proc.getInputStream(), log).start();
-      int status = proc.waitFor();
-      //if status != 0
-      log.println("Exec: status=" + status + ": " + Arrays.asList(args));
-    } catch (InterruptedException in) {
-      in.printStackTrace();
-    } catch (IOException io) {
-      io.printStackTrace();
-    }
-  }
-
   static String qualifyHost(String url) {
     try {
       return qualifyHost(new URL(url)).toString();
@@ -308,15 +136,6 @@ public class StreamUtil {
     return buf.toString();
   }
 
-  public static String safeGetCanonicalPath(File f) {
-    try {
-      String s = f.getCanonicalPath();
-      return (s == null) ? f.toString() : s;
-    } catch (IOException io) {
-      return f.toString();
-    }
-  }
-
   static String slurp(File f) throws IOException {
     int len = (int) f.length();
     byte[] buf = new byte[len];
@@ -345,165 +164,31 @@ public class StreamUtil {
     return contents;
   }
 
-  public static String rjustify(String s, int width) {
-    if (s == null) s = "null";
-    if (width > s.length()) {
-      s = getSpace(width - s.length()) + s;
-    }
-    return s;
-  }
-
-  public static String ljustify(String s, int width) {
-    if (s == null) s = "null";
-    if (width > s.length()) {
-      s = s + getSpace(width - s.length());
-    }
-    return s;
-  }
+  static private Environment env;
+  static String HOST;
 
-  static char[] space;
   static {
-    space = new char[300];
-    Arrays.fill(space, '\u0020');
-  }
-
-  public static String getSpace(int len) {
-    if (len > space.length) {
-      space = new char[Math.max(len, 2 * space.length)];
-      Arrays.fill(space, '\u0020');
+    try {
+      env = new Environment();
+      HOST = env.getHost();
+    } catch (IOException io) {
+      io.printStackTrace();
     }
-    return new String(space, 0, len);
   }
 
-  static private Environment env_;
-
   static Environment env() {
-    if (env_ != null) {
-      return env_;
+    if (env != null) {
+      return env;
     }
     try {
-      env_ = new Environment();
+      env = new Environment();
     } catch (IOException io) {
       io.printStackTrace();
     }
-    return env_;
-  }
-
-  public static String makeJavaCommand(Class main, String[] argv) {
-    ArrayList vargs = new ArrayList();
-    File javaHomeBin = new File(System.getProperty("java.home"), "bin");
-    File jvm = new File(javaHomeBin, "java");
-    vargs.add(jvm.toString());
-    // copy parent classpath
-    vargs.add("-classpath");
-    vargs.add("\"" + System.getProperty("java.class.path") + "\"");
-
-    // add heap-size limit
-    vargs.add("-Xmx" + Runtime.getRuntime().maxMemory());
-
-    // Add main class and its arguments
-    vargs.add(main.getName());
-    for (int i = 0; i < argv.length; i++) {
-      vargs.add(argv[i]);
-    }
-    return collate(vargs, " ");
-  }
-
-  public static String collate(Object[] args, String sep) {
-    return collate(Arrays.asList(args), sep);
-  }
-
-  public static String collate(List args, String sep) {
-    StringBuffer buf = new StringBuffer();
-    Iterator it = args.iterator();
-    while (it.hasNext()) {
-      if (buf.length() > 0) {
-        buf.append(" ");
-      }
-      buf.append(it.next());
-    }
-    return buf.toString();
-  }
-
-  // JobConf helpers
-
-  public static FileSplit getCurrentSplit(JobConf job) {
-    String path = job.get(MRJobConfig.MAP_INPUT_FILE);
-    if (path == null) {
-      return null;
-    }
-    Path p = new Path(path);
-    long start = Long.parseLong(job.get(MRJobConfig.MAP_INPUT_START));
-    long length = Long.parseLong(job.get(MRJobConfig.MAP_INPUT_PATH));
-    return new FileSplit(p, start, length, job);
-  }
-
-  static class TaskId {
-
-    boolean mapTask;
-    String jobid;
-    int taskid;
-    int execid;
+    return env;
   }
 
   public static boolean isLocalJobTracker(JobConf job) {
     return job.get(JTConfig.JT_IPC_ADDRESS, "local").equals("local");
   }
-
-  public static TaskId getTaskInfo(JobConf job) {
-    TaskId res = new TaskId();
-
-    String id = job.get(MRJobConfig.TASK_ATTEMPT_ID);
-    if (isLocalJobTracker(job)) {
-      // it uses difft naming 
-      res.mapTask = job.getBoolean(MRJobConfig.TASK_ISMAP, true);
-      res.jobid = "0";
-      res.taskid = 0;
-      res.execid = 0;
-    } else {
-      String[] e = id.split("_");
-      res.mapTask = e[3].equals("m");
-      res.jobid = e[1] + "_" + e[2];
-      res.taskid = Integer.parseInt(e[4]);
-      res.execid = Integer.parseInt(e[5]);
-    }
-    return res;
-  }
-
-  public static void touch(File file) throws IOException {
-    file = file.getAbsoluteFile();
-    FileOutputStream out = new FileOutputStream(file);
-    out.close();
-    if (!file.exists()) {
-      throw new IOException("touch failed: " + file);
-    }
-  }
-
-  public static boolean isCygwin() {
-    String OS = System.getProperty("os.name");
-    return (OS.indexOf("Windows") > -1);
-  }
-
-  public static String localizeBin(String path) {
-    if (isCygwin()) {
-      path = "C:/cygwin/" + path;
-    }
-    return path;
-  }
-  
-  /** @param name foo where &lt;junit>&lt;sysproperty key="foo" value="${foo}"/>

-   * If foo is undefined then Ant sets the unevaluated value. 
-   * Take this into account when setting defaultVal. */
-  public static String getBoundAntProperty(String name, String defaultVal)
-  {
-    String val = System.getProperty(name);
-    if (val != null && val.indexOf("${") >= 0) {
-      val = null;
-    }
-    if (val == null) {
-      val = defaultVal;
-    }
-    return val;
-  }
-
 }

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestRawBytesStreaming.java
Tue Oct 26 19:04:23 2010
@@ -34,8 +34,8 @@ public class TestRawBytesStreaming {
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("out");
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
-  protected String map = StreamUtil.makeJavaCommand(RawBytesMapApp.class, new String[]{"."});
-  protected String reduce = StreamUtil.makeJavaCommand(RawBytesReduceApp.class, new String[0]);
+  protected String map = UtilTest.makeJavaCommand(RawBytesMapApp.class, new String[]{"."});
+  protected String reduce = UtilTest.makeJavaCommand(RawBytesReduceApp.class, new String[0]);
   protected String outputExpect = "are\t3\nblue\t1\nbunnies\t1\npink\t1\nred\t1\nroses\t1\nviolets\t1\n";
   
   public TestRawBytesStreaming() throws IOException {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamAggregate.java
Tue Oct 26 19:04:23 2010
@@ -36,7 +36,7 @@ public class TestStreamAggregate
   protected File OUTPUT_DIR = new File("stream_aggregate_out");
   protected String input = "roses are red\nviolets are blue\nbunnies are pink\n";
   // map parses input lines and generates count entries for each word.
-  protected String map = StreamUtil.makeJavaCommand(StreamAggregate.class, new String[]{".",
"\\n"});
+  protected String map = UtilTest.makeJavaCommand(StreamAggregate.class, new String[]{".",
"\\n"});
   // Use the aggregate combine, reducei to aggregate the counts
   protected String outputExpect = "are\t3\nblue\t1\nbunnies\t1\npink\t1\nred\t1\nroses\t1\nviolets\t1\n";
 

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamDataProtocol.java
Tue Oct 26 19:04:23 2010
@@ -41,7 +41,7 @@ public class TestStreamDataProtocol
   protected File OUTPUT_DIR = new File("out_for_data_protocol_test");
   protected String input = "roses.smell.good\nroses.look.good\nroses.need.care\nroses.attract.bees\nroses.are.red\nroses.are.not.blue\nbunnies.are.pink\nbunnies.run.fast\nbunnies.have.short.tail\nbunnies.have.long.ears\n";
   // map behaves like "/usr/bin/cat"; 
-  protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{".", "."});
+  protected String map = UtilTest.makeJavaCommand(TrApp.class, new String[]{".", "."});
   // reduce counts the number of values for each key
   protected String reduce = "org.apache.hadoop.streaming.ValueCountReduce";
   protected String outputExpect = "bunnies.are\t1\nbunnies.have\t2\nbunnies.run\t1\nroses.are\t2\nroses.attract\t1\nroses.look\t1\nroses.need\t1\nroses.smell\t1\n";

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamReduceNone.java
Tue Oct 26 19:04:23 2010
@@ -40,7 +40,7 @@ public class TestStreamReduceNone
   protected File OUTPUT_DIR = new File("stream_reduce_none_out");
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
   // map parses input lines and generates count entries for each word.
-  protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
+  protected String map = UtilTest.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
   protected String outputExpect = "roses\t\nare\t\nred\t\nviolets\t\nare\t\nblue\t\nbunnies\t\nare\t\npink\t\n";
 
   private StreamJob job;

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreaming.java
Tue Oct 26 19:04:23 2010
@@ -46,10 +46,10 @@ public class TestStreaming
   protected String outDir;
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
   // map behaves like "/usr/bin/tr . \\n"; (split words into lines)
-  protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
+  protected String map = UtilTest.makeJavaCommand(TrApp.class, new String[]{".", "\\n"});
   // reduce behave like /usr/bin/uniq. But also prepend lines with R.
   // command-line combiner does not have any effect any more.
-  protected String reduce = StreamUtil.makeJavaCommand(UniqApp.class, new String[]{"R"});
+  protected String reduce = UtilTest.makeJavaCommand(UniqApp.class, new String[]{"R"});
   protected String outputExpect = "Rare\t\nRblue\t\nRbunnies\t\nRpink\t\nRred\t\nRroses\t\nRviolets\t\n";
 
   protected ArrayList<String> args = new ArrayList<String>();

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingAsDifferentUser.java
Tue Oct 26 19:04:23 2010
@@ -45,9 +45,9 @@ public class TestStreamingAsDifferentUse
   private Path outputPath = new Path("output");
   private String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
   private String map =
-      StreamUtil.makeJavaCommand(TrApp.class, new String[] { ".", "\\n" });
+      UtilTest.makeJavaCommand(TrApp.class, new String[] { ".", "\\n" });
   private String reduce =
-      StreamUtil.makeJavaCommand(UniqApp.class, new String[] { "R" });
+      UtilTest.makeJavaCommand(UniqApp.class, new String[] { "R" });
 
   public void testStreaming()
       throws Exception {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBackground.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBackground.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBackground.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBackground.java
Tue Oct 26 19:04:23 2010
@@ -18,15 +18,15 @@
 
 package org.apache.hadoop.streaming;
 
-import org.junit.Test;
-import org.junit.Before;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
 
-import java.io.*;
-import java.util.*;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * This class tests if hadoopStreaming background works fine. A DelayEchoApp
@@ -38,7 +38,7 @@ public class TestStreamingBackground {
   protected File INPUT_FILE = new File(TEST_DIR, "input.txt");
   protected File OUTPUT_DIR = new File(TEST_DIR, "out");
 
-  protected String tenSecondsTask = StreamUtil.makeJavaCommand(
+  protected String tenSecondsTask = UtilTest.makeJavaCommand(
       DelayEchoApp.class, new String[] { "10" });
 
   public TestStreamingBackground() throws IOException {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingBadRecords.java
Tue Oct 26 19:04:23 2010
@@ -56,9 +56,9 @@ public class TestStreamingBadRecords ext
     Arrays.asList("hey001","hey018");
   
   private static final String badMapper = 
-    StreamUtil.makeJavaCommand(BadApp.class, new String[]{});
+    UtilTest.makeJavaCommand(BadApp.class, new String[]{});
   private static final String badReducer = 
-    StreamUtil.makeJavaCommand(BadApp.class, new String[]{"true"});
+    UtilTest.makeJavaCommand(BadApp.class, new String[]{"true"});
   private static final int INPUTSIZE=100;
   
   public TestStreamingBadRecords() throws IOException

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingCombiner.java
Tue Oct 26 19:04:23 2010
@@ -27,7 +27,7 @@ import static org.junit.Assert.*;
 
 public class TestStreamingCombiner extends TestStreaming {
 
-  protected String combine = StreamUtil.makeJavaCommand(
+  protected String combine = UtilTest.makeJavaCommand(
       UniqApp.class, new String[]{""});
   
   public TestStreamingCombiner() throws IOException {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingExitStatus.java
Tue Oct 26 19:04:23 2010
@@ -40,8 +40,8 @@ public class TestStreamingExitStatus
   protected File INPUT_FILE = new File(TEST_DIR, "input.txt");
   protected File OUTPUT_DIR = new File(TEST_DIR, "out");
 
-  protected String failingTask = StreamUtil.makeJavaCommand(FailApp.class, new String[]{"true"});
-  protected String echoTask = StreamUtil.makeJavaCommand(FailApp.class, new String[]{"false"});
+  protected String failingTask = UtilTest.makeJavaCommand(FailApp.class, new String[]{"true"});
+  protected String echoTask = UtilTest.makeJavaCommand(FailApp.class, new String[]{"false"});
 
   public TestStreamingExitStatus() throws IOException {
     UtilTest utilTest = new UtilTest(getClass().getName());

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingSeparator.java
Tue Oct 26 19:04:23 2010
@@ -42,11 +42,11 @@ public class TestStreamingSeparator
   // mapreduce.input.keyvaluelinerecordreader.key.value.separator reads 1 as separator
   // stream.map.input.field.separator uses 2 as separator
   // map behaves like "/usr/bin/tr 2 3"; (translate 2 to 3)
-  protected String map = StreamUtil.makeJavaCommand(TrApp.class, new String[]{"2", "3"});
+  protected String map = UtilTest.makeJavaCommand(TrApp.class, new String[]{"2", "3"});
   // stream.map.output.field.separator recognize 3 as separator
   // stream.reduce.input.field.separator recognize 3 as separator
   // reduce behaves like "/usr/bin/tr 3 4"; (translate 3 to 4)
-  protected String reduce = StreamUtil.makeJavaCommand(TrAppReduce.class, new String[]{"3",
"4"});
+  protected String reduce = UtilTest.makeJavaCommand(TrAppReduce.class, new String[]{"3",
"4"});
   // stream.reduce.output.field.separator recognize 4 as separator
   // mapreduce.output.textoutputformat.separator outputs 5 as separator
   protected String outputExpect = "bunnies5are.pink\nroses5are.red\nviolets5are.blue\n";

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamingStderr.java
Tue Oct 26 19:04:23 2010
@@ -44,7 +44,7 @@ public class TestStreamingStderr
     return new String[] {
       "-input", input.getAbsolutePath(),
       "-output", output.getAbsolutePath(),
-      "-mapper", StreamUtil.makeJavaCommand(StderrApp.class,
+      "-mapper", UtilTest.makeJavaCommand(StderrApp.class,
                                             new String[]{Integer.toString(preLines),
                                                          Integer.toString(duringLines),
                                                          Integer.toString(postLines)}),

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestTypedBytesStreaming.java
Tue Oct 26 19:04:23 2010
@@ -36,8 +36,8 @@ public class TestTypedBytesStreaming {
   protected File INPUT_FILE = new File("input.txt");
   protected File OUTPUT_DIR = new File("out");
   protected String input = "roses.are.red\nviolets.are.blue\nbunnies.are.pink\n";
-  protected String map = StreamUtil.makeJavaCommand(TypedBytesMapApp.class, new String[]{"."});
-  protected String reduce = StreamUtil.makeJavaCommand(TypedBytesReduceApp.class, new String[0]);
+  protected String map = UtilTest.makeJavaCommand(TypedBytesMapApp.class, new String[]{"."});
+  protected String reduce = UtilTest.makeJavaCommand(TypedBytesReduceApp.class, new String[0]);
   protected String outputExpect = "are\t3\nred\t1\nblue\t1\npink\t1\nroses\t1\nbunnies\t1\nviolets\t1\n";
   
   public TestTypedBytesStreaming() throws IOException {

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
Tue Oct 26 19:04:23 2010
@@ -78,7 +78,7 @@ public class TestUlimit {
    */
   @Test
   public void testCommandLine() {
-    if (StreamUtil.isCygwin()) {
+    if (UtilTest.isCygwin()) {
       return;
     }
     try {
@@ -89,7 +89,7 @@ public class TestUlimit {
       
       mr = new MiniMRCluster(numSlaves, fs.getUri().toString(), 1);
       writeInputFile(fs, inputPath);
-      map = StreamUtil.makeJavaCommand(UlimitApp.class, new String[]{});  
+      map = UtilTest.makeJavaCommand(UlimitApp.class, new String[]{});  
       runProgram(SET_MEMORY_LIMIT);
       fs.delete(outputPath, true);
       assertFalse("output not cleaned up", fs.exists(outputPath));

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java?rev=1027685&r1=1027684&r2=1027685&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java
(original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/UtilTest.java
Tue Oct 26 19:04:23 2010
@@ -22,6 +22,9 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -82,6 +85,43 @@ class UtilTest {
     }
   }
 
+  public static String collate(List<String> args, String sep) { // join args with sep into one string
+    StringBuffer buf = new StringBuffer();
+    Iterator<String> it = args.iterator();
+    while (it.hasNext()) {
+      if (buf.length() > 0) {
+        buf.append(sep); // BUGFIX: was hardcoded " ", silently ignoring the sep parameter
+      }
+      buf.append(it.next());
+    }
+    return buf.toString();
+  }
+
+  public static String makeJavaCommand(Class<?> main, String[] argv) { // builds a "java -classpath ... -Xmx... MainClass args" command string for launching main as a subprocess
+    ArrayList<String> vargs = new ArrayList<String>();
+    File javaHomeBin = new File(System.getProperty("java.home"), "bin");
+    File jvm = new File(javaHomeBin, "java"); // launcher of the current runtime; NOTE(review): no ".exe" suffix handling on Windows -- confirm exec resolves it
+    vargs.add(jvm.toString());
+    // copy parent classpath
+    vargs.add("-classpath");
+    vargs.add("\"" + System.getProperty("java.class.path") + "\""); // quoted so classpath entries containing spaces survive shell word-splitting
+  
+    // add heap-size limit
+    vargs.add("-Xmx" + Runtime.getRuntime().maxMemory()); // mirrors this JVM's current max heap, in bytes
+  
+    // Add main class and its arguments
+    vargs.add(main.getName());
+    for (int i = 0; i < argv.length; i++) {
+      vargs.add(argv[i]);
+    }
+    return collate(vargs, " "); // single space-separated command line
+  }
+
+  public static boolean isCygwin() { // NOTE(review): actually returns true for ANY Windows os.name, not Cygwin specifically -- callers use it as a "skip on Windows" guard
+    String OS = System.getProperty("os.name");
+    return (OS.indexOf("Windows") > -1);
+  }
+
   /**
    * Is perl supported on this machine ?
    * @return true if perl is available and is working as expected



Mime
View raw message