hadoop-common-commits mailing list archives

From: cutt...@apache.org
Subject: svn commit: r394984 [2/3] - in /lucene/hadoop/trunk: ./ conf/ src/examples/org/apache/hadoop/examples/ src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/io/ src/java/org/apache/ha...
Date: Tue, 18 Apr 2006 17:05:34 GMT
Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java?rev=394984&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java Tue Apr 18 10:05:31 2006
@@ -0,0 +1,178 @@
+/**
+ * Copyright 2006 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs;
+
+import java.util.*;
+
+/** Names a file or directory in a {@link FileSystem}.
+ * Path strings use slash as the directory separator.  A path string is
+ * absolute if it begins with a slash.
+ */
+public class Path implements Comparable {
+
+  /** The directory separator, a slash. */
+  public static final String SEPARATOR = "/";
+  
+  static final boolean WINDOWS
+    = System.getProperty("os.name").startsWith("Windows");
+
+  private boolean isAbsolute;                     // if path starts with separator
+  private String[] elements;                      // tokenized path elements
+  private String drive;                           // Windows drive letter
+  private String asString;                        // cached toString() value
+
+  /** Resolve a child path against a parent path. */
+  public Path(String parent, String child) {
+    this(new Path(parent), new Path(child));
+  }
+
+  /** Resolve a child path against a parent path. */
+  public Path(Path parent, String child) {
+    this(parent, new Path(child));
+  }
+
+  /** Resolve a child path against a parent path. */
+  public Path(String parent, Path child) {
+    this(new Path(parent), child);
+  }
+
+  /** Resolve a child path against a parent path. */
+  public Path(Path parent, Path child) {
+    if (child.isAbsolute()) {
+      this.isAbsolute = child.isAbsolute;
+      this.elements = child.elements;
+    } else {
+      this.isAbsolute = parent.isAbsolute;
+      this.elements = new String[parent.elements.length+child.elements.length];
+      for (int i = 0; i < parent.elements.length; i++) {
+        elements[i] = parent.elements[i];
+      }
+      for (int i = 0; i < child.elements.length; i++) {
+        elements[i+parent.elements.length] = child.elements[i];
+      }
+    }
+    this.drive = child.drive == null ? parent.drive : child.drive;
+  }
+
+  /** Construct a path from a String. */
+  public Path(String pathString) {
+    if (WINDOWS) {                                // parse Windows path
+      int colon = pathString.indexOf(':');
+      if (colon == 1) {                           // parse Windows drive letter
+        this.drive = pathString.substring(0, 1);
+        pathString = pathString.substring(2);
+      }
+      pathString = pathString.replace('\\','/');  // convert backslash to slash
+    }
+
+    // determine whether the path is absolute
+    this.isAbsolute = pathString.startsWith(SEPARATOR);
+
+
+    // tokenize the path into elements
+    Enumeration tokens = new StringTokenizer(pathString, SEPARATOR);
+    ArrayList list = Collections.list(tokens);
+    this.elements = (String[])list.toArray(new String[list.size()]);
+  }
+
+  private Path(boolean isAbsolute, String[] elements, String drive) {
+    this.isAbsolute = isAbsolute;
+    this.elements = elements;
+    this.drive = drive;
+  }
+
+  /** True if this path is absolute. */
+  public boolean isAbsolute() { return isAbsolute; }
+
+  /** Returns the final component of this path.*/
+  public String getName() {
+    if (elements.length == 0) {
+      return "";
+    } else {
+      return elements[elements.length-1];
+    }
+  }
+
+  /** Returns the parent of a path. */
+  public Path getParent() {
+    if (elements.length  == 0) {
+      return null;
+    }
+    String[] newElements = new String[elements.length-1];
+    for (int i = 0; i < newElements.length; i++) {
+      newElements[i] = elements[i];
+    }
+    return new Path(isAbsolute, newElements, drive);
+  }
+
+  /** Adds a suffix to the final name in the path. */
+  public Path suffix(String suffix) {
+    return new Path(getParent(), getName()+suffix);
+  }
+
+  public String toString() {
+    if (asString == null) {
+      StringBuffer buffer = new StringBuffer();
+
+      if (drive != null) {
+        buffer.append(drive);
+        buffer.append(':');
+      }
+
+      if (elements.length == 0 && isAbsolute) {
+        buffer.append(SEPARATOR);
+      }
+
+      for (int i = 0; i < elements.length; i++) {
+        if (i != 0 || isAbsolute) {
+          buffer.append(SEPARATOR);
+        }
+        buffer.append(elements[i]);
+      }
+      asString = buffer.toString();
+    }
+    return asString;
+  }
+
+  public boolean equals(Object o) {
+    if (!(o instanceof Path)) {
+      return false;
+    }
+    Path that = (Path)o;
+    return
+      this.isAbsolute == that.isAbsolute &&
+      Arrays.equals(this.elements, that.elements) &&
+      (this.drive == null ? true : this.drive.equals(that.drive));
+  }
+
+  public int hashCode() {
+    int hashCode = isAbsolute ? 1 : -1;
+    for (int i = 0; i < elements.length; i++) {
+      hashCode ^= elements[i].hashCode();
+    }
+    if (drive != null) {
+      hashCode ^= drive.hashCode();
+    }
+    return hashCode;
+  }
+
+  public int compareTo(Object o) {
+    Path that = (Path)o;
+    return this.toString().compareTo(that.toString());
+  }
+
+}
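
A few values that follow directly from the constructors and toString()
above (the paths themselves are hypothetical):

    Path parent = new Path("/user/hadoop");
    Path rel    = new Path(parent, new Path("input"));   // "/user/hadoop/input"
    Path abs    = new Path(parent, new Path("/tmp"));    // absolute child wins: "/tmp"
    Path part   = new Path("/out/part-0").suffix(".1");  // "/out/part-0.1"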

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/PathFilter.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/PathFilter.java?rev=394984&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/PathFilter.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/PathFilter.java Tue Apr 18 10:05:31 2006
@@ -0,0 +1,30 @@
+/**
+ * Copyright 2006 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+public interface PathFilter {
+  /**
+   * Tests whether or not the specified abstract pathname should be
+   * included in a pathname list.
+   *
+   * @param  path  The abstract pathname to be tested
+   * @return  <code>true</code> if and only if <code>path</code>
+   *          should be included
+   */
+  boolean accept(Path path);
+}
+
+
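
A minimal sketch of an implementation (not in this commit; the class name
is illustrative) that keeps only paths whose final component ends with a
given suffix:

    public class SuffixFilter implements PathFilter {
      private String suffix;
      public SuffixFilter(String suffix) { this.suffix = suffix; }
      public boolean accept(Path path) {
        // tests only the final path component, per Path.getName()
        return path.getName().endsWith(suffix);
      }
    }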

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java Tue Apr 18 10:05:31 2006
@@ -91,19 +91,17 @@
       this.comparator = comparator;
       this.lastKey = comparator.newKey();
 
-      File dir = new File(dirName);
+      Path dir = new Path(dirName);
       fs.mkdirs(dir);
 
-      File dataFile = new File(dir, DATA_FILE_NAME);
-      File indexFile = new File(dir, INDEX_FILE_NAME);
+      Path dataFile = new Path(dir, DATA_FILE_NAME);
+      Path indexFile = new Path(dir, INDEX_FILE_NAME);
 
       Class keyClass = comparator.getKeyClass();
       this.data =
-        new SequenceFile.Writer(fs, dataFile.getPath(), keyClass, valClass,
-                                compress);
+        new SequenceFile.Writer(fs, dataFile, keyClass, valClass, compress);
       this.index =
-        new SequenceFile.Writer(fs, indexFile.getPath(),
-                                keyClass, LongWritable.class);
+        new SequenceFile.Writer(fs, indexFile, keyClass, LongWritable.class);
     }
     
     /** The number of entries that are added before an index entry is added.*/
@@ -197,12 +195,12 @@
     /** Construct a map reader for the named map using the named comparator.*/
     public Reader(FileSystem fs, String dirName, WritableComparator comparator, Configuration conf)
       throws IOException {
-      File dir = new File(dirName);
-      File dataFile = new File(dir, DATA_FILE_NAME);
-      File indexFile = new File(dir, INDEX_FILE_NAME);
+      Path dir = new Path(dirName);
+      Path dataFile = new Path(dir, DATA_FILE_NAME);
+      Path indexFile = new Path(dir, INDEX_FILE_NAME);
 
       // open the data
-      this.data = new SequenceFile.Reader(fs, dataFile.getPath(),  conf);
+      this.data = new SequenceFile.Reader(fs, dataFile,  conf);
       this.firstPosition = data.getPosition();
 
       if (comparator == null)
@@ -213,7 +211,7 @@
       this.getKey = this.comparator.newKey();
 
       // open the index
-      this.index = new SequenceFile.Reader(fs, indexFile.getPath(), conf);
+      this.index = new SequenceFile.Reader(fs, indexFile, conf);
     }
 
     private void readIndex() throws IOException {
@@ -387,8 +385,8 @@
   /** Renames an existing map directory. */
   public static void rename(FileSystem fs, String oldName, String newName)
     throws IOException {
-    File oldDir = new File(oldName);
-    File newDir = new File(newName);
+    Path oldDir = new Path(oldName);
+    Path newDir = new Path(newName);
     if (!fs.rename(oldDir, newDir)) {
       throw new IOException("Could not rename " + oldDir + " to " + newDir);
     }
@@ -396,9 +394,9 @@
 
   /** Deletes the named map file. */
   public static void delete(FileSystem fs, String name) throws IOException {
-    File dir = new File(name);
-    File data = new File(dir, DATA_FILE_NAME);
-    File index = new File(dir, INDEX_FILE_NAME);
+    Path dir = new Path(name);
+    Path data = new Path(dir, DATA_FILE_NAME);
+    Path index = new Path(dir, INDEX_FILE_NAME);
 
     fs.delete(data);
     fs.delete(index);
@@ -415,11 +413,11 @@
    * @return number of valid entries in this MapFile, or -1 if no fixing was needed
    * @throws Exception
    */
-  public static long fix(FileSystem fs, File dir,
+  public static long fix(FileSystem fs, Path dir,
           Class keyClass, Class valueClass, boolean dryrun, Configuration conf) throws Exception {
     String dr = (dryrun ? "[DRY RUN ] " : "");
-    File data = new File(dir, DATA_FILE_NAME);
-    File index = new File(dir, INDEX_FILE_NAME);
+    Path data = new Path(dir, DATA_FILE_NAME);
+    Path index = new Path(dir, INDEX_FILE_NAME);
     int indexInterval = 128;
     if (!fs.exists(data)) {
       // there's nothing we can do to fix this!
@@ -429,7 +427,7 @@
       // no fixing needed
       return -1;
     }
-    SequenceFile.Reader dataReader = new SequenceFile.Reader(fs, data.toString(), conf);
+    SequenceFile.Reader dataReader = new SequenceFile.Reader(fs, data, conf);
     if (!dataReader.getKeyClass().equals(keyClass)) {
       throw new Exception(dr + "Wrong key class in " + dir + ", expected" + keyClass.getName() +
               ", got " + dataReader.getKeyClass().getName());
@@ -442,7 +440,7 @@
     Writable key = (Writable)keyClass.getConstructor(new Class[0]).newInstance(new Object[0]);
     Writable value = (Writable)valueClass.getConstructor(new Class[0]).newInstance(new Object[0]);
     SequenceFile.Writer indexWriter = null;
-    if (!dryrun) indexWriter = new SequenceFile.Writer(fs, index.toString(), keyClass, LongWritable.class);
+    if (!dryrun) indexWriter = new SequenceFile.Writer(fs, index, keyClass, LongWritable.class);
     try {
       long pos = 0L;
       LongWritable position = new LongWritable();
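
With these changes, repairing a map directory now takes a Path; for
example (the directory, key/value classes, and the fs/conf handles are
illustrative):

    Path dir = new Path("/data/mymap");
    long entries = MapFile.fix(fs, dir, UTF8.class, LongWritable.class,
                               true /* dry run */, conf);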

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java Tue Apr 18 10:05:31 2006
@@ -51,7 +51,7 @@
     private FSDataOutputStream out;
     private DataOutputBuffer buffer = new DataOutputBuffer();
     private FileSystem fs = null;
-    private File target = null;
+    private Path target = null;
 
     private Class keyClass;
     private Class valClass;
@@ -78,9 +78,14 @@
       }
     }
 
+    /** @deprecated Call {@link #Writer(FileSystem,Path,Class,Class)}. */
+    public Writer(FileSystem fs, String name, Class keyClass, Class valClass)
+      throws IOException {
+      this(fs, new Path(name), keyClass, valClass, false);
+    }
+
     /** Create the named file. */
-    public Writer(FileSystem fs, String name,
-                  Class keyClass, Class valClass)
+    public Writer(FileSystem fs, Path name, Class keyClass, Class valClass)
       throws IOException {
       this(fs, name, keyClass, valClass, false);
     }
@@ -88,11 +93,11 @@
     /** Create the named file.
      * @param compress if true, values are compressed.
      */
-    public Writer(FileSystem fs, String name,
+    public Writer(FileSystem fs, Path name,
                   Class keyClass, Class valClass, boolean compress)
       throws IOException {
       this.fs = fs;
-      this.target = new File(name);
+      this.target = name;
       init(fs.create(target), keyClass, valClass, compress);
     }
     
@@ -197,7 +202,7 @@
 
   /** Writes key/value pairs from a sequence-format file. */
   public static class Reader {
-    private String file;
+    private Path file;
     private FSDataInputStream in;
     private DataOutputBuffer outBuf = new DataOutputBuffer();
     private DataInputBuffer inBuf = new DataInputBuffer();
@@ -221,26 +226,32 @@
     private Inflater inflater = new Inflater();
     private Configuration conf;
 
+    /** @deprecated Call {@link #Reader(FileSystem,Path,Configuration)}.*/
+    public Reader(FileSystem fs, String file, Configuration conf)
+      throws IOException {
+      this(fs, new Path(file), conf);
+    }
+
     /** Open the named file. */
-    public Reader(FileSystem fs, String file, Configuration conf) throws IOException {
+    public Reader(FileSystem fs, Path file, Configuration conf)
+      throws IOException {
       this(fs, file, conf.getInt("io.file.buffer.size", 4096));
       this.conf = conf;
     }
 
-    private Reader(FileSystem fs, String name, int bufferSize) throws IOException {
+    private Reader(FileSystem fs, Path name, int bufferSize) throws IOException {
       this.fs = fs;
       this.file = name;
-      File file = new File(name);
       this.in = fs.open(file, bufferSize);
       this.end = fs.getLength(file);
       init();
     }
     
-    private Reader(FileSystem fs, String file, int bufferSize, long start, long length)
+    private Reader(FileSystem fs, Path file, int bufferSize, long start, long length)
       throws IOException {
       this.fs = fs;
       this.file = file;
-      this.in = fs.open(new File(file), bufferSize);
+      this.in = fs.open(file, bufferSize);
       seek(start);
       init();
 
@@ -438,7 +449,7 @@
 
     /** Returns the name of the file. */
     public String toString() {
-      return file;
+      return file.toString();
     }
 
   }
@@ -453,10 +464,10 @@
 
     private WritableComparator comparator;
 
-    private String inFile;                        // when sorting
-    private String[] inFiles;                     // when merging
+    private Path inFile;                        // when sorting
+    private Path[] inFiles;                     // when merging
 
-    private String outFile;
+    private Path outFile;
 
     private int memory; // bytes
     private int factor; // merged per pass
@@ -497,8 +508,8 @@
     public int getMemory() { return memory; }
 
     /** Perform a file sort.*/
-    public void sort(String inFile, String outFile) throws IOException {
-      if (fs.exists(new File(outFile))) {
+    public void sort(Path inFile, Path outFile) throws IOException {
+      if (fs.exists(outFile)) {
         throw new IOException("already exists: " + outFile);
       }
 
@@ -536,7 +547,7 @@
       
       private Reader in;
       private FSDataOutputStream out;
-        private String outName;
+      private Path outName;
 
       public SortPass(Configuration conf) throws IOException {
         in = new Reader(fs, inFile, conf);
@@ -605,8 +616,8 @@
 
       private void flush(int count, boolean done) throws IOException {
         if (out == null) {
-          outName = done ? outFile : outFile+".0";
-          out = fs.create(new File(outName));
+          outName = done ? outFile : outFile.suffix(".0");
+          out = fs.create(outName);
         }
 
         if (!done) {                              // an intermediate file
@@ -694,29 +705,29 @@
 
       private MergeQueue queue;
       private FSDataInputStream in;
-      private String inName;
+      private Path inName;
 
       public MergePass(int pass, boolean last) throws IOException {
         this.pass = pass;
         this.last = last;
 
         this.queue =
-          new MergeQueue(factor, last ? outFile : outFile+"."+pass, last);
+          new MergeQueue(factor, last?outFile:outFile.suffix("."+pass), last);
 
-        this.inName = outFile+"."+(pass-1);
-        this.in = fs.open(new File(inName));
+        this.inName = outFile.suffix("."+(pass-1));
+        this.in = fs.open(inName);
       }
 
       public void close() throws IOException {
         in.close();                               // close and delete input
-        fs.delete(new File(inName));
+        fs.delete(inName);
 
         queue.close();                            // close queue
       }
 
       public int run() throws IOException {
         int segments = 0;
-        long end = fs.getLength(new File(inName));
+        long end = fs.getLength(inName);
 
         while (in.getPos() < end) {
           LOG.finer("merging segment " + segments);
@@ -756,12 +767,12 @@
     }
 
     /** Merge the provided files.*/
-    public void merge(String[] inFiles, String outFile) throws IOException {
+    public void merge(Path[] inFiles, Path outFile) throws IOException {
       this.inFiles = inFiles;
       this.outFile = outFile;
       this.factor = inFiles.length;
 
-      if (new File(outFile).exists()) {
+      if (fs.exists(outFile)) {
         throw new IOException("already exists: " + outFile);
       }
 
@@ -788,7 +799,7 @@
       public void run() throws IOException {
         LOG.finer("merging files=" + inFiles.length);
         for (int i = 0; i < inFiles.length; i++) {
-          String inFile = inFiles[i];
+          Path inFile = inFiles[i];
           MergeStream ms =
             new MergeStream(new Reader(fs, inFile, memory/(factor+1)));
           if (ms.next())
@@ -836,10 +847,10 @@
         put(stream);
       }
 
-      public MergeQueue(int size, String outName, boolean done)
+      public MergeQueue(int size, Path outName, boolean done)
         throws IOException {
         initialize(size);
-        this.out = fs.create(new File(outName), true, memory/(factor+1));
+        this.out = fs.create(outName, true, memory/(factor+1));
         this.done = done;
       }
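
The String-based Writer and Reader constructors survive as deprecated
shims, so existing callers still compile while new code passes Path
objects directly; for example (the file name and classes are
illustrative):

    Path name = new Path("out/data");
    SequenceFile.Writer writer =
      new SequenceFile.Writer(fs, name, UTF8.class, LongWritable.class);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, name, conf);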
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java Tue Apr 18 10:05:31 2006
@@ -17,38 +17,47 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.io.File;
 import java.io.DataInput;
 import java.io.DataOutput;
+import java.io.File;                              // deprecated
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 
 /** A section of an input file.  Returned by {@link
  * InputFormat#getSplits(FileSystem, JobConf, int)} and passed to
  * InputFormat#getRecordReader(FileSystem,FileSplit,JobConf,Reporter). */
 public class FileSplit implements Writable {
-  private File file;
+  private Path file;
   private long start;
   private long length;
   
   FileSplit() {}
 
+  /** @deprecated Call {@link #FileSplit(Path,long,long)} instead. */
+  public FileSplit(File file, long start, long length) {
+    this(new Path(file.toString()), start, length);
+  }
+
   /** Constructs a split.
    *
    * @param file the file name
    * @param start the position of the first byte in the file to process
    * @param length the number of bytes in the file to process
    */
-  public FileSplit(File file, long start, long length) {
+  public FileSplit(Path file, long start, long length) {
     this.file = file;
     this.start = start;
     this.length = length;
   }
   
+  /** @deprecated Call {@link #getPath()} instead. */
+  public File getFile() { return new File(file.toString()); }
+  
   /** The file containing this split's data. */
-  public File getFile() { return file; }
+  public Path getPath() { return file; }
   
   /** The position of the first byte in the file to process. */
   public long getStart() { return start; }
@@ -68,7 +77,7 @@
     out.writeLong(length);
   }
   public void readFields(DataInput in) throws IOException {
-    file = new File(UTF8.readString(in));
+    file = new Path(UTF8.readString(in));
     start = in.readLong();
     length = in.readLong();
   }
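
Caller migration is mechanical; for instance (the file and length are
illustrative):

    FileSplit split = new FileSplit(new Path("/data/part-0"), 0, 1048576);
    Path p = split.getPath();            // replaces the deprecated getFile()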

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java Tue Apr 18 10:05:31 2006
@@ -17,12 +17,13 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.io.File;
+import java.io.File;                              // deprecated
 
 import java.util.ArrayList;
 import java.util.logging.Logger;
 
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.LogFormatter;
 
 /** A base class for {@link InputFormat}. */
@@ -45,35 +46,40 @@
                                                Reporter reporter)
     throws IOException;
 
+  /** @deprecated Call {@link #listFiles(FileSystem,JobConf)} instead. */
+  protected File[] listFiles(FileSystem fs, JobConf job)
+    throws IOException {
+    Path[] paths = listPaths(fs, job);
+    File[] result = new File[paths.length];
+    for (int i = 0 ; i < paths.length; i++) {
+      result[i] = new File(paths[i].toString());
+    }
+    return result;
+  }
+
   /** List input directories.
    * Subclasses may override to, e.g., select only files matching a regular
    * expression.
    * Property mapred.input.subdir, if set, names a subdirectory that
    * is appended to all input dirs specified by job, and if the given fs
-   * lists those too, each is added to the returned array of File.
+   * lists those too, each is added to the returned array of Path.
    * @param fs
    * @param job
-   * @return array of File objects, never zero length.
+   * @return array of Path objects, never zero length.
    * @throws IOException if zero items.
    */
-  protected File[] listFiles(FileSystem fs, JobConf job)
+  protected Path[] listPaths(FileSystem fs, JobConf job)
     throws IOException {
-    File[] dirs = job.getInputDirs();
-    String workDir = job.getWorkingDirectory();
+    Path[] dirs = job.getInputPaths();
     String subdir = job.get("mapred.input.subdir");
     ArrayList result = new ArrayList();
     for (int i = 0; i < dirs.length; i++) {
-      // if it is relative, make it absolute using the directory from the 
-      // JobConf
-      if (workDir != null && !fs.isAbsolute(dirs[i])) {
-        dirs[i] = new File(workDir, dirs[i].toString());
-      }
-      File[] dir = fs.listFiles(dirs[i]);
+      Path[] dir = fs.listPaths(dirs[i]);
       if (dir != null) {
         for (int j = 0; j < dir.length; j++) {
-          File file = dir[j];
+          Path file = dir[j];
           if (subdir != null) {
-            File[] subFiles = fs.listFiles(new File(file, subdir));
+            Path[] subFiles = fs.listPaths(new Path(file, subdir));
             if (subFiles != null) {
               for (int k = 0; k < subFiles.length; k++) {
                 result.add(subFiles[k]);
@@ -89,18 +95,18 @@
     if (result.size() == 0) {
       throw new IOException("No input directories specified in: "+job);
     }
-    return (File[])result.toArray(new File[result.size()]);
+    return (Path[])result.toArray(new Path[result.size()]);
   }
 
-  /** Splits files returned by {#listFiles(FileSystem,JobConf) when
+  /** Splits files returned by {@link #listPaths(FileSystem,JobConf)} when
    * they're too big.*/ 
   public FileSplit[] getSplits(FileSystem fs, JobConf job, int numSplits)
     throws IOException {
 
-    File[] files = listFiles(fs, job);
+    Path[] files = listPaths(fs, job);
 
     for (int i = 0; i < files.length; i++) {      // check we have valid files
-      File file = files[i];
+      Path file = files[i];
       if (fs.isDirectory(file) || !fs.exists(file)) {
         throw new IOException("Not a file: "+files[i]);
       }
@@ -132,7 +138,7 @@
 
     ArrayList splits = new ArrayList(numSplits);  // generate splits
     for (int i = 0; i < files.length; i++) {
-      File file = files[i];
+      Path file = files[i];
       long length = fs.getLength(file);
 
       long bytesRemaining = length;
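
Per the listPaths() Javadoc above, mapred.input.subdir narrows every
input directory to a common subdirectory; a sketch (the paths are
illustrative):

    JobConf job = new JobConf(conf);
    job.addInputPath(new Path("/logs/2006-04-17"));
    job.addInputPath(new Path("/logs/2006-04-18"));
    job.set("mapred.input.subdir", "hourly"); // lists /logs/<date>/hourly/*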

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Tue Apr 18 10:05:31 2006
@@ -236,31 +236,32 @@
         //
 
         // Create a number of filenames in the JobTracker's fs namespace
-        File submitJobDir = new File(job.getSystemDir(), "submit_" + Integer.toString(Math.abs(r.nextInt()), 36));
-        File submitJobFile = new File(submitJobDir, "job.xml");
-        File submitJarFile = new File(submitJobDir, "job.jar");
+        Path submitJobDir = new Path(job.getSystemDir(), "submit_" + Integer.toString(Math.abs(r.nextInt()), 36));
+        Path submitJobFile = new Path(submitJobDir, "job.xml");
+        Path submitJarFile = new Path(submitJobDir, "job.jar");
 
         String originalJarPath = job.getJar();
 
-        if (originalJarPath != null) {           // Copy jar to JobTracker's fs
+        FileSystem localFs = FileSystem.getNamed("local", job);
+        FileSystem fs = getFs();
+
+        if (originalJarPath != null) {           // copy jar to JobTracker's fs
           job.setJar(submitJarFile.toString());
-          getFs().copyFromLocalFile(new File(originalJarPath), submitJarFile);
+          fs.copyFromLocalFile(new Path(originalJarPath), submitJarFile);
         }
 
-        FileSystem fileSys = getFs();
-
         // Set the user's name and working directory
         String user = System.getProperty("user.name");
         job.setUser(user != null ? user : "Dr Who");
         if (job.getWorkingDirectory() == null) {
-          job.setWorkingDirectory(fileSys.getWorkingDirectory().toString());          
+          job.setWorkingDirectory(fs.getWorkingDirectory());          
         }
 
         // Check the output specification
         job.getOutputFormat().checkOutputSpecs(fs, job);
 
         // Write job file to JobTracker's fs        
-        FSDataOutputStream out = fileSys.create(submitJobFile);
+        FSDataOutputStream out = fs.create(submitJobFile);
         try {
           job.write(out);
         } finally {
@@ -270,7 +271,7 @@
         //
         // Now, actually submit the job (using the submit name)
         //
-        JobStatus status = jobSubmitClient.submitJob(submitJobFile.getPath());
+        JobStatus status = jobSubmitClient.submitJob(submitJobFile.toString());
         if (status != null) {
             return new NetworkedJob(status);
         } else {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java Tue Apr 18 10:05:31 2006
@@ -28,7 +28,9 @@
 import java.net.URL;
 import java.net.URLDecoder;
 
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configuration;
 
 import org.apache.hadoop.io.Writable;
@@ -82,14 +84,14 @@
    * @param config a Configuration-format XML job description file
    */
   public JobConf(String config) {
-    this(new File(config));
+    this(new Path(config));
   }
 
   /** Construct a map/reduce configuration.
    *
    * @param config a Configuration-format XML job description file
    */
-  public JobConf(File config) {
+  public JobConf(Path config) {
     super();
     addDefaultResource("mapred-default.xml");
     addDefaultResource(config);
@@ -98,9 +100,8 @@
   public String getJar() { return get("mapred.jar"); }
   public void setJar(String jar) { set("mapred.jar", jar); }
 
-  public File getSystemDir() {
-    return new File(get("mapred.system.dir", "/tmp/hadoop/mapred/system"))
-      .getAbsoluteFile();
+  public Path getSystemDir() {
+    return new Path(get("mapred.system.dir", "/tmp/hadoop/mapred/system"));
   }
 
   public String[] getLocalDirs() throws IOException {
@@ -110,35 +111,50 @@
   public void deleteLocalFiles() throws IOException {
     String[] localDirs = getLocalDirs();
     for (int i = 0; i < localDirs.length; i++) {
-      FileUtil.fullyDelete(new File(localDirs[i]), this);
+      FileSystem.getNamed("local", this).delete(new Path(localDirs[i]));
     }
   }
 
   public void deleteLocalFiles(String subdir) throws IOException {
     String[] localDirs = getLocalDirs();
     for (int i = 0; i < localDirs.length; i++) {
-      FileUtil.fullyDelete(new File(localDirs[i], subdir), this);
+      FileSystem.getNamed("local", this).delete(new Path(localDirs[i], subdir));
     }
   }
 
+  /** @deprecated Call {@link #getLocalPath(String)} instead. */
+  public File getLocalFile(String subdir, String name) throws IOException {
+    return new File(getLocalPath(subdir+Path.SEPARATOR+name).toString());
+  }
+
   /** Constructs a local file name.  Files are distributed among configured
    * local directories.*/
-  public File getLocalFile(String subdir, String name) throws IOException {
-    return getFile("mapred.local.dir", subdir + File.separator + name);
+  public Path getLocalPath(String pathString) throws IOException {
+    return getLocalPath("mapred.local.dir", pathString);
+  }
+
+  /** @deprecated Call {@link #setInputPath(Path)} instead.*/
+  public void setInputDir(File dir) { setInputPath(new Path(dir.toString())); }
+
+  public void setInputPath(Path dir) {
+    dir = new Path(getWorkingDirectory(), dir);
+    set("mapred.input.dir", dir);
   }
 
-  public void setInputDir(File dir) { set("mapred.input.dir", dir); }
+  /** @deprecated Call {@link #addInputPath(Path)} instead.*/
+  public void addInputDir(File dir) { addInputPath(new Path(dir.toString())); }
 
-  public void addInputDir(File dir) {
+  public void addInputPath(Path dir) {
+    dir = new Path(getWorkingDirectory(), dir);
     String dirs = get("mapred.input.dir");
     set("mapred.input.dir", dirs == null ? dir.toString() : dirs + "," + dir);
   }
-  public File[] getInputDirs() {
+  public Path[] getInputPaths() {
     String dirs = get("mapred.input.dir", "");
     ArrayList list = Collections.list(new StringTokenizer(dirs, ","));
-    File[] result = new File[list.size()];
+    Path[] result = new Path[list.size()];
     for (int i = 0; i < list.size(); i++) {
-      result[i] = new File((String)list.get(i));
+      result[i] = new Path((String)list.get(i));
     }
     return result;
   }
@@ -163,24 +179,47 @@
    * Set the current working directory for the default file system
    * @param dir the new current working directory
    */
-  public void setWorkingDirectory(String dir) {
-    set("mapred.working.dir", dir);
+  public void setWorkingDirectory(Path dir) {
+    dir = new Path(getWorkingDirectory(), dir);
+    set("mapred.working.dir", dir.toString());
   }
   
   /**
    * Get the current working directory for the default file system.
    * @return the directory name
    */
-  public String getWorkingDirectory() {
-    return get("mapred.working.dir"); 
+  public Path getWorkingDirectory() {
+    String name = get("mapred.working.dir");
+    if (name != null) {
+      return new Path(name);
+    } else {
+      try {
+        Path dir = FileSystem.get(this).getWorkingDirectory();
+        set("mapred.working.dir", dir.toString());
+        return dir;
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
   }
   
-  public File getOutputDir() { 
+  /** @deprecated Call {@link #getOutputPath()} instead.*/
+  public File getOutputDir() { return new File(getOutputPath().toString()); }
+
+  public Path getOutputPath() { 
     String name = get("mapred.output.dir");
-    return name == null ? null: new File(name);
+    return name == null ? null: new Path(name);
   }
 
-  public void setOutputDir(File dir) { set("mapred.output.dir", dir); }
+  /** @deprecated Call {@link #setOutputPath(Path)} instead.*/
+  public void setOutputDir(File dir) {
+    setOutputPath(new Path(dir.toString()));
+  }
+
+  public void setOutputPath(Path dir) {
+    dir = new Path(getWorkingDirectory(), dir);
+    set("mapred.output.dir", dir);
+  }
 
   public InputFormat getInputFormat() {
     return (InputFormat)newInstance(getClass("mapred.input.format.class",
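
Relative input and output paths are now resolved against the job's
working directory, so the following (illustrative) calls store absolute
paths:

    job.setWorkingDirectory(new Path("/user/hadoop"));
    job.setInputPath(new Path("input"));     // stored as /user/hadoop/input
    job.setOutputPath(new Path("output"));   // stored as /user/hadoop/output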

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Tue Apr 18 10:05:31 2006
@@ -35,8 +35,8 @@
 
     JobProfile profile;
     JobStatus status;
-    File localJobFile = null;
-    File localJarFile = null;
+    Path localJobFile = null;
+    Path localJarFile = null;
 
     TaskInProgress maps[] = new TaskInProgress[0];
     TaskInProgress reduces[] = new TaskInProgress[0];
@@ -52,6 +52,8 @@
     private JobConf conf;
     boolean tasksInited = false;
 
+    private LocalFileSystem localFs;
+  
     /**
      * Create a JobInProgress with the given job file, plus a handle
      * to the tracker.
@@ -63,22 +65,22 @@
         this.jobtracker = jobtracker;
         this.status = new JobStatus(jobid, 0.0f, 0.0f, JobStatus.PREP);
         this.startTime = System.currentTimeMillis();
+        this.localFs = (LocalFileSystem)FileSystem.getNamed("local", default_conf);
 
         JobConf default_job_conf = new JobConf(default_conf);
-        this.localJobFile = default_job_conf.getLocalFile(JobTracker.SUBDIR, 
-            jobid + ".xml");
-        this.localJarFile = default_job_conf.getLocalFile(JobTracker.SUBDIR, 
-            jobid + ".jar");
+        this.localJobFile = default_job_conf.getLocalPath(JobTracker.SUBDIR 
+                                                          +"/"+jobid + ".xml");
+        this.localJarFile = default_job_conf.getLocalPath(JobTracker.SUBDIR
+                                                          +"/"+ jobid + ".jar");
         FileSystem fs = FileSystem.get(default_conf);
-        fs.copyToLocalFile(new File(jobFile), localJobFile);
-
+        fs.copyToLocalFile(new Path(jobFile), localJobFile);
         conf = new JobConf(localJobFile);
         this.profile = new JobProfile(conf.getUser(), jobid, jobFile, url,
                                       conf.getJobName());
         String jarFile = conf.getJar();
         if (jarFile != null) {
-          fs.copyToLocalFile(new File(jarFile), localJarFile);
-          conf.setJar(localJarFile.getCanonicalPath());
+          fs.copyToLocalFile(new Path(jarFile), localJarFile);
+          conf.setJar(localJarFile.toString());
         }
 
         this.numMapTasks = conf.getNumMapTasks();
@@ -107,7 +109,7 @@
         if (ifClassName != null && localJarFile != null) {
           try {
             ClassLoader loader =
-              new URLClassLoader(new URL[]{ localJarFile.toURL() });
+              new URLClassLoader(new URL[]{ localFs.pathToFile(localJarFile).toURL() });
             Class inputFormatClass = loader.loadClass(ifClassName);
             inputFormat = (InputFormat)inputFormatClass.newInstance();
           } catch (Exception e) {
@@ -152,7 +154,7 @@
         // Obtain some tasktracker-cache information for the map task splits.
         //
         for (int i = 0; i < maps.length; i++) {
-            String hints[][] = fs.getFileCacheHints(splits[i].getFile(), splits[i].getStart(), splits[i].getLength());
+            String hints[][] = fs.getFileCacheHints(splits[i].getPath(), splits[i].getStart(), splits[i].getLength());
             cachedHints.put(maps[i].getTIPId(), hints);
         }
 
@@ -165,7 +167,7 @@
      * prefetches and caches a lot of these hints.  If the hint is
      * not available, then we pass it through to the filesystem.
      */
-    String[][] getFileCacheHints(String tipID, File f, long start, long len) throws IOException {
+    String[][] getFileCacheHints(String tipID, Path f, long start, long len) throws IOException {
         String results[][] = (String[][]) cachedHints.get(tipID);
         if (tipID == null) {
             FileSystem fs = FileSystem.get(conf);
@@ -471,7 +473,6 @@
     synchronized void garbageCollect() {
       try {
         // Definitely remove the local-disk copy of the job file
-        FileSystem localFs = FileSystem.getNamed("local", conf);
         if (localJobFile != null) {
             localFs.delete(localJobFile);
             localJobFile = null;
@@ -484,7 +485,7 @@
         // JobClient always creates a new directory with job files
         // so we remove that directory to cleanup
         FileSystem fs = FileSystem.get(conf);
-        fs.delete(new File(profile.getJobFile()).getParentFile());
+        fs.delete(new Path(profile.getJobFile()).getParent());
 
       } catch (IOException e) {
         LOG.warning("Error cleaning up "+profile.getJobId()+": "+e);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java Tue Apr 18 10:05:31 2006
@@ -300,7 +300,7 @@
     // the files when we're done with the job.
     static final String SUBDIR = "jobTracker";
     FileSystem fs;
-    File systemDir;
+    Path systemDir;
     private Configuration conf;
 
     /**
@@ -323,7 +323,7 @@
         JobConf jobConf = new JobConf(conf);
         this.systemDir = jobConf.getSystemDir();
         this.fs = FileSystem.get(conf);
-        FileUtil.fullyDelete(fs, systemDir);
+        fs.delete(systemDir);
         fs.mkdirs(systemDir);
 
         // Same with 'localDir' except it's always on the local disk.

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java Tue Apr 18 10:05:31 2006
@@ -45,7 +45,7 @@
     private ArrayList mapIds = new ArrayList();
     private MapOutputFile mapoutputFile;
     private JobProfile profile;
-    private File localFile;
+    private Path localFile;
     private FileSystem localFs;
 
     public Job(String file, Configuration conf) throws IOException {
@@ -54,10 +54,10 @@
       this.mapoutputFile = new MapOutputFile();
       this.mapoutputFile.setConf(conf);
 
-      this.localFile = new JobConf(conf).getLocalFile("localRunner", id+".xml");
+      this.localFile = new JobConf(conf).getLocalPath("localRunner/"+id+".xml");
       this.localFs = FileSystem.getNamed("local", conf);
 
-      fs.copyToLocalFile(new File(file), localFile);
+      fs.copyToLocalFile(new Path(file), localFile);
       this.job = new JobConf(localFile);
       profile = new JobProfile(job.getUser(), id, file, 
                                "http://localhost:8080/", job.getJobName());
@@ -73,18 +73,10 @@
       return profile;
     }
     
-    private void setWorkingDirectory(JobConf conf, FileSystem fs) {
-      String dir = conf.getWorkingDirectory();
-      if (dir != null) {
-        fs.setWorkingDirectory(new File(dir));
-      }
-    }
-    
     public void run() {
       try {
         // split input into minimum number of splits
         FileSplit[] splits;
-        setWorkingDirectory(job, fs);
         splits = job.getInputFormat().getSplits(fs, job, 1);
 
         
@@ -103,9 +95,9 @@
         String reduceId = "reduce_" + newId();
         for (int i = 0; i < mapIds.size(); i++) {
           String mapId = (String)mapIds.get(i);
-          File mapOut = this.mapoutputFile.getOutputFile(mapId, 0);
-          File reduceIn = this.mapoutputFile.getInputFile(mapId, reduceId);
-          reduceIn.getParentFile().mkdirs();
+          Path mapOut = this.mapoutputFile.getOutputFile(mapId, 0);
+          Path reduceIn = this.mapoutputFile.getInputFile(mapId, reduceId);
+          localFs.mkdirs(reduceIn.getParent());
           if (!localFs.rename(mapOut, reduceIn))
             throw new IOException("Couldn't rename " + mapOut);
           this.mapoutputFile.removeAll(mapId);
@@ -116,9 +108,7 @@
         for (int i = 0; i < mapIds.size(); i++) {
             mapDependencies[i][0] = (String) mapIds.get(i);
         }
-        setWorkingDirectory(job, fs);
-        ReduceTask reduce = new ReduceTask(file, reduceId,
-            mapDependencies,0);
+        ReduceTask reduce = new ReduceTask(file, reduceId, mapDependencies,0);
         reduce.setConf(job);
         reduce_tasks += 1;
         reduce.run(job, this);
@@ -133,7 +123,7 @@
 
       } finally {
         try {
-          fs.delete(new File(file).getParentFile()); // delete submit dir
+          fs.delete(new Path(file).getParent());  // delete submit dir
           localFs.delete(localFile);              // delete local copy
         } catch (IOException e) {
           LOG.warning("Error cleaning up "+id+": "+e);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java Tue Apr 18 10:05:31 2006
@@ -17,10 +17,11 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.io.File;
+import java.io.File;                              // deprecated
 import java.util.Arrays;
 
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 
 import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.WritableComparable;
@@ -33,7 +34,7 @@
   public RecordWriter getRecordWriter(FileSystem fs, JobConf job,
                                       String name) throws IOException {
 
-    File file = new File(job.getOutputDir(), name);
+    Path file = new Path(job.getOutputPath(), name);
 
     final MapFile.Writer out =
       new MapFile.Writer(fs, file.toString(),
@@ -53,10 +54,18 @@
       };
   }
 
+  /** @deprecated Call {@link #getReaders(FileSystem, Path, Configuration)}. */
+  public static MapFile.Reader[] getReaders(FileSystem fs, File dir,
+                                            Configuration conf)
+    throws IOException {
+    return getReaders(fs, new Path(dir.toString()), conf);
+  }
+
   /** Open the output generated by this format. */
-  public static MapFile.Reader[] getReaders(FileSystem fs, File dir, Configuration conf)
+  public static MapFile.Reader[] getReaders(FileSystem fs, Path dir,
+                                            Configuration conf)
     throws IOException {
-    File[] names = fs.listFiles(dir);
+    Path[] names = fs.listPaths(dir);
     
     // sort names, so that hash partitioning works
     Arrays.sort(names);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java Tue Apr 18 10:05:31 2006
@@ -57,24 +57,24 @@
    * @param mapTaskId a map task id
    * @param partition a reduce partition
    */
-  public File getOutputFile(String mapTaskId, int partition)
+  public Path getOutputFile(String mapTaskId, int partition)
     throws IOException {
-    return this.jobConf.getLocalFile(mapTaskId, "part-"+partition+".out");
+    return this.jobConf.getLocalPath(mapTaskId+"/part-"+partition+".out");
   }
 
   /** Create a local reduce input file name.
    * @param mapTaskId a map task id
    * @param reduceTaskId a reduce task id
    */
-  public File getInputFile(String mapTaskId, String reduceTaskId)
+  public Path getInputFile(String mapTaskId, String reduceTaskId)
     throws IOException {
-    return this.jobConf.getLocalFile(reduceTaskId, mapTaskId+".out");
+    return this.jobConf.getLocalPath(reduceTaskId+"/"+mapTaskId+".out");
   }
-  public File getInputFile(String mapTaskIds[], String reduceTaskId)
+  public Path getInputFile(String mapTaskIds[], String reduceTaskId)
     throws IOException {
     for (int i = 0; i < mapTaskIds.length; i++) {
-      File file = jobConf.getLocalFile(reduceTaskId, mapTaskIds[i]+".out");
-      if (file.exists())
+      Path file = jobConf.getLocalPath(reduceTaskId+"/"+mapTaskIds[i]+".out");
+      if (getLocalFs().exists(file))
         return file;
     }
     throw new IOException("Input file not found!");
@@ -103,17 +103,21 @@
     this.partition = partition;
   }
 
+  private FileSystem getLocalFs() throws IOException {
+    return FileSystem.getNamed("local", this.jobConf);
+  }
+
   public void write(DataOutput out) throws IOException {
     UTF8.writeString(out, mapTaskId);
     UTF8.writeString(out, reduceTaskId);
     out.writeInt(partition);
     
-    File file = getOutputFile(mapTaskId, partition);
+    Path file = getOutputFile(mapTaskId, partition);
     FSDataInputStream in = null;
     try {
       // write the length-prefixed file content to the wire
-      out.writeLong(file.length());
-      in = FileSystem.getNamed("local", this.jobConf).open(file);
+      out.writeLong(getLocalFs().getLength(file));
+      in = getLocalFs().open(file);
     } catch (FileNotFoundException e) {
       TaskTracker.LOG.log(Level.SEVERE, "Can't open map output:" + file, e);
       ((MapOutputServer)Server.get()).getTaskTracker().mapOutputLost(mapTaskId);
@@ -146,11 +150,11 @@
     ProgressReporter reporter = (ProgressReporter)REPORTERS.get();
 
     // read the length-prefixed file content into a local file
-    File file = getInputFile(mapTaskId, reduceTaskId);
+    Path file = getInputFile(mapTaskId, reduceTaskId);
     long length = in.readLong();
     float progPerByte = 1.0f / length;
     long unread = length;
-    FSDataOutputStream out = FileSystem.getNamed("local", this.jobConf).create(file);
+    FSDataOutputStream out = getLocalFs().create(file);
     try {
       byte[] buffer = new byte[8192];
       while (unread > 0) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java Tue Apr 18 10:05:31 2006
@@ -76,7 +76,7 @@
       for (int i = 0; i < partitions; i++) {
         outs[i] =
           new SequenceFile.Writer(FileSystem.getNamed("local", job),
-                                  this.mapOutputFile.getOutputFile(getTaskId(), i).toString(),
+                                  this.mapOutputFile.getOutputFile(getTaskId(), i),
                                   job.getOutputKeyClass(),
                                   job.getOutputValueClass());
       }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormatBase.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormatBase.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormatBase.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormatBase.java Tue Apr 18 10:05:31 2006
@@ -17,9 +17,9 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.io.File;
 
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 
 /** A base class for {@link OutputFormat}. */
 public abstract class OutputFormatBase implements OutputFormat {
@@ -29,7 +29,7 @@
 
   public void checkOutputSpecs(FileSystem fs, JobConf job) throws IOException {
     // Ensure that the output directory is set and not already there
-    File outDir = job.getOutputDir();
+    Path outDir = job.getOutputPath();
     if (outDir == null && job.getNumReduceTasks() != 0) {
       throw new IOException("Output directory not set in JobConf.");
     }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java Tue Apr 18 10:05:31 2006
@@ -184,7 +184,7 @@
     copyPhase.complete();                         // copy is already complete
 
     // open a file to collect map output
-    String file = job.getLocalFile(getTaskId(), "all.1").toString();
+    Path file = job.getLocalPath(getTaskId()+Path.SEPARATOR+"all.1");
     SequenceFile.Writer writer =
       new SequenceFile.Writer(lfs, file, keyClass, valueClass);
     try {
@@ -196,14 +196,13 @@
       DataOutputBuffer buffer = new DataOutputBuffer();
 
       for (int i = 0; i < mapTaskIds.length; i++) {
-        File partFile =
+        Path partFile =
           this.mapOutputFile.getInputFile(mapTaskIds[i], getTaskId());
         float progPerByte = 1.0f / lfs.getLength(partFile);
         Progress phase = appendPhase.phase();
         phase.setStatus(partFile.toString());
 
-        SequenceFile.Reader in =
-          new SequenceFile.Reader(lfs, partFile.toString(), job);
+        SequenceFile.Reader in = new SequenceFile.Reader(lfs, partFile, job);
         try {
           int keyLen;
           while((keyLen = in.next(buffer)) > 0) {
@@ -241,7 +240,7 @@
       };
     sortProgress.setName("Sort progress reporter for task "+getTaskId());
 
-    String sortedFile = job.getLocalFile(getTaskId(), "all.2").toString();
+    Path sortedFile = job.getLocalPath(getTaskId()+Path.SEPARATOR+"all.2");
 
     WritableComparator comparator = job.getOutputKeyComparator();
     
@@ -252,7 +251,7 @@
       SequenceFile.Sorter sorter =
         new SequenceFile.Sorter(lfs, comparator, valueClass, job);
       sorter.sort(file, sortedFile);              // sort
-      lfs.delete(new File(file));                 // remove unsorted
+      lfs.delete(file);                           // remove unsorted
 
     } finally {
       sortComplete = true;
@@ -275,7 +274,7 @@
     // apply reduce function
     SequenceFile.Reader in = new SequenceFile.Reader(lfs, sortedFile, job);
     Reporter reporter = getReporter(umbilical, getProgress());
-    long length = lfs.getLength(new File(sortedFile));
+    long length = lfs.getLength(sortedFile);
     try {
       ValuesIterator values = new ValuesIterator(in, length, comparator,
                                                  umbilical);
@@ -287,7 +286,7 @@
     } finally {
       reducer.close();
       in.close();
-      lfs.delete(new File(sortedFile));           // remove sorted
+      lfs.delete(sortedFile);                     // remove sorted
       out.close(reporter);
     }
     done(umbilical);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java Tue Apr 18 10:05:31 2006
@@ -17,9 +17,9 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.io.File;
 
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.MapFile;
@@ -31,14 +31,14 @@
     setMinSplitSize(SequenceFile.SYNC_INTERVAL);
   }
 
-  protected File[] listFiles(FileSystem fs, JobConf job)
+  protected Path[] listPaths(FileSystem fs, JobConf job)
     throws IOException {
 
-    File[] files = super.listFiles(fs, job);
+    Path[] files = super.listPaths(fs, job);
     for (int i = 0; i < files.length; i++) {
-      File file = files[i];
-      if (file.isDirectory()) {                   // it's a MapFile
-        files[i] = new File(file, MapFile.DATA_FILE_NAME); // use the data file
+      Path file = files[i];
+      if (fs.isDirectory(file)) {                 // it's a MapFile
+        files[i] = new Path(file, MapFile.DATA_FILE_NAME); // use the data file
       }
     }
     return files;
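
Since a Path is only a name, with no handle to any particular filesystem, directory tests now go through the FileSystem; that is what lets the same code serve DFS and local disk alike. A condensed sketch of the MapFile resolution above (helper name invented):

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.MapFile;

    /** Sketch: if p names a MapFile directory, return its data file. */
    static Path resolveMapFile(FileSystem fs, Path p) throws IOException {
      // Only the FileSystem can say whether a Path names a directory.
      return fs.isDirectory(p) ? new Path(p, MapFile.DATA_FILE_NAME) : p;
    }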

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java Tue Apr 18 10:05:31 2006
@@ -17,10 +17,11 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.io.File;
+import java.io.File;                              // deprecated
 import java.util.Arrays;
 
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.WritableComparable;
@@ -33,10 +34,10 @@
   public RecordWriter getRecordWriter(FileSystem fs, JobConf job,
                                       String name) throws IOException {
 
-    File file = new File(job.getOutputDir(), name);
+    Path file = new Path(job.getOutputPath(), name);
 
     final SequenceFile.Writer out =
-      new SequenceFile.Writer(fs, file.toString(),
+      new SequenceFile.Writer(fs, file,
                               job.getOutputKeyClass(),
                               job.getOutputValueClass(),
                               job.getBoolean("mapred.output.compress", false));
@@ -53,18 +54,24 @@
       };
   }
 
+  /** @deprecated Call {@link #getReaders(Configuration, Path)} instead. */
+  public static SequenceFile.Reader[] getReaders(Configuration conf, File dir) 
+    throws IOException {
+    return getReaders(conf, new Path(dir.toString()));
+  }
+
   /** Open the output generated by this format. */
-  public static SequenceFile.Reader[] getReaders(Configuration conf, File dir)
+  public static SequenceFile.Reader[] getReaders(Configuration conf, Path dir)
     throws IOException {
     FileSystem fs = FileSystem.get(conf);
-    File[] names = fs.listFiles(dir);
+    Path[] names = fs.listPaths(dir);
     
     // sort names, so that hash partitioning works
     Arrays.sort(names);
     
     SequenceFile.Reader[] parts = new SequenceFile.Reader[names.length];
     for (int i = 0; i < names.length; i++) {
-      parts[i] = new SequenceFile.Reader(fs, names[i].toString(), conf);
+      parts[i] = new SequenceFile.Reader(fs, names[i], conf);
     }
     return parts;
   }
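
The File overload of getReaders() survives as a deprecated one-line bridge, so existing callers keep compiling while the Path overload becomes primary. The general shape of that migration pattern, with invented class and method names:

    import java.io.File;
    import java.io.IOException;
    import org.apache.hadoop.fs.Path;

    public class Example {                         // name invented
      /** @deprecated Call {@link #open(Path)} instead. */
      public static void open(File dir) throws IOException {
        open(new Path(dir.toString()));            // delegate to the Path overload
      }

      public static void open(Path dir) throws IOException {
        // real work happens only here
      }
    }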

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java Tue Apr 18 10:05:31 2006
@@ -33,7 +33,7 @@
   public SequenceFileRecordReader(Configuration conf, FileSplit split)
     throws IOException {
     FileSystem fs = FileSystem.get(conf);
-    this.in = new SequenceFile.Reader(fs, split.getFile().toString(), conf);
+    this.in = new SequenceFile.Reader(fs, split.getPath(), conf);
     this.end = split.getStart() + split.getLength();
 
     if (split.getStart() > in.getPosition())

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java Tue Apr 18 10:05:31 2006
@@ -372,7 +372,7 @@
             try {
                 if (isMapTask()) {
                     if (hints == null) {
-                        hints = job.getFileCacheHints(getTIPId(), split.getFile(), split.getStart(), split.getLength());
+                        hints = job.getFileCacheHints(getTIPId(), split.getPath(), split.getStart(), split.getLength());
                     }
                     if (hints != null) {
                         for (int i = 0; i < hints.length; i++) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Tue Apr 18 10:05:31 2006
@@ -379,7 +379,7 @@
             this.task = task;
             this.lastProgressReport = System.currentTimeMillis();
             this.jobConf = new JobConf(conf);
-            this.jobConf.deleteLocalFiles(SUBDIR + File.separator + task.getTaskId());
+            this.jobConf.deleteLocalFiles(SUBDIR + "/" + task.getTaskId());
             localizeTask(task);
         }
 
@@ -388,23 +388,23 @@
          * So here, edit the Task's fields appropriately.
          */
         void localizeTask(Task t) throws IOException {
-            File localJobFile =
-              this.jobConf.getLocalFile(SUBDIR+File.separator+t.getTaskId(), "job.xml");
-            File localJarFile =
-              this.jobConf.getLocalFile(SUBDIR+File.separator+t.getTaskId(), "job.jar");
+            Path localJobFile =
+              this.jobConf.getLocalPath(SUBDIR+"/"+t.getTaskId()+"/"+"job.xml");
+            Path localJarFile =
+              this.jobConf.getLocalPath(SUBDIR+"/"+t.getTaskId()+"/"+"job.jar");
 
             String jobFile = t.getJobFile();
-            fs.copyToLocalFile(new File(jobFile), localJobFile);
-            t.setJobFile(localJobFile.getCanonicalPath());
+            fs.copyToLocalFile(new Path(jobFile), localJobFile);
+            t.setJobFile(localJobFile.toString());
 
             JobConf jc = new JobConf(localJobFile);
             String jarFile = jc.getJar();
             if (jarFile != null) {
-              fs.copyToLocalFile(new File(jarFile), localJarFile);
-              jc.setJar(localJarFile.getCanonicalPath());
+              fs.copyToLocalFile(new Path(jarFile), localJarFile);
+              jc.setJar(localJarFile.toString());
 
-              BufferedOutputStream out =
-                new BufferedOutputStream(new FileOutputStream(localJobFile));
+              FileSystem localFs = FileSystem.getNamed("local", fConf);
+              OutputStream out = localFs.create(localJobFile);
               try {
                 jc.write(out);
               } finally {
@@ -569,7 +569,7 @@
                 runner.close();
             } catch (IOException ie) {
             }
-            this.jobConf.deleteLocalFiles(SUBDIR + File.separator + task.getTaskId());
+            this.jobConf.deleteLocalFiles(SUBDIR + "/" + task.getTaskId());
         }
     }
 
@@ -695,18 +695,14 @@
           Task task = umbilical.getTask(taskid);
           JobConf job = new JobConf(task.getJobFile());
 
-          conf.addFinalResource(new File(task.getJobFile()));
+          conf.addFinalResource(new Path(task.getJobFile()));
 
           startPinging(umbilical, taskid);        // start pinging parent
 
           try {
-              // If the user set a working directory, use it
-              String workDir = job.getWorkingDirectory();
-              if (workDir != null) {
-                FileSystem file_sys = FileSystem.get(job);
-                file_sys.setWorkingDirectory(new File(workDir));
-              }
-              task.run(job, umbilical);           // run the task
+            // use job-specified working directory
+            FileSystem.get(job).setWorkingDirectory(job.getWorkingDirectory());
+            task.run(job, umbilical);             // run the task
           } catch (FSError e) {
             LOG.log(Level.SEVERE, "FSError from child", e);
             umbilical.fsError(e.getMessage());
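
A Path cannot feed a java.io.FileOutputStream directly, so the localized job file is now written through the local FileSystem. The resulting write idiom, sketched with an invented helper:

    import java.io.IOException;
    import java.io.OutputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    /** Sketch: write bytes to a local Path (helper name invented). */
    static void writeLocal(Configuration conf, Path file, byte[] data)
        throws IOException {
      FileSystem localFs = FileSystem.getNamed("local", conf);
      OutputStream out = localFs.create(file);  // replaces new FileOutputStream(...)
      try {
        out.write(data);
      } finally {
        out.close();
      }
    }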

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java Tue Apr 18 10:05:31 2006
@@ -40,7 +40,7 @@
     final long end = start + split.getLength();
 
     // open the file and seek to the start of the split
-    final FSDataInputStream in = fs.open(split.getFile());
+    final FSDataInputStream in = fs.open(split.getPath());
     
     if (start != 0) {
       in.seek(start-1);
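
Record readers now reach their data via FileSplit.getPath() and the FileSystem, keeping the seek-back-one-byte step for positioning at the split start. A condensed sketch (helper name invented):

    import java.io.IOException;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.mapred.FileSplit;

    /** Sketch: open a split's file and position at its start. */
    static FSDataInputStream openSplit(FileSystem fs, FileSplit split)
        throws IOException {
      FSDataInputStream in = fs.open(split.getPath()); // was split.getFile()
      if (split.getStart() != 0) {
        in.seek(split.getStart() - 1);  // back up one byte, as in the diff above
      }
      return in;
    }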

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java Tue Apr 18 10:05:31 2006
@@ -17,9 +17,9 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.io.File;
 
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FSDataOutputStream;
 
 import org.apache.hadoop.io.WritableComparable;
@@ -31,7 +31,7 @@
   public RecordWriter getRecordWriter(FileSystem fs, JobConf job,
                                       String name) throws IOException {
 
-    File file = new File(job.getOutputDir(), name);
+    Path file = new Path(job.getOutputPath(), name);
 
     final FSDataOutputStream out = fs.create(file);
 

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java Tue Apr 18 10:05:31 2006
@@ -135,7 +135,7 @@
   protected void prepareTempFileSpace() {
     if (baseDir.exists()) {
       try { // start from a blank slate
-        FileUtil.fullyDelete(baseDir, conf);
+        FileUtil.fullyDelete(baseDir);
       } catch (Exception ignored) {
       }
     }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestLocalDFS.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestLocalDFS.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestLocalDFS.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestLocalDFS.java Tue Apr 18 10:05:31 2006
@@ -4,6 +4,7 @@
 import java.io.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 
 /**
  * This class tests the DFS class via the FileSystem interface in a single node
@@ -12,13 +13,13 @@
  */
 public class TestLocalDFS extends TestCase {
 
-  private void writeFile(FileSystem fileSys, File name) throws IOException {
+  private void writeFile(FileSystem fileSys, Path name) throws IOException {
     DataOutputStream stm = fileSys.create(name);
     stm.writeBytes("oom");
     stm.close();
   }
   
-  private void readFile(FileSystem fileSys, File name) throws IOException {
+  private void readFile(FileSystem fileSys, Path name) throws IOException {
     DataInputStream stm = fileSys.open(name);
     byte[] buffer = new byte[4];
     int bytesRead = stm.read(buffer, 0 ,4);
@@ -26,7 +27,7 @@
     stm.close();
   }
   
-  private void cleanupFile(FileSystem fileSys, File name) throws IOException {
+  private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
     assertTrue(fileSys.exists(name));
     fileSys.delete(name);
     assertTrue(!fileSys.exists(name));
@@ -40,22 +41,22 @@
     MiniDFSCluster cluster = new MiniDFSCluster(65312, conf);
     FileSystem fileSys = cluster.getFileSystem();
     try {
-      File orig_path = fileSys.getWorkingDirectory();
+      Path orig_path = fileSys.getWorkingDirectory();
       assertTrue(orig_path.isAbsolute());
-      File file1 = new File("somewhat/random.txt");
+      Path file1 = new Path("somewhat/random.txt");
       writeFile(fileSys, file1);
-      assertTrue(fileSys.exists(new File(orig_path, file1.getPath())));
+      assertTrue(fileSys.exists(new Path(orig_path, file1.toString())));
       fileSys.delete(file1);
-      File subdir1 = new File("/somewhere").getAbsoluteFile();
+      Path subdir1 = new Path("/somewhere");
       fileSys.setWorkingDirectory(subdir1);
       writeFile(fileSys, file1);
-      cleanupFile(fileSys, new File(subdir1, file1.getPath()));
-      File subdir2 = new File("else");
+      cleanupFile(fileSys, new Path(subdir1, file1.toString()));
+      Path subdir2 = new Path("else");
       fileSys.setWorkingDirectory(subdir2);
       writeFile(fileSys, file1);
       readFile(fileSys, file1);
-      cleanupFile(fileSys, new File(new File(subdir1, subdir2.getPath()),
-                                     file1.getPath()));
+      cleanupFile(fileSys, new Path(new Path(subdir1, subdir2.toString()),
+                                     file1.toString()));
     } finally {
       fileSys.close();
       cluster.shutdown();
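
The test pins down how relative Paths resolve: a name with no leading slash is interpreted against the FileSystem's working directory, and the expected absolute name can be rebuilt with the two-argument Path constructor. A fragment illustrating the resolution, with values echoing the test:

    import org.apache.hadoop.fs.Path;

    Path wd  = new Path("/somewhere");             // working directory
    Path rel = new Path("somewhat/random.txt");    // relative: no leading "/"
    Path abs = new Path(wd, rel.toString());
    // abs.toString() is "/somewhere/somewhat/random.txt"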

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java Tue Apr 18 10:05:31 2006
@@ -36,10 +36,10 @@
   private static final int SEEKS_PER_FILE = 4;
 
   private static String ROOT = System.getProperty("test.build.data","fs_test");
-  private static File CONTROL_DIR = new File(ROOT, "fs_control");
-  private static File WRITE_DIR = new File(ROOT, "fs_write");
-  private static File READ_DIR = new File(ROOT, "fs_read");
-  private static File DATA_DIR = new File(ROOT, "fs_data");
+  private static Path CONTROL_DIR = new Path(ROOT, "fs_control");
+  private static Path WRITE_DIR = new Path(ROOT, "fs_write");
+  private static Path READ_DIR = new Path(ROOT, "fs_read");
+  private static Path DATA_DIR = new Path(ROOT, "fs_data");
 
   public void testFs() throws Exception {
     testFs(10 * MEGA, 100, 0);
@@ -67,13 +67,12 @@
 
     LOG.info("creating control file: "+megaBytes+" bytes, "+numFiles+" files");
 
-    File controlFile = new File(CONTROL_DIR, "files");
+    Path controlFile = new Path(CONTROL_DIR, "files");
     fs.delete(controlFile);
     Random random = new Random(seed);
 
     SequenceFile.Writer writer =
-      new SequenceFile.Writer(fs, controlFile.toString(),
-                              UTF8.class, LongWritable.class);
+      new SequenceFile.Writer(fs, controlFile, UTF8.class, LongWritable.class);
 
     long totalSize = 0;
     long maxSize = ((megaBytes / numFiles) * 2) + 1;
@@ -135,7 +134,7 @@
       reporter.setStatus("creating " + name);
 
       // write to temp file initially to permit parallel execution
-      File tempFile = new File(DATA_DIR, name+suffix);
+      Path tempFile = new Path(DATA_DIR, name+suffix);
       OutputStream out = fs.create(tempFile);
 
       long written = 0;
@@ -156,7 +155,7 @@
         out.close();
       }
       // rename to final location
-      fs.rename(tempFile, new File(DATA_DIR, name));
+      fs.rename(tempFile, new Path(DATA_DIR, name));
 
       collector.collect(new UTF8("bytes"), new LongWritable(written));
 
@@ -177,7 +176,7 @@
     JobConf job = new JobConf(conf);
     job.setBoolean("fs.test.fastCheck", fastCheck);
 
-    job.setInputDir(CONTROL_DIR);
+    job.setInputPath(CONTROL_DIR);
     job.setInputFormat(SequenceFileInputFormat.class);
     job.setInputKeyClass(UTF8.class);
     job.setInputValueClass(LongWritable.class);
@@ -185,7 +184,7 @@
     job.setMapperClass(WriteMapper.class);
     job.setReducerClass(LongSumReducer.class);
 
-    job.setOutputDir(WRITE_DIR);
+    job.setOutputPath(WRITE_DIR);
     job.setOutputKeyClass(UTF8.class);
     job.setOutputValueClass(LongWritable.class);
     job.setNumReduceTasks(1);
@@ -227,7 +226,7 @@
       reporter.setStatus("opening " + name);
 
       DataInputStream in =
-        new DataInputStream(fs.open(new File(DATA_DIR, name)));
+        new DataInputStream(fs.open(new Path(DATA_DIR, name)));
 
       long read = 0;
       try {
@@ -273,7 +272,7 @@
     job.setBoolean("fs.test.fastCheck", fastCheck);
 
 
-    job.setInputDir(CONTROL_DIR);
+    job.setInputPath(CONTROL_DIR);
     job.setInputFormat(SequenceFileInputFormat.class);
     job.setInputKeyClass(UTF8.class);
     job.setInputValueClass(LongWritable.class);
@@ -281,7 +280,7 @@
     job.setMapperClass(ReadMapper.class);
     job.setReducerClass(LongSumReducer.class);
 
-    job.setOutputDir(READ_DIR);
+    job.setOutputPath(READ_DIR);
     job.setOutputKeyClass(UTF8.class);
     job.setOutputValueClass(LongWritable.class);
     job.setNumReduceTasks(1);
@@ -323,7 +322,7 @@
 
       reporter.setStatus("opening " + name);
 
-      FSDataInputStream in = fs.open(new File(DATA_DIR, name));
+      FSDataInputStream in = fs.open(new Path(DATA_DIR, name));
         
       try {
         for (int i = 0; i < SEEKS_PER_FILE; i++) {
@@ -368,7 +367,7 @@
     JobConf job = new JobConf(conf);
     job.setBoolean("fs.test.fastCheck", fastCheck);
 
-    job.setInputDir(CONTROL_DIR);
+    job.setInputPath(CONTROL_DIR);
     job.setInputFormat(SequenceFileInputFormat.class);
     job.setInputKeyClass(UTF8.class);
     job.setInputValueClass(LongWritable.class);
@@ -376,7 +375,7 @@
     job.setMapperClass(SeekMapper.class);
     job.setReducerClass(LongSumReducer.class);
 
-    job.setOutputDir(READ_DIR);
+    job.setOutputPath(READ_DIR);
     job.setOutputKeyClass(UTF8.class);
     job.setOutputValueClass(LongWritable.class);
     job.setNumReduceTasks(1);
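
JobConf's directory setters are renamed to match: setInputDir/setOutputDir become setInputPath/setOutputPath and take Paths. A fragment showing the updated job wiring, assuming a Configuration named conf is in scope:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.JobConf;

    JobConf job = new JobConf(conf);               // conf assumed in scope
    job.setInputPath(new Path("fs_test", "fs_control"));
    job.setOutputPath(new Path("fs_test", "fs_write"));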

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystem.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystem.java Tue Apr 18 10:05:31 2006
@@ -10,13 +10,13 @@
  */
 public class TestLocalFileSystem extends TestCase {
 
-  private void writeFile(FileSystem fs, File name) throws IOException {
+  private void writeFile(FileSystem fs, Path name) throws IOException {
     FSDataOutputStream stm = fs.create(name);
     stm.writeBytes("42\n");
     stm.close();
   }
   
-  private void cleanupFile(FileSystem fs, File name) throws IOException {
+  private void cleanupFile(FileSystem fs, Path name) throws IOException {
     assertTrue(fs.exists(name));
     fs.delete(name);
     assertTrue(!fs.exists(name));
@@ -28,9 +28,8 @@
   public void testWorkingDirectory() throws IOException {
     Configuration conf = new Configuration();
     FileSystem fileSys = FileSystem.getNamed("local", conf);
-    File origDir = fileSys.getWorkingDirectory();
-    File subdir = new File("build/test/data/work-dir/new subdir");
-    File subdirAbsolute = subdir.getAbsoluteFile();
+    Path origDir = fileSys.getWorkingDirectory();
+    Path subdir = new Path("build/test/data/work-dir/new subdir");
     try {
       // make sure it doesn't already exist
       assertTrue(!fileSys.exists(subdir));
@@ -41,33 +40,29 @@
       fileSys.setWorkingDirectory(subdir);
       
       // create a directory and check for it
-      File dir1 = new File("dir1");
-      File dir1Absolute = new File(subdirAbsolute, dir1.getPath());
+      Path dir1 = new Path("dir1");
       fileSys.mkdirs(dir1);
       assertTrue(fileSys.isDirectory(dir1));
-      assertTrue(fileSys.isDirectory(dir1Absolute));
       
       // delete the directory and make sure it went away
       fileSys.delete(dir1);
       assertTrue(!fileSys.exists(dir1));
-      assertTrue(!fileSys.exists(dir1Absolute));
       
       // create files and manipulate them.
-      File file1 = new File("file1");
-      File file2 = new File("sub/file2");
-      File file2_abs = new File(subdirAbsolute, file2.getPath());
+      Path file1 = new Path("file1");
+      Path file2 = new Path("sub/file2");
       writeFile(fileSys, file1);
       fileSys.copyFromLocalFile(file1, file2);
       assertTrue(fileSys.exists(file1));
       assertTrue(fileSys.isFile(file1));
-      cleanupFile(fileSys, file2_abs);
+      cleanupFile(fileSys, file2);
       fileSys.copyToLocalFile(file1, file2);
-      cleanupFile(fileSys, file2_abs);
+      cleanupFile(fileSys, file2);
       
       // try a rename
       fileSys.rename(file1, file2);
       assertTrue(!fileSys.exists(file1));
-      assertTrue(fileSys.exists(file2_abs));
+      assertTrue(fileSys.exists(file2));
       fileSys.rename(file2, file1);
       
       // try reading a file

Added: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestPath.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestPath.java?rev=394984&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestPath.java (added)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestPath.java Tue Apr 18 10:05:31 2006
@@ -0,0 +1,97 @@
+/**
+ * Copyright 2006 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs;
+
+import java.util.*;
+import junit.framework.TestCase;
+
+public class TestPath extends TestCase {
+  public void testToString() {
+    toStringTest("/");
+    toStringTest("/foo");
+    toStringTest("/foo/bar");
+    toStringTest("foo");
+    toStringTest("foo/bar");
+    toStringTest("");
+    if (Path.WINDOWS) {
+      toStringTest("c:");
+      toStringTest("c:/");
+      toStringTest("c:foo");
+      toStringTest("c:foo/bar");
+      toStringTest("c:foo/bar");
+      toStringTest("c:/foo/bar");
+    }
+  }
+
+  private void toStringTest(String pathString) {
+    assertEquals(pathString, new Path(pathString).toString());
+  }
+
+  public void testNormalize() {
+    assertEquals("/", new Path("//").toString());
+    assertEquals("/foo", new Path("/foo/").toString());
+    assertEquals("/foo", new Path("/foo/").toString());
+    assertEquals("foo", new Path("foo/").toString());
+    assertEquals("foo", new Path("foo//").toString());
+    assertEquals("foo/bar", new Path("foo//bar").toString());
+    if (Path.WINDOWS) {
+      assertEquals("c:/a/b", new Path("c:\\a\\b").toString());
+    }
+  }
+
+  public void testIsAbsolute() {
+    assertTrue(new Path("/").isAbsolute());
+    assertTrue(new Path("/foo").isAbsolute());
+    assertFalse(new Path("foo").isAbsolute());
+    assertFalse(new Path("foo/bar").isAbsolute());
+    assertFalse(new Path("").isAbsolute());
+    if (Path.WINDOWS) {
+      assertTrue(new Path("c:/a/b").isAbsolute());
+      assertFalse(new Path("c:a/b").isAbsolute());
+    }
+  }
+
+  public void testParent() {
+    assertEquals(new Path("/foo"), new Path("/foo/bar").getParent());
+    assertEquals(new Path("foo"), new Path("foo/bar").getParent());
+    assertEquals(new Path("/"), new Path("/foo").getParent());
+    if (Path.WINDOWS) {
+      assertEquals(new Path("c:/"), new Path("c:/foo").getParent());
+    }
+  }
+
+  public void testChild() {
+    assertEquals(new Path(""), new Path("", ""));
+    assertEquals(new Path("/"), new Path("/", ""));
+    assertEquals(new Path("/"), new Path("", "/"));
+    assertEquals(new Path("/foo"), new Path("/", "foo"));
+    assertEquals(new Path("/foo/bar"), new Path("/foo", "bar"));
+    assertEquals(new Path("/foo/bar/baz"), new Path("/foo/bar", "baz"));
+    assertEquals(new Path("/foo/bar/baz"), new Path("/foo", "bar/baz"));
+    assertEquals(new Path("foo"), new Path("", "foo"));
+    assertEquals(new Path("foo/bar"), new Path("foo", "bar"));
+    assertEquals(new Path("foo/bar/baz"), new Path("foo", "bar/baz"));
+    assertEquals(new Path("foo/bar/baz"), new Path("foo/bar", "baz"));
+    assertEquals(new Path("/foo"), new Path("/bar", "/foo"));
+    if (Path.WINDOWS) {
+      assertEquals(new Path("c:/foo"), new Path("c:/bar", "/foo"));
+      assertEquals(new Path("c:/foo"), new Path("/bar", "c:/foo"));
+      assertEquals(new Path("c:/foo"), new Path("d:/bar", "c:/foo"));
+    }
+  }
+
+}

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java Tue Apr 18 10:05:31 2006
@@ -38,7 +38,7 @@
     int count = 1024 * 10;
     int megabytes = 1;
     int factor = 5;
-    String file = System.getProperty("test.build.data",".") + "/test.seq";
+    Path file = new Path(System.getProperty("test.build.data",".")+"/test.seq");
  
     int seed = new Random().nextInt();
 
@@ -65,9 +65,9 @@
   }
 
   private static void writeTest(FileSystem fs, int count, int seed,
-                                String file, boolean compress)
+                                Path file, boolean compress)
     throws IOException {
-    new File(file).delete();
+    fs.delete(file);
     LOG.fine("creating with " + count + " records");
     SequenceFile.Writer writer =
       new SequenceFile.Writer(fs, file, RandomDatum.class, RandomDatum.class,
@@ -83,7 +83,7 @@
     writer.close();
   }
 
-  private static void readTest(FileSystem fs, int count, int seed, String file)
+  private static void readTest(FileSystem fs, int count, int seed, Path file)
     throws IOException {
     RandomDatum k = new RandomDatum();
     RandomDatum v = new RandomDatum();
@@ -107,16 +107,16 @@
 
 
   private static void sortTest(FileSystem fs, int count, int megabytes, 
-                               int factor, boolean fast, String file)
+                               int factor, boolean fast, Path file)
     throws IOException {
-    new File(file+".sorted").delete();
+    fs.delete(new Path(file+".sorted"));
     SequenceFile.Sorter sorter = newSorter(fs, fast, megabytes, factor);
     LOG.fine("sorting " + count + " records");
-    sorter.sort(file, file+".sorted");
+    sorter.sort(file, file.suffix(".sorted"));
     LOG.fine("done sorting " + count + " records");
   }
 
-  private static void checkSort(FileSystem fs, int count, int seed, String file)
+  private static void checkSort(FileSystem fs, int count, int seed, Path file)
     throws IOException {
     LOG.fine("sorting " + count + " records in memory for check");
     RandomDatum.Generator generator = new RandomDatum.Generator(seed);
@@ -132,7 +132,8 @@
     RandomDatum k = new RandomDatum();
     RandomDatum v = new RandomDatum();
     Iterator iterator = map.entrySet().iterator();
-    SequenceFile.Reader reader = new SequenceFile.Reader(fs, file + ".sorted", conf);
+    SequenceFile.Reader reader =
+      new SequenceFile.Reader(fs, file.suffix(".sorted"), conf);
     for (int i = 0; i < count; i++) {
       Map.Entry entry = (Map.Entry)iterator.next();
       RandomDatum key = (RandomDatum)entry.getKey();
@@ -151,21 +152,21 @@
   }
 
   private static void mergeTest(FileSystem fs, int count, int seed, 
-                                String file, boolean fast, int factor, 
+                                Path file, boolean fast, int factor, 
                                 int megabytes)
     throws IOException {
 
     LOG.fine("creating "+factor+" files with "+count/factor+" records");
 
     SequenceFile.Writer[] writers = new SequenceFile.Writer[factor];
-    String[] names = new String[factor];
-    String[] sortedNames = new String[factor];
+    Path[] names = new Path[factor];
+    Path[] sortedNames = new Path[factor];
     
     for (int i = 0; i < factor; i++) {
-      names[i] = file+"."+i;
-      sortedNames[i] = names[i] + ".sorted";
-      fs.delete(new File(names[i]));
-      fs.delete(new File(sortedNames[i]));
+      names[i] = file.suffix("."+i);
+      sortedNames[i] = names[i].suffix(".sorted");
+      fs.delete(names[i]);
+      fs.delete(sortedNames[i]);
       writers[i] =
         new SequenceFile.Writer(fs, names[i], RandomDatum.class,RandomDatum.class);
     }
@@ -189,8 +190,9 @@
     }
 
     LOG.fine("merging " + factor + " files with " + count/factor + " records");
-    fs.delete(new File(file+".sorted"));
-    newSorter(fs, fast, megabytes, factor).merge(sortedNames, file+".sorted");
+    fs.delete(new Path(file+".sorted"));
+    newSorter(fs, fast, megabytes, factor)
+      .merge(sortedNames, file.suffix(".sorted"));
   }
 
   private static SequenceFile.Sorter newSorter(FileSystem fs, 
@@ -216,7 +218,7 @@
     boolean fast = false;
     boolean merge = false;
     boolean compress = false;
-    String file = null;
+    Path file = null;
     String usage = "Usage: SequenceFile (-local | -dfs <namenode:port>) [-count N] [-megabytes M] [-factor F] [-nocreate] [-check] [-fast] [-merge] [-compress] file";
     
     if (args.length == 0) {
@@ -247,7 +249,7 @@
               compress = true;
           } else {
               // file is required parameter
-              file = args[i];
+              file = new Path(args[i]);
           }
         }
         LOG.info("count = " + count);
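
Derived file names that used to be built by string concatenation now use Path.suffix(), which appends to the last path element. For example:

    import org.apache.hadoop.fs.Path;

    public class SuffixDemo {                      // name invented
      public static void main(String[] args) {
        Path file = new Path("/data/test.seq");    // sample path
        // suffix() appends to the final element, replacing file + ".sorted"
        System.out.println(file.suffix(".sorted")); // prints /data/test.seq.sorted
      }
    }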


