From cutt...@apache.org
Subject svn commit: r394984 [1/3] - in /lucene/hadoop/trunk: ./ conf/ src/examples/org/apache/hadoop/examples/ src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/io/ src/java/org/apache/ha...
Date Tue, 18 Apr 2006 17:05:34 GMT
Author: cutting
Date: Tue Apr 18 10:05:31 2006
New Revision: 394984

URL: http://svn.apache.org/viewcvs?rev=394984&view=rev
Log:
Fixed HADOOP-129.  Replaced uses of java.io.File in the FileSystem API with a new class named Path.  Also, dfs.data.dir and mapred.local.dir may no longer be space-separated; they must now be comma-separated lists of directories.
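
(Illustration only, not part of this commit: a minimal sketch of how job setup
code migrates from java.io.File to the new Path class, mirroring the Grep.java
and WordCount.java changes below.  The class name and argument handling are
hypothetical.)

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.JobConf;

    public class PathMigrationSketch {
      public static void main(String[] args) {
        Configuration defaults = new Configuration();
        JobConf job = new JobConf(defaults, PathMigrationSketch.class);
        // Before HADOOP-129 (File-based, now deprecated):
        //   job.setInputDir(new File(args[0]));
        //   job.setOutputDir(new File(args[1]));
        job.setInputPath(new Path(args[0]));    // Path replaces java.io.File
        job.setOutputPath(new Path(args[1]));
      }
    }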

Added:
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DfsPath.java
      - copied, changed from r394756, lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFile.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/PathFilter.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestPath.java
Removed:
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFile.java
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/conf/hadoop-default.xml
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFileInfo.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataInputStream.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataOutputStream.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalFileSystem.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/InputFormatBase.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/LocalJobRunner.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapFileOutputFormat.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapOutputFile.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/OutputFormatBase.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFormat.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileRecordReader.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextInputFormat.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestLocalDFS.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystem.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MapredLoadTest.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/CHANGES.txt?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Tue Apr 18 10:05:31 2006
@@ -21,6 +21,19 @@
 
  6. Fix HADOOP-128.  Improved DFS error handling. (Owen O'Malley via cutting)
 
+ 7. Fix HADOOP-129.  Replace uses of java.io.File with a new class named
+    Path.  This fixes bugs where java.io.File methods were called
+    directly when FileSystem methods were desired, and reduces the
+    likelihood of such bugs in the future.  It also makes the handling
+    of pathnames more consistent between local and dfs FileSystems and
+    between Windows and Unix. java.io.File-based methods are still
+    available for back-compatibility, but are deprecated and will be
+    removed once 0.2 is released. (cutting)
+
+ 8. Change dfs.data.dir and mapred.local.dir to be comma-separated
+    lists of directories, no longer space-separated. This fixes
+    several bugs on Windows. (cutting)
+
 
 Release 0.1.1 - 2006-04-08
 

Modified: lucene/hadoop/trunk/conf/hadoop-default.xml
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/conf/hadoop-default.xml?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/conf/hadoop-default.xml (original)
+++ lucene/hadoop/trunk/conf/hadoop-default.xml Tue Apr 18 10:05:31 2006
@@ -84,7 +84,7 @@
   <name>dfs.data.dir</name>
   <value>/tmp/hadoop/dfs/data</value>
   <description>Determines where on the local filesystem an DFS data node
-  should store its blocks.  If this is a comma- or space-delimited
+  should store its blocks.  If this is a comma-delimited
   list of directories, then data will be stored in all named
   directories, typically on different devices.</description>
 </property>
@@ -164,7 +164,7 @@
   <name>mapred.local.dir</name>
   <value>/tmp/hadoop/mapred/local</value>
   <description>The local directory where MapReduce stores intermediate
-  data files.  May be a space- or comma- separated list of
+  data files.  May be a comma-separated list of
   directories on different devices in order to spread disk i/o.
   </description>
 </property>
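
(Hypothetical illustration, not part of the commit: with this change, a
site-specific override must separate multiple directories with commas only;
the directory names below are placeholders.)

    import org.apache.hadoop.conf.Configuration;

    public class CommaSeparatedDirsSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Two data directories on different devices, now comma-separated:
        conf.set("dfs.data.dir",
                 "/disk1/hadoop/dfs/data,/disk2/hadoop/dfs/data");
        // A space-separated value such as "/disk1/... /disk2/..." is no
        // longer split into multiple directories (see the
        // Configuration.getStrings change below).
      }
    }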

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java Tue Apr 18 10:05:31 2006
@@ -29,7 +29,8 @@
 
 import org.apache.hadoop.conf.Configuration;
 
-import java.io.File;
+import org.apache.hadoop.fs.Path;
+
 import java.util.Random;
 
 /* Extracts matching regexs from input files and counts them. */
@@ -44,14 +45,14 @@
 
     Configuration defaults = new Configuration();
 
-    File tempDir =
-      new File("grep-temp-"+
+    Path tempDir =
+      new Path("grep-temp-"+
                Integer.toString(new Random().nextInt(Integer.MAX_VALUE)));
 
     JobConf grepJob = new JobConf(defaults, Grep.class);
     grepJob.setJobName("grep-search");
 
-    grepJob.setInputDir(new File(args[0]));
+    grepJob.setInputPath(new Path(args[0]));
 
     grepJob.setMapperClass(RegexMapper.class);
     grepJob.set("mapred.mapper.regex", args[2]);
@@ -61,7 +62,7 @@
     grepJob.setCombinerClass(LongSumReducer.class);
     grepJob.setReducerClass(LongSumReducer.class);
 
-    grepJob.setOutputDir(tempDir);
+    grepJob.setOutputPath(tempDir);
     grepJob.setOutputFormat(SequenceFileOutputFormat.class);
     grepJob.setOutputKeyClass(UTF8.class);
     grepJob.setOutputValueClass(LongWritable.class);
@@ -71,7 +72,7 @@
     JobConf sortJob = new JobConf(defaults, Grep.class);
     sortJob.setJobName("grep-sort");
 
-    sortJob.setInputDir(tempDir);
+    sortJob.setInputPath(tempDir);
     sortJob.setInputFormat(SequenceFileInputFormat.class);
     sortJob.setInputKeyClass(UTF8.class);
     sortJob.setInputValueClass(LongWritable.class);
@@ -79,7 +80,7 @@
     sortJob.setMapperClass(InverseMapper.class);
 
     sortJob.setNumReduceTasks(1);                 // write a single file
-    sortJob.setOutputDir(new File(args[1]));
+    sortJob.setOutputPath(new Path(args[1]));
     sortJob.setOutputKeyComparatorClass           // sort by decreasing freq
       (LongWritable.DecreasingComparator.class);
 

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java Tue Apr 18 10:05:31 2006
@@ -20,6 +20,7 @@
 import java.util.*;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.io.Writable;
@@ -134,8 +135,8 @@
           other_args.size() + " instead of 2.");
       printUsage();
     }
-    conf.setInputDir(new File((String) other_args.get(0)));
-    conf.setOutputDir(new File((String) other_args.get(1)));
+    conf.setInputPath(new Path((String) other_args.get(0)));
+    conf.setOutputPath(new Path((String) other_args.get(1)));
     
     // Uncomment to run locally in a single process
     // conf.set("mapred.job.tracker", "local");

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java Tue Apr 18 10:05:31 2006
@@ -30,14 +30,16 @@
 import javax.xml.transform.stream.StreamResult;
 
 import org.apache.hadoop.util.LogFormatter;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 
 /** Provides access to configuration parameters.  Configurations are specified
  * by resources.  A resource contains a set of name/value pairs.
  *
- * <p>Each resources is named by either a String or by a File.  If named by a
+ * <p>Each resources is named by either a String or by a Path.  If named by a
  * String, then the classpath is examined for a file with that name.  If a
- * File, then the filesystem is examined directly, without referring to the
- * CLASSPATH.
+ * Path, then the local filesystem is examined directly, without referring to
+ * the CLASSPATH.
  *
  * <p>Configuration resources are of two types: default and
  * final.  Default values are loaded first and final values are loaded last, and
@@ -78,7 +80,7 @@
   }
 
   /** Add a default resource. */
-  public void addDefaultResource(File file) {
+  public void addDefaultResource(Path file) {
     addResource(defaultResources, file);
   }
 
@@ -88,7 +90,7 @@
   }
 
   /** Add a final resource. */
-  public void addFinalResource(File file) {
+  public void addFinalResource(Path file) {
     addResource(finalResources, file);
   }
 
@@ -210,13 +212,13 @@
 
   /** Returns the value of the <code>name</code> property as an array of
    * strings.  If no such property is specified, then <code>null</code>
-   * is returned.  Values are whitespace or comma delimted.
+   * is returned.  Values are comma delimited.
    */
   public String[] getStrings(String name) {
     String valueString = get(name);
     if (valueString == null)
       return null;
-    StringTokenizer tokenizer = new StringTokenizer (valueString,", \t\n\r\f");
+    StringTokenizer tokenizer = new StringTokenizer (valueString,",");
     List values = new ArrayList();
     while (tokenizer.hasMoreTokens()) {
       values.add(tokenizer.nextToken());
@@ -263,17 +265,39 @@
     set(propertyName, theClass.getName());
   }
 
-  /** Returns a file name under a directory named in <i>dirsProp</i> with the
-   * given <i>path</i>.  If <i>dirsProp</i> contains multiple directories, then
-   * one is chosen based on <i>path</i>'s hash code.  If the selected directory
-   * does not exist, an attempt is made to create it.
+  /** Returns a local file under a directory named in <i>dirsProp</i> with
+   * the given <i>path</i>.  If <i>dirsProp</i> contains multiple directories,
+   * then one is chosen based on <i>path</i>'s hash code.  If the selected
+   * directory does not exist, an attempt is made to create it.
    */
-  public File getFile(String dirsProp, String path) throws IOException {
+  public Path getLocalPath(String dirsProp, String path)
+    throws IOException {
     String[] dirs = getStrings(dirsProp);
     int hashCode = path.hashCode();
+    FileSystem fs = FileSystem.getNamed("local", this);
     for (int i = 0; i < dirs.length; i++) {  // try each local dir
       int index = (hashCode+i & Integer.MAX_VALUE) % dirs.length;
-      File file = new File(dirs[index], path).getAbsoluteFile();
+      Path file = new Path(dirs[index], path);
+      Path dir = file.getParent();
+      if (fs.exists(dir) || fs.mkdirs(dir)) {
+        return file;
+      }
+    }
+    throw new IOException("No valid local directories in property: "+dirsProp);
+  }
+
+  /** Returns a local file name under a directory named in <i>dirsProp</i> with
+   * the given <i>path</i>.  If <i>dirsProp</i> contains multiple directories,
+   * then one is chosen based on <i>path</i>'s hash code.  If the selected
+   * directory does not exist, an attempt is made to create it.
+   */
+  public File getFile(String dirsProp, String path)
+    throws IOException {
+    String[] dirs = getStrings(dirsProp);
+    int hashCode = path.hashCode();
+    for (int i = 0; i < dirs.length; i++) {  // try each local dir
+      int index = (hashCode+i & Integer.MAX_VALUE) % dirs.length;
+      File file = new File(dirs[index], path);
       File dir = file.getParentFile();
       if (dir.exists() || dir.mkdirs()) {
         return file;
@@ -283,6 +307,7 @@
   }
 
 
+
   /** Returns the URL for the named resource. */
   public URL getResource(String name) {
     return classLoader.getResource(name);
@@ -358,11 +383,17 @@
           LOG.info("parsing " + url);
           doc = builder.parse(url.toString());
         }
-      } else if (name instanceof File) {          // a file resource
-        File file = (File)name;
-        if (file.exists()) {
+      } else if (name instanceof Path) {          // a file resource
+        Path file = (Path)name;
+        FileSystem fs = FileSystem.getNamed("local", this);
+        if (fs.exists(file)) {
           LOG.info("parsing " + file);
-          doc = builder.parse(file);
+          InputStream in = new BufferedInputStream(fs.openRaw(file));
+          try {
+            doc = builder.parse(in);
+          } finally {
+            in.close();
+          }
         }
       }
 
@@ -466,8 +497,8 @@
         sb.append(" , ");
       }
       Object obj = i.next();
-      if (obj instanceof File) {
-        sb.append((File)obj);
+      if (obj instanceof Path) {
+        sb.append((Path)obj);
       } else {
         sb.append((String)obj);
       }
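
(A minimal usage sketch for the new Configuration.getLocalPath shown above;
the property value and the relative path "job_1/map_0.out" are hypothetical.)

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class GetLocalPathSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("mapred.local.dir", "/disk1/mapred/local,/disk2/mapred/local");
        // One directory is chosen from the comma-separated list by the hash of
        // the relative path, and its parent is created on the local FileSystem
        // if it does not already exist.
        Path local = conf.getLocalPath("mapred.local.dir", "job_1/map_0.out");
        System.out.println(local);
      }
    }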

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java Tue Apr 18 10:05:31 2006
@@ -157,7 +157,7 @@
 
     /**
      */
-    public DFSFileInfo[] listFiles(UTF8 src) throws IOException {
+    public DFSFileInfo[] listPaths(UTF8 src) throws IOException {
         return namenode.getListing(src.toString());
     }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFileInfo.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFileInfo.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFileInfo.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFileInfo.java Tue Apr 18 10:05:31 2006
@@ -16,6 +16,7 @@
 package org.apache.hadoop.dfs;
 
 import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.Path;
 
 import java.io.*;
 
@@ -68,13 +69,13 @@
     /**
      */
     public String getName() {
-        return new File(path.toString()).getName();
+        return new Path(path.toString()).getName();
     }
 
     /**
      */
     public String getParent() {
-        return DFSFile.getDFSParent(path.toString());
+      return new Path(path.toString()).getParent().toString();
     }
 
     /**

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java Tue Apr 18 10:05:31 2006
@@ -38,37 +38,37 @@
     /**
      * Add a local file to the indicated name in DFS. src is kept.
      */
-    void copyFromLocal(File src, String dstf) throws IOException {
-        fs.copyFromLocalFile(src, new File(dstf));
+    void copyFromLocal(Path src, String dstf) throws IOException {
+        fs.copyFromLocalFile(src, new Path(dstf));
     }
 
     /**
      * Add a local file to the indicated name in DFS. src is removed.
      */
-    void moveFromLocal(File src, String dstf) throws IOException {
-        fs.moveFromLocalFile(src, new File(dstf));
+    void moveFromLocal(Path src, String dstf) throws IOException {
+        fs.moveFromLocalFile(src, new Path(dstf));
     }
 
     /**
      * Obtain the indicated DFS file and copy to the local name.
      * srcf is kept.
      */
-    void copyToLocal(String srcf, File dst) throws IOException {
-        fs.copyToLocalFile(new File(srcf), dst);
+    void copyToLocal(String srcf, Path dst) throws IOException {
+        fs.copyToLocalFile(new Path(srcf), dst);
     }
 
     /**
      * Obtain the indicated DFS file and copy to the local name.
      * srcf is removed.
      */
-    void moveToLocal(String srcf, File dst) throws IOException {
+    void moveToLocal(String srcf, Path dst) throws IOException {
         System.err.println("Option '-moveToLocal' is not implemented yet.");
     }
 
     void cat(String srcf) throws IOException {
-      FSDataInputStream in = fs.open(new File(srcf));
+      FSDataInputStream in = fs.open(new Path(srcf));
       try {
-        DataInputStream din = new DataInputStream(new BufferedInputStream(in));
+        BufferedReader din = new BufferedReader(new InputStreamReader(in));
         String line;
         while((line = din.readLine()) != null) {
           System.out.println(line);      
@@ -82,7 +82,7 @@
      * Get a listing of all files in DFS at the indicated name
      */
     public void ls(String src, boolean recursive) throws IOException {
-        File items[] = fs.listFiles(new File(src));
+        Path items[] = fs.listPaths(new Path(src));
         if (items == null) {
             System.out.println("Could not get listing for " + src);
         } else {
@@ -90,10 +90,10 @@
             	System.out.println("Found " + items.length + " items");
             }
             for (int i = 0; i < items.length; i++) {
-                File cur = items[i];
-                System.out.println(cur.getPath() + "\t" + (cur.isDirectory() ? "<dir>" : ("" + cur.length())));
-                if(recursive && cur.isDirectory()) {
-									 ls(cur.getPath(), recursive);
+                Path cur = items[i];
+                System.out.println(cur + "\t" + (fs.isDirectory(cur) ? "<dir>" : ("" + fs.getLength(cur))));
+                if(recursive && fs.isDirectory(cur)) {
+                  ls(cur.toString(), recursive);
                 }
             }
         }
@@ -102,14 +102,14 @@
     /**
      */
     public void du(String src) throws IOException {
-        File items[] = fs.listFiles(new File(src));
+        Path items[] = fs.listPaths(new Path(src));
         if (items == null) {
             System.out.println("Could not get listing for " + src);
         } else {
             System.out.println("Found " + items.length + " items");
             for (int i = 0; i < items.length; i++) {
-                DFSFile cur = (DFSFile) items[i];
-                System.out.println(cur.getPath() + "\t" + cur.getContentsLength());
+                DfsPath cur = (DfsPath) items[i];
+                System.out.println(cur + "\t" + cur.getContentsLength());
             }
         }
     }
@@ -118,7 +118,7 @@
      * Create the given dir
      */
     public void mkdir(String src) throws IOException {
-        File f = new File(src);
+        Path f = new Path(src);
         fs.mkdirs(f);
     }
     
@@ -126,7 +126,7 @@
      * Rename an DFS file
      */
     public void rename(String srcf, String dstf) throws IOException {
-        if (fs.rename(new File(srcf), new File(dstf))) {
+        if (fs.rename(new Path(srcf), new Path(dstf))) {
             System.out.println("Renamed " + srcf + " to " + dstf);
         } else {
             System.out.println("Rename failed");
@@ -137,14 +137,14 @@
      * Copy an DFS file
      */
     public void copy(String srcf, String dstf, Configuration conf) throws IOException {
-      DistributedFileSystem.doCopy(fs, new File(srcf), fs, new File(dstf), true, conf);
+      FileUtil.copy(fs, new Path(srcf), fs, new Path(dstf), false, conf);
     }
 
     /**
      * Delete an DFS file
      */
     public void delete(String srcf) throws IOException {
-        if (fs.delete(new File(srcf))) {
+        if (fs.delete(new Path(srcf))) {
             System.out.println("Deleted " + srcf);
         } else {
             System.out.println("Delete failed");
@@ -228,15 +228,15 @@
             DFSShell tc = new DFSShell(fs);
 
             if ("-put".equals(cmd) || "-copyFromLocal".equals(cmd)) {
-                tc.copyFromLocal(new File(argv[i++]), argv[i++]);
+                tc.copyFromLocal(new Path(argv[i++]), argv[i++]);
             } else if ("-moveFromLocal".equals(cmd)) {
-                tc.moveFromLocal(new File(argv[i++]), argv[i++]);
+                tc.moveFromLocal(new Path(argv[i++]), argv[i++]);
             } else if ("-get".equals(cmd) || "-copyToLocal".equals(cmd)) {
-                tc.copyToLocal(argv[i++], new File(argv[i++]));
+                tc.copyToLocal(argv[i++], new Path(argv[i++]));
             } else if ("-cat".equals(cmd)) {
                 tc.cat(argv[i++]);
             } else if ("-moveToLocal".equals(cmd)) {
-                tc.moveToLocal(argv[i++], new File(argv[i++]));
+                tc.moveToLocal(argv[i++], new Path(argv[i++]));
             } else if ("-ls".equals(cmd)) {
                 String arg = i < argv.length ? argv[i++] : "";
                 tc.ls(arg, false);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java Tue Apr 18 10:05:31 2006
@@ -105,7 +105,7 @@
    * @throws Exception
    */
   public Result fsck(String path) throws Exception {
-    DFSFileInfo[] files = dfs.listFiles(new UTF8(path));
+    DFSFileInfo[] files = dfs.listPaths(new UTF8(path));
     Result res = new Result();
     res.setReplication(conf.getInt("dfs.replication", 3));
     for (int i = 0; i < files.length; i++) {
@@ -119,7 +119,7 @@
       if (showFiles)
         System.out.println(file.getPath() + " <dir>");
       res.totalDirs++;
-      DFSFileInfo[] files = dfs.listFiles(new UTF8(file.getPath()));
+      DFSFileInfo[] files = dfs.listPaths(new UTF8(file.getPath()));
       for (int i = 0; i < files.length; i++) {
         check(files[i], res);
       }

Copied: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DfsPath.java (from r394756, lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFile.java)
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DfsPath.java?p2=lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DfsPath.java&p1=lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFile.java&r1=394756&r2=394984&rev=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DfsPath.java Tue Apr 18 10:05:31 2006
@@ -17,53 +17,18 @@
 
 import java.io.*;
 
+import org.apache.hadoop.fs.Path;
 
-/*****************************************************************
- * DFSFile is a traditional java File that's been annotated with
- * some extra information.
- *
- * @author Mike Cafarella
- *****************************************************************/
-class DFSFile extends File {
+
+/** DfsPath is a Path that's been annotated with some extra information. */
+class DfsPath extends Path {
     DFSFileInfo info;
 
-    /** Separator used in DFS filenames. */
-    public static final String DFS_FILE_SEPARATOR = "/";
-    
-    /**
-     */
-    public DFSFile(DFSFileInfo info) {
+    public DfsPath(DFSFileInfo info) {
         super(info.getPath());
         this.info = info;
     }
 
-    /**
-     * A number of File methods are unsupported in this subclass
-     */
-    public boolean canRead() {
-        return false;
-    }
-    public boolean canWrite() {
-        return false;
-    }
-    public boolean createNewFile() {
-        return false;
-    }
-    public boolean delete() {
-        return false;
-    }
-    public void deleteOnExit() {
-    }
-    public boolean isHidden() {
-        return false;
-    }
-    public boolean isAbsolute() {
-        return true;
-    }
-
-    /**
-     * We need to reimplement some of them
-     */
     public boolean isDirectory() {
         return info.isDir();
     }
@@ -73,29 +38,7 @@
     public long length() {
         return info.getLen();
     }
-
-    /**
-     * And add a few extras
-     */
     public long getContentsLength() {
         return info.getContentsLen();
-    }
-    
-    /**
-     * Retrieving parent path from DFS path string
-     * @param path - DFS path 
-     * @return - parent path of DFS path, or null if no parent exist.
-     */
-    public static String getDFSParent(String path) {
-        if (path == null)
-            return null;
-        if (DFS_FILE_SEPARATOR.equals(path))
-            return null;
-        int index = path.lastIndexOf(DFS_FILE_SEPARATOR); 
-        if (index == -1)
-            return null;
-        if (index == 0)
-            return DFS_FILE_SEPARATOR;
-        return path.substring(0, index);
     }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java Tue Apr 18 10:05:31 2006
@@ -32,10 +32,11 @@
  * @author Mike Cafarella
  *****************************************************************/
 public class DistributedFileSystem extends FileSystem {
-    private File workingDir = 
-      new File("/user", System.getProperty("user.name")).getAbsoluteFile();
+    private Path workingDir = 
+      new Path("/user", System.getProperty("user.name"));
 
     private String name;
+    private FileSystem localFs;
 
     DFSClient dfs;
 
@@ -45,40 +46,40 @@
       super(conf);
       this.dfs = new DFSClient(namenode, conf);
       this.name = namenode.getHostName() + ":" + namenode.getPort();
+      this.localFs = getNamed("local", conf);
     }
 
     public String getName() { return name; }
 
-    public File getWorkingDirectory() {
+    public Path getWorkingDirectory() {
       return workingDir;
     }
     
-    private File makeAbsolute(File f) {
-      if (isAbsolute(f)) {
+    private Path makeAbsolute(Path f) {
+      if (f.isAbsolute()) {
         return f;
       } else {
-        return new File(workingDir, f.getPath());
+        return new Path(workingDir, f);
       }
     }
     
-    public void setWorkingDirectory(File dir) {
+    public void setWorkingDirectory(Path dir) {
       workingDir = makeAbsolute(dir);
     }
     
-    private UTF8 getPath(File file) {
-      String path = getDFSPath(makeAbsolute(file));
-      return new UTF8(path);
+    private UTF8 getPath(Path file) {
+      return new UTF8(makeAbsolute(file).toString());
     }
 
-    public String[][] getFileCacheHints(File f, long start, long len) throws IOException {
+    public String[][] getFileCacheHints(Path f, long start, long len) throws IOException {
       return dfs.getHints(getPath(f), start, len);
     }
 
-    public FSInputStream openRaw(File f) throws IOException {
+    public FSInputStream openRaw(Path f) throws IOException {
       return dfs.open(getPath(f));
     }
 
-    public FSOutputStream createRaw(File f, boolean overwrite, short replication)
+    public FSOutputStream createRaw(Path f, boolean overwrite, short replication)
       throws IOException {
       return dfs.create(getPath(f), overwrite, replication);
     }
@@ -86,161 +87,85 @@
     /**
      * Rename files/dirs
      */
-    public boolean renameRaw(File src, File dst) throws IOException {
+    public boolean renameRaw(Path src, Path dst) throws IOException {
       return dfs.rename(getPath(src), getPath(dst));
     }
 
     /**
-     * Get rid of File f, whether a true file or dir.
+     * Get rid of Path f, whether a true file or dir.
      */
-    public boolean deleteRaw(File f) throws IOException {
+    public boolean deleteRaw(Path f) throws IOException {
         return dfs.delete(getPath(f));
     }
 
-    public boolean exists(File f) throws IOException {
+    public boolean exists(Path f) throws IOException {
         return dfs.exists(getPath(f));
     }
 
-    public boolean isDirectory(File f) throws IOException {
-        if (f instanceof DFSFile) {
-          return ((DFSFile)f).isDirectory();
+    public boolean isDirectory(Path f) throws IOException {
+        if (f instanceof DfsPath) {
+          return ((DfsPath)f).isDirectory();
         }
         return dfs.isDirectory(getPath(f));
     }
 
-    public boolean isAbsolute(File f) {
-      return f.isAbsolute() ||
-        f.getPath().startsWith("/") ||
-        f.getPath().startsWith("\\");
-    }
-
-    public long getLength(File f) throws IOException {
-        if (f instanceof DFSFile) {
-          return ((DFSFile)f).length();
+    public long getLength(Path f) throws IOException {
+        if (f instanceof DfsPath) {
+          return ((DfsPath)f).length();
         }
 
-        DFSFileInfo info[] = dfs.listFiles(getPath(f));
+        DFSFileInfo info[] = dfs.listPaths(getPath(f));
         return info[0].getLen();
     }
 
-    public File[] listFilesRaw(File f) throws IOException {
-        DFSFileInfo info[] = dfs.listFiles(getPath(f));
+    public Path[] listPathsRaw(Path f) throws IOException {
+        DFSFileInfo info[] = dfs.listPaths(getPath(f));
         if (info == null) {
-            return new File[0];
+            return new Path[0];
         } else {
-            File results[] = new DFSFile[info.length];
+            Path results[] = new DfsPath[info.length];
             for (int i = 0; i < info.length; i++) {
-                results[i] = new DFSFile(info[i]);
+                results[i] = new DfsPath(info[i]);
             }
             return results;
         }
     }
 
-    public void mkdirs(File f) throws IOException {
-        dfs.mkdirs(getPath(f));
+    public boolean mkdirs(Path f) throws IOException {
+        return dfs.mkdirs(getPath(f));
     }
 
-    public void lock(File f, boolean shared) throws IOException {
+    public void lock(Path f, boolean shared) throws IOException {
         dfs.lock(getPath(f), ! shared);
     }
 
-    public void release(File f) throws IOException {
+    public void release(Path f) throws IOException {
         dfs.release(getPath(f));
     }
 
-    public void moveFromLocalFile(File src, File dst) throws IOException {
-        doFromLocalFile(src, dst, true);
+    public void moveFromLocalFile(Path src, Path dst) throws IOException {
+      FileUtil.copy(localFs, src, this, dst, true, getConf());
     }
 
-    public void copyFromLocalFile(File src, File dst) throws IOException {
-        doFromLocalFile(src, dst, false);
+    public void copyFromLocalFile(Path src, Path dst) throws IOException {
+      FileUtil.copy(localFs, src, this, dst, false, getConf());
     }
 
-    private void doFromLocalFile(File src, File dst, boolean deleteSource) throws IOException {
-        FileSystem localFs = getNamed("local", getConf());
-        doCopy( localFs, src, this, dst, deleteSource, getConf() );
-    }
-
-    public static void doCopy(FileSystem srcFS, 
-                        File src, 
-                        FileSystem dstFS, 
-                        File dst, 
-                        boolean deleteSource,
-                        Configuration conf
-                       ) throws IOException {
-        if (dstFS.exists(dst)) {
-            if (! dstFS.isDirectory(dst)) {
-                throw new IOException("Target " + dst + " already exists");
-            } else {
-                dst = new File(dst, src.getName());
-                if (dstFS.exists(dst)) {
-                    throw new IOException("Target " + dst + " already exists");
-                }
-            }
-        }
-
-        if (srcFS.isDirectory(src)) {
-            dstFS.mkdirs(dst);
-            File contents[] = srcFS.listFiles(src);
-            for (int i = 0; i < contents.length; i++) {
-                doCopy( srcFS, contents[i], dstFS, new File(dst, contents[i].getName()), deleteSource, conf);
-            }
-        } else {
-            byte buf[] = new byte[conf.getInt("io.file.buffer.size", 4096)];
-            InputStream in = srcFS.open(src);
-            try {
-                OutputStream out = dstFS.create(dst);
-                try {
-                    int bytesRead = in.read(buf);
-                    while (bytesRead >= 0) {
-                        out.write(buf, 0, bytesRead);
-                        bytesRead = in.read(buf);
-                    }
-                } finally {
-                    out.close();
-                }
-            } finally {
-                in.close();
-            } 
-        }
-        if (deleteSource)
-          srcFS.delete(src);
-    }
-
-    public void copyToLocalFile(File src, File dst) throws IOException {
-        dst = dst.getCanonicalFile();
-        FileSystem localFs = getNamed("local", getConf());
-        doCopy( this, src, localFs, dst, false, getConf() );
+    public void copyToLocalFile(Path src, Path dst) throws IOException {
+      FileUtil.copy(this, src, localFs, dst, false, getConf());
     }
 
-    public File startLocalOutput(File fsOutputFile, File tmpLocalFile) throws IOException {
-        if (exists(fsOutputFile)) {
-            copyToLocalFile(fsOutputFile, tmpLocalFile);
-        }
-        return tmpLocalFile;
+    public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
+      throws IOException {
+      return tmpLocalFile;
     }
 
     /**
      * Move completed local data to DFS destination
      */
-    public void completeLocalOutput(File fsOutputFile, File tmpLocalFile) throws IOException {
-        moveFromLocalFile(tmpLocalFile, fsOutputFile);
-    }
-
-    /**
-     * Fetch remote DFS file, place at tmpLocalFile
-     */
-    public File startLocalInput(File fsInputFile, File tmpLocalFile) throws IOException {
-        copyToLocalFile(fsInputFile, tmpLocalFile);
-        return tmpLocalFile;
-    }
-
-    /**
-     * We're done with the local stuff, so delete it
-     */
-    public void completeLocalInput(File localFile) throws IOException {
-        // Get rid of the local copy - we don't need it anymore.
-        FileUtil.fullyDelete(localFile, getConf());
+    public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
+      throws IOException {
+      moveFromLocalFile(tmpLocalFile, fsOutputFile);
     }
 
     public void close() throws IOException {
@@ -255,27 +180,7 @@
         return dfs;
     }
     
-    private String getDFSPath(File f) {
-      List l = new ArrayList();
-      l.add(f.getName());
-      File parent = f.getParentFile();
-      while (parent != null) {
-        l.add(parent.getName());
-        parent = parent.getParentFile();
-      }
-      StringBuffer path = new StringBuffer();
-      path.append(l.get(l.size() - 1));
-      for (int i = l.size() - 2; i >= 0; i--) {
-        path.append(DFSFile.DFS_FILE_SEPARATOR);
-        path.append(l.get(i));
-      }
-      if (isAbsolute(f) && path.length() == 0) {
-        path.append(DFSFile.DFS_FILE_SEPARATOR);
-      }
-      return path.toString();
-    }
-
-    public void reportChecksumFailure(File f, FSInputStream in,
+    public void reportChecksumFailure(Path f, FSInputStream in,
                                       long start, long length, int crc) {
       
       // ignore for now, causing task to fail, and hope that when task is
@@ -305,7 +210,7 @@
     /** Return the total size of all files in the filesystem.*/
     public long getUsed()throws IOException{
         long used = 0;
-        DFSFileInfo dfsFiles[] = dfs.listFiles(getPath(new File("/")));
+        DFSFileInfo dfsFiles[] = dfs.listPaths(getPath(new Path("/")));
         for(int i=0;i<dfsFiles.length;i++){
             used += dfsFiles[i].getContentsLen();
         }
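
(A hedged sketch of the generic copy helper that replaces
DistributedFileSystem.doCopy; the FileUtil.copy signature is taken from the
calls above, while the paths and the local-only copy are placeholders.)

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.fs.Path;

    public class CopySketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem localFs = FileSystem.getNamed("local", conf);
        // Copy within the local file system, keeping the source
        // (deleteSource = false); the same call moves data between the local
        // and distributed file systems, as copyFromLocalFile above does.
        FileUtil.copy(localFs, new Path("/tmp/input.txt"),
                      localFs, new Path("/tmp/input-copy.txt"),
                      false, conf);
      }
    }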

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java Tue Apr 18 10:05:31 2006
@@ -199,7 +199,7 @@
         }
         this.tmp = new File(dir, "tmp");
         if (tmp.exists()) {
-            FileUtil.fullyDelete(tmp, conf);
+            FileUtil.fullyDelete(tmp);
         }
         this.tmp.mkdirs();
         this.dirTree = new FSDir(data);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java Tue Apr 18 10:05:31 2006
@@ -22,6 +22,7 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
 
 /*************************************************
  * FSDirectory stores the filesystem directory state.
@@ -150,10 +151,10 @@
         INode addNode(String path, INode newNode) {
           File target = new File( path );
           // find parent
-          String parentName = DFSFile.getDFSParent(path);
-          if (parentName == null)
+          Path parent = new Path(path).getParent();
+          if (parent == null)
             return null;
-          INode parentNode = getNode(parentName);
+          INode parentNode = getNode(parent.toString());
           if (parentNode == null)
             return null;
           // check whether the parent already has a node with that name
@@ -308,7 +309,7 @@
         File image = new File(dir, "image");
         File edits = new File(dir, "edits");
 
-        if (!((!image.exists() || FileUtil.fullyDelete(image, conf)) &&
+        if (!((!image.exists() || FileUtil.fullyDelete(image)) &&
               (!edits.exists() || edits.delete()) &&
               image.mkdirs())) {
           
@@ -591,7 +592,7 @@
 
         // Always do an implicit mkdirs for parent directory tree
         String pathString = path.toString();
-        mkdirs(DFSFile.getDFSParent(pathString));
+        mkdirs(new Path(pathString).getParent().toString());
         INode newNode = new INode( new File(pathString).getName(), blocks, replication);
         if( ! unprotectedAddFile(path, newNode) )
           return false;
@@ -808,10 +809,10 @@
         v.add(src);
 
         // All its parents
-        String parent = DFSFile.getDFSParent(src);
+        Path parent = new Path(src).getParent();
         while (parent != null) {
-            v.add(parent);
-            parent = DFSFile.getDFSParent(parent);
+            v.add(parent.toString());
+            parent = parent.getParent();
         }
 
         // Now go backwards through list of dirs, creating along

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataInputStream.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataInputStream.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataInputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataInputStream.java Tue Apr 18 10:05:31 2006
@@ -36,18 +36,18 @@
   /** Verify that data matches checksums. */
   private class Checker extends FilterInputStream implements Seekable {
     private FileSystem fs;
-    private File file;
+    private Path file;
     private FSDataInputStream sums;
     private Checksum sum = new CRC32();
     private int inSum;
 
-    public Checker(FileSystem fs, File file, Configuration conf)
+    public Checker(FileSystem fs, Path file, Configuration conf)
       throws IOException {
       super(fs.openRaw(file));
       
       this.fs = fs;
       this.file = file;
-      File sumFile = fs.getChecksumFile(file);
+      Path sumFile = fs.getChecksumFile(file);
       try {
         this.sums = new FSDataInputStream(fs.openRaw(sumFile), conf);
         byte[] version = new byte[VERSION.length];
@@ -214,14 +214,14 @@
 }
   
   
-  public FSDataInputStream(FileSystem fs, File file, int bufferSize, Configuration conf)
+  public FSDataInputStream(FileSystem fs, Path file, int bufferSize, Configuration conf)
       throws IOException {
     super(null);
     this.in = new Buffer(new PositionCache(new Checker(fs, file, conf)), bufferSize);
   }
   
   
-  public FSDataInputStream(FileSystem fs, File file, Configuration conf)
+  public FSDataInputStream(FileSystem fs, Path file, Configuration conf)
     throws IOException {
     super(null);
     int bufferSize = conf.getInt("io.file.buffer.size", 4096);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataOutputStream.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataOutputStream.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataOutputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSDataOutputStream.java Tue Apr 18 10:05:31 2006
@@ -35,7 +35,7 @@
     private int bytesPerSum;
 
     public Summer(FileSystem fs, 
-                  File file, 
+                  Path file, 
                   boolean overwrite, 
                   short replication,
                   Configuration conf)
@@ -125,7 +125,7 @@
 
   }
 
-  public FSDataOutputStream(FileSystem fs, File file,
+  public FSDataOutputStream(FileSystem fs, Path file,
                             boolean overwrite, Configuration conf,
                             int bufferSize, short replication )
   throws IOException {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java Tue Apr 18 10:05:31 2006
@@ -51,8 +51,6 @@
      * Parse the cmd-line args, starting at i.  Remove consumed args
      * from array.  We expect param in the form:
      * '-local | -dfs <namenode:port>'
-     *
-     * @deprecated use fs.default.name config option instead
      */
     public static FileSystem parseArgs(String argv[], int i, Configuration conf) throws IOException {
         /**
@@ -107,12 +105,12 @@
     }
 
     /** Return the name of the checksum file associated with a file.*/
-    public static File getChecksumFile(File file) {
-      return new File(file.getParentFile(), "."+file.getName()+".crc");
+    public static Path getChecksumFile(Path file) {
+      return new Path(file.getParent(), "."+file.getName()+".crc");
     }
 
     /** Return true iff file is a checksum file name.*/
-    public static boolean isChecksumFile(File file) {
+    public static boolean isChecksumFile(Path file) {
       String name = file.getName();
       return name.startsWith(".") && name.endsWith(".crc");
     }
@@ -135,49 +133,59 @@
      *
      * The FileSystem will simply return an elt containing 'localhost'.
      */
-    public abstract String[][] getFileCacheHints(File f, long start, long len) throws IOException;
+    public abstract String[][] getFileCacheHints(Path f, long start, long len) throws IOException;
+
+    /** @deprecated Call {@link #open(Path)} instead. */
+    public FSDataInputStream open(File f) throws IOException {
+      return open(new Path(f.toString()));
+    }
 
     /**
-     * Opens an FSDataInputStream at the indicated File.
+     * Opens an FSDataInputStream at the indicated Path.
      * @param f the file name to open
      * @param bufferSize the size of the buffer to be used.
      */
-    public FSDataInputStream open(File f, int bufferSize) throws IOException {
+    public FSDataInputStream open(Path f, int bufferSize) throws IOException {
       return new FSDataInputStream(this, f, bufferSize, getConf());
     }
     
     /**
-     * Opens an FSDataInputStream at the indicated File.
+     * Opens an FSDataInputStream at the indicated Path.
      * @param f the file to open
      */
-    public FSDataInputStream open(File f) throws IOException {
+    public FSDataInputStream open(Path f) throws IOException {
       return new FSDataInputStream(this, f, getConf());
     }
 
     /**
-     * Opens an InputStream for the indicated File, whether local
+     * Opens an InputStream for the indicated Path, whether local
      * or via DFS.
      */
-    public abstract FSInputStream openRaw(File f) throws IOException;
+    public abstract FSInputStream openRaw(Path f) throws IOException;
+
+    /** @deprecated Call {@link #create(Path)} instead. */
+    public FSDataOutputStream create(File f) throws IOException {
+      return create(new Path(f.toString()));
+    }
 
     /**
-     * Opens an FSDataOutputStream at the indicated File.
+     * Opens an FSDataOutputStream at the indicated Path.
      * Files are overwritten by default.
      */
-    public FSDataOutputStream create(File f) throws IOException {
+    public FSDataOutputStream create(Path f) throws IOException {
       return create(f, true, 
                     getConf().getInt("io.file.buffer.size", 4096),
                     (short)getConf().getInt("dfs.replication", 3));
     }
 
     /**
-     * Opens an FSDataOutputStream at the indicated File.
+     * Opens an FSDataOutputStream at the indicated Path.
      * @param f the file name to open
      * @param overwrite if a file with this name already exists, then if true,
      *   the file will be overwritten, and if false an error will be thrown.
      * @param bufferSize the size of the buffer to be used.
      */
-    public FSDataOutputStream create( File f, 
+    public FSDataOutputStream create( Path f, 
                                       boolean overwrite,
                                       int bufferSize
                                     ) throws IOException {
@@ -186,14 +194,14 @@
     }
     
     /**
-     * Opens an FSDataOutputStream at the indicated File.
+     * Opens an FSDataOutputStream at the indicated Path.
      * @param f the file name to open
      * @param overwrite if a file with this name already exists, then if true,
      *   the file will be overwritten, and if false an error will be thrown.
      * @param bufferSize the size of the buffer to be used.
      * @param replication required block replication for the file. 
      */
-    public FSDataOutputStream create( File f, 
+    public FSDataOutputStream create( Path f, 
                                       boolean overwrite,
                                       int bufferSize,
                                       short replication
@@ -202,20 +210,25 @@
                                     bufferSize, replication );
     }
 
-    /** Opens an OutputStream at the indicated File.
+    /** Opens an OutputStream at the indicated Path.
      * @param f the file name to open
      * @param overwrite if a file with this name already exists, then if true,
      *   the file will be overwritten, and if false an error will be thrown.
      * @param replication required block replication for the file. 
      */
-    public abstract FSOutputStream createRaw(File f, boolean overwrite, short replication)
+    public abstract FSOutputStream createRaw(Path f, boolean overwrite, short replication)
       throws IOException;
 
+    /** @deprecated Call {@link #createNewFile(Path)} instead. */
+    public boolean createNewFile(File f) throws IOException {
+      return createNewFile(new Path(f.toString()));
+    }
+
     /**
-     * Creates the given File as a brand-new zero-length file.  If
+     * Creates the given Path as a brand-new zero-length file.  If
      * create fails, or if it already existed, return false.
      */
-    public boolean createNewFile(File f) throws IOException {
+    public boolean createNewFile(Path f) throws IOException {
         if (exists(f)) {
             return false;
         } else {
@@ -229,18 +242,23 @@
         }
     }
 
+    /** @deprecated Call {@link #rename(Path, Path)} instead. */
+    public boolean rename(File src, File dst) throws IOException {
+      return rename(new Path(src.toString()), new Path(dst.toString()));
+    }
+
     /**
-     * Renames File src to File dst.  Can take place on local fs
+     * Renames Path src to Path dst.  Can take place on local fs
      * or remote DFS.
      */
-    public boolean rename(File src, File dst) throws IOException {
+    public boolean rename(Path src, Path dst) throws IOException {
       if (isDirectory(src)) {
         return renameRaw(src, dst);
       } else {
 
         boolean value = renameRaw(src, dst);
 
-        File checkFile = getChecksumFile(src);
+        Path checkFile = getChecksumFile(src);
         if (exists(checkFile))
           renameRaw(checkFile, getChecksumFile(dst)); // try to rename checksum
 
@@ -250,15 +268,18 @@
     }
 
     /**
-     * Renames File src to File dst.  Can take place on local fs
+     * Renames Path src to Path dst.  Can take place on local fs
      * or remote DFS.
      */
-    public abstract boolean renameRaw(File src, File dst) throws IOException;
+    public abstract boolean renameRaw(Path src, Path dst) throws IOException;
 
-    /**
-     * Deletes File
-     */
+    /** @deprecated Call {@link #delete(Path)} instead. */
     public boolean delete(File f) throws IOException {
+      return delete(new Path(f.toString()));
+    }
+
+    /** Delete a file. */
+    public boolean delete(Path f) throws IOException {
       if (isDirectory(f)) {
         return deleteRaw(f);
       } else {
@@ -268,20 +289,33 @@
     }
 
     /**
-     * Deletes File
+     * Deletes Path
      */
-    public abstract boolean deleteRaw(File f) throws IOException;
+    public abstract boolean deleteRaw(Path f) throws IOException;
 
-    /**
-     * Check if exists
-     */
-    public abstract boolean exists(File f) throws IOException;
+    /** @deprecated call {@link #exists(Path)} instead */
+    public boolean exists(File f) throws IOException {
+      return exists(new Path(f.toString()));
+    }
+
+    /** Check if exists. */
+    public abstract boolean exists(Path f) throws IOException;
+
+    /** @deprecated Call {@link #isDirectory(Path)} instead. */
+    public boolean isDirectory(File f) throws IOException {
+      return isDirectory(new Path(f.toString()));
+    }
 
     /** True iff the named path is a directory. */
-    public abstract boolean isDirectory(File f) throws IOException;
+    public abstract boolean isDirectory(Path f) throws IOException;
 
-    /** True iff the named path is a regular file. */
+    /** @deprecated Call {@link #isFile(Path)} instead. */
     public boolean isFile(File f) throws IOException {
+      return isFile(new Path(f.toString()));
+    }
+
+    /** True iff the named path is a regular file. */
+    public boolean isFile(Path f) throws IOException {
         if (exists(f) && ! isDirectory(f)) {
             return true;
         } else {
@@ -289,28 +323,59 @@
         }
     }
     
-    /** True iff the named path is absolute. */
-    public abstract boolean isAbsolute(File f);
+    /** @deprecated Call {@link #getLength(Path)} instead. */
+    public long getLength(File f) throws IOException {
+      return getLength(new Path(f.toString()));
+    }
 
     /** The number of bytes in a file. */
-    public abstract long getLength(File f) throws IOException;
+    public abstract long getLength(Path f) throws IOException;
 
-    /** List files in a directory. */
+    /** @deprecated Call {@link #listPaths(Path)} instead. */
     public File[] listFiles(File f) throws IOException {
-      return listFiles(f, new FileFilter() {
-          public boolean accept(File file) {
+      Path[] paths = listPaths(new Path(f.toString()));
+      if (paths == null)
+        return null;
+      File[] result = new File[paths.length];
+      for (int i = 0 ; i < paths.length; i++) {
+        result[i] = new File(paths[i].toString());
+      }
+      return result;
+    }
+
+    /** List files in a directory. */
+    public Path[] listPaths(Path f) throws IOException {
+      return listPaths(f, new PathFilter() {
+          public boolean accept(Path file) {
             return !isChecksumFile(file);
           }
         });
     }
 
     /** List files in a directory. */
-    public abstract File[] listFilesRaw(File f) throws IOException;
+    public abstract Path[] listPathsRaw(Path f) throws IOException;
+
+    /** @deprecated Call {@link #listPaths(Path)} instead. */
+    public File[] listFiles(File f, final FileFilter filt) throws IOException {
+      Path[] paths = listPaths(new Path(f.toString()),
+                               new PathFilter() {
+                                 public boolean accept(Path p) {
+                                   return filt.accept(new File(p.toString()));
+                                 }
+                               });
+      if (paths == null)
+        return null;
+      File[] result = new File[paths.length];
+      for (int i = 0 ; i < paths.length; i++) {
+        result[i] = new File(paths[i].toString());
+      }
+      return result;
+    }
 
     /** Filter files in a directory. */
-    public File[] listFiles(File f, FileFilter filter) throws IOException {
+    public Path[] listPaths(Path f, PathFilter filter) throws IOException {
         Vector results = new Vector();
-        File listing[] = listFilesRaw(f);
+        Path listing[] = listPathsRaw(f);
         if (listing != null) {
           for (int i = 0; i < listing.length; i++) {
             if (filter.accept(listing[i])) {
@@ -318,7 +383,7 @@
             }
           }
         }
-        return (File[]) results.toArray(new File[results.size()]);
+        return (Path[]) results.toArray(new Path[results.size()]);
     }
 
     /**
@@ -326,54 +391,75 @@
      * All relative paths will be resolved relative to it.
      * @param new_dir
      */
-    public abstract void setWorkingDirectory(File new_dir);
+    public abstract void setWorkingDirectory(Path new_dir);
     
     /**
      * Get the current working directory for the given file system
      * @return the directory pathname
      */
-    public abstract File getWorkingDirectory();
+    public abstract Path getWorkingDirectory();
     
+    /** @deprecated Call {@link #mkdirs(Path)} instead. */
+    public boolean mkdirs(File f) throws IOException {
+      return mkdirs(new Path(f.toString()));
+    }
+
     /**
      * Make the given file and all non-existent parents into
      * directories.
      */
-    public abstract void mkdirs(File f) throws IOException;
+    public abstract boolean mkdirs(Path f) throws IOException;
+
+    /** @deprecated Call {@link #lock(Path,boolean)} instead. */
+    public void lock(File f, boolean shared) throws IOException {
+      lock(new Path(f.toString()), shared);
+    }
 
     /**
-     * Obtain a lock on the given File
+     * Obtain a lock on the given Path
      */
-    public abstract void lock(File f, boolean shared) throws IOException;
+    public abstract void lock(Path f, boolean shared) throws IOException;
+
+    /** @deprecated Call {@link #release(Path)} instead. */
+    public void release(File f) throws IOException {
+      release(new Path(f.toString()));
+    }
 
     /**
      * Release the lock
      */
-    public abstract void release(File f) throws IOException;
+    public abstract void release(Path f) throws IOException;
 
     /**
      * The src file is on the local disk.  Add it to FS at
      * the given dst name and the source is kept intact afterwards
      */
-    public abstract void copyFromLocalFile(File src, File dst) throws IOException;
+    public abstract void copyFromLocalFile(Path src, Path dst) throws IOException;
 
     /**
      * The src file is on the local disk.  Add it to FS at
      * the given dst name, removing the source afterwards.
      */
-    public abstract void moveFromLocalFile(File src, File dst) throws IOException;
+    public abstract void moveFromLocalFile(Path src, Path dst) throws IOException;
 
     /**
-     * The src file is under FS2, and the dst is on the local disk.
+     * The src file is under FS, and the dst is on the local disk.
      * Copy it from FS control to the local dst name.
      */
-    public abstract void copyToLocalFile(File src, File dst) throws IOException;
+    public abstract void copyToLocalFile(Path src, Path dst) throws IOException;
 
     /**
-     * the same as copyToLocalFile(File src, File dst), except that
+     * the same as copyToLocalFile(Path src, Path dst), except that
      * the source is removed afterward.
      */
     // not implemented yet
-    //public abstract void moveToLocalFile(File src, File dst) throws IOException;
+    //public abstract void moveToLocalFile(Path src, File dst) throws IOException;
+
+    /** @deprecated Call {@link #startLocalOutput(Path, Path)} instead. */
+    public File startLocalOutput(File src, File dst) throws IOException {
+      return new File(startLocalOutput(new Path(src.toString()),
+                                       new Path(dst.toString())).toString());
+    }
 
     /**
      * Returns a local File that the user can write output to.  The caller
@@ -381,23 +467,12 @@
      * file.  If the FS is local, we write directly into the target.  If
      * the FS is remote, we write into the tmp local area.
      */
-    public abstract File startLocalOutput(File fsOutputFile, File tmpLocalFile) throws IOException;
-
-    /**
-     * Called when we're all done writing to the target.  A local FS will
-     * do nothing, because we've written to exactly the right place.  A remote
-     * FS will copy the contents of tmpLocalFile to the correct target at
-     * fsOutputFile.
-     */
-    public abstract void completeLocalOutput(File fsOutputFile, File tmpLocalFile) throws IOException;
+    public abstract Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile) throws IOException;
 
-    /**
-     * Returns a local File that the user can read from.  The caller 
-     * provides both the eventual FS target name and the local working
-     * file.  If the FS is local, we read directly from the source.  If
-     * the FS is remote, we write data into the tmp local area.
-     */
-    public abstract File startLocalInput(File fsInputFile, File tmpLocalFile) throws IOException;
+    /** @deprecated Call {@link #completeLocalOutput(Path, Path)} instead. */
+    public void completeLocalOutput(File src, File dst) throws IOException {
+      completeLocalOutput(new Path(src.toString()), new Path(dst.toString()));
+    }
 
     /**
      * Called when we're all done writing to the target.  A local FS will
@@ -405,7 +480,7 @@
      * FS will copy the contents of tmpLocalFile to the correct target at
      * fsOutputFile.
      */
-    public abstract void completeLocalInput(File localFile) throws IOException;
+    public abstract void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile) throws IOException;
 
     /**
      * No more filesystem operations are needed.  Will
@@ -421,7 +496,7 @@
      * @param length the length of the bad data in the file
      * @param crc the expected CRC32 of the data
      */
-    public abstract void reportChecksumFailure(File f, FSInputStream in,
+    public abstract void reportChecksumFailure(Path f, FSInputStream in,
                                                long start, long length,
                                                int crc);
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java Tue Apr 18 10:05:31 2006
@@ -24,100 +24,165 @@
  * A collection of file-processing util methods
  */
 public class FileUtil {
-    /**
-     * Delete a directory and all its contents.  If
-     * we return false, the directory may be partially-deleted.
-     */
-    public static boolean fullyDelete(File dir, Configuration conf) throws IOException {
-        return fullyDelete(new LocalFileSystem(conf), dir);
-    }
-    public static boolean fullyDelete(FileSystem fs, File dir) throws IOException {
-        // 20041022, xing.
-        // Currently fs.detele(File) means fully delete for both
-        // LocalFileSystem.java and DistributedFileSystem.java. So we are okay now.
-        // If implementation changes in future, it should be modified too.
-        return fs.delete(dir);
-    }
-
-    /**
-     * Copy a file's contents to a new location.
-     * Returns whether a target file was overwritten
-     */
-    public static boolean copyContents(FileSystem fs, File src, File dst, boolean overwrite, Configuration conf) throws IOException {
-        if (fs.exists(dst) && !overwrite) {
+  
+  /** @deprecated Call {@link #fullyDelete(File)}. */
+  public static boolean fullyDelete(File dir, Configuration conf)
+    throws IOException {
+    return fullyDelete(dir);
+  }
+
+  /**
+   * Delete a directory and all its contents.  If
+   * we return false, the directory may be partially-deleted.
+   */
+  public static boolean fullyDelete(File dir) throws IOException {
+    File contents[] = dir.listFiles();
+    if (contents != null) {
+      for (int i = 0; i < contents.length; i++) {
+        if (contents[i].isFile()) {
+          if (! contents[i].delete()) {
             return false;
+          }
+        } else {
+          if (! fullyDelete(contents[i])) {
+            return false;
+          }
         }
+      }
+    }
+    return dir.delete();
+  }
 
-        File dstParent = dst.getParentFile();
-        if ((dstParent != null) && (!fs.exists(dstParent))) {
-            fs.mkdirs(dstParent);
-        }
 
-        if (fs.isFile(src)) {
-            FSInputStream in = fs.openRaw(src);
-            try {
-                FSOutputStream out = fs.createRaw(dst, true, 
-                                      (short)conf.getInt("dfs.replication", 3));
-                byte buf[] = new byte[conf.getInt("io.file.buffer.size", 4096)];
-                try {
-                    int readBytes = in.read(buf);
-
-                    while (readBytes >= 0) {
-                        out.write(buf, 0, readBytes);
-                        readBytes = in.read(buf);
-                    }
-                } finally {
-                    out.close();
-                }
-            } finally {
-                in.close();
-            }
-        } else {
-            fs.mkdirs(dst);
-            File contents[] = fs.listFilesRaw(src);
-            if (contents != null) {
-                for (int i = 0; i < contents.length; i++) {
-                    File newDst = new File(dst, contents[i].getName());
-                    if (! copyContents(fs, contents[i], newDst, overwrite, conf)) {
-                        return false;
-                    }
-                }
-            }
-        }
-        return true;
+  /** Copy files between FileSystems. */
+  public static boolean copy(FileSystem srcFS, Path src, 
+                             FileSystem dstFS, Path dst, 
+                             boolean deleteSource,
+                             Configuration conf ) throws IOException {
+    dst = checkDest(src.getName(), dstFS, dst);
+
+    if (srcFS.isDirectory(src)) {
+      dstFS.mkdirs(dst);
+      Path contents[] = srcFS.listPaths(src);
+      for (int i = 0; i < contents.length; i++) {
+        copy(srcFS, contents[i], dstFS, new Path(dst, contents[i].getName()),
+             deleteSource, conf);
+      }
+    } else if (srcFS.isFile(src)) {
+      InputStream in = srcFS.open(src);
+      try {
+        copyContent(in, dstFS.create(dst), conf);
+      } finally {
+        in.close();
+      } 
+    }
+    if (deleteSource) {
+      return srcFS.delete(src);
+    } else {
+      return true;
+    }
+  }
+
+  /** Copy local files to a FileSystem. */
+  public static boolean copy(File src,
+                             FileSystem dstFS, Path dst,
+                             boolean deleteSource,
+                             Configuration conf ) throws IOException {
+    dst = checkDest(src.getName(), dstFS, dst);
+
+    if (src.isDirectory()) {
+      dstFS.mkdirs(dst);
+      File contents[] = src.listFiles();
+      for (int i = 0; i < contents.length; i++) {
+        copy(contents[i], dstFS, new Path(dst, contents[i].getName()),
+             deleteSource, conf);
+      }
+    } else if (src.isFile()) {
+      InputStream in = new FileInputStream(src);
+      try {
+        copyContent(in, dstFS.create(dst), conf);
+      } finally {
+        in.close();
+      } 
     }
+    if (deleteSource) {
+      return FileUtil.fullyDelete(src);
+    } else {
+      return true;
+    }
+  }
 
-    /**
-     * Copy a file and/or directory and all its contents (whether
-     * data or other files/dirs)
-     */
-    public static void recursiveCopy(FileSystem fs, File src, File dst, Configuration conf) throws IOException {
-        //
-        // Resolve the real target.
-        //
-        if (fs.exists(dst) && fs.isDirectory(dst)) {
-            dst = new File(dst, src.getName());
-        } else if (fs.exists(dst)) {
-            throw new IOException("Destination " + dst + " already exists");
+  /** Copy FileSystem files to local files. */
+  public static boolean copy(FileSystem srcFS, Path src, 
+                             File dst, boolean deleteSource,
+                             Configuration conf ) throws IOException {
+
+    dst = checkDest(src.getName(), dst);
+
+    if (srcFS.isDirectory(src)) {
+      dst.mkdirs();
+      Path contents[] = srcFS.listPaths(src);
+      for (int i = 0; i < contents.length; i++) {
+        copy(srcFS, contents[i], new File(dst, contents[i].getName()),
+             deleteSource, conf);
+      }
+    } else if (srcFS.isFile(src)) {
+      InputStream in = srcFS.open(src);
+      try {
+        copyContent(in, new FileOutputStream(dst), conf);
+      } finally {
+        in.close();
+      } 
+    }
+    if (deleteSource) {
+      return srcFS.delete(src);
+    } else {
+      return true;
+    }
+  }
+
+  private static void copyContent(InputStream in, OutputStream out,
+                                  Configuration conf) throws IOException {
+    byte buf[] = new byte[conf.getInt("io.file.buffer.size", 4096)];
+    try {
+      int bytesRead = in.read(buf);
+      while (bytesRead >= 0) {
+        out.write(buf, 0, bytesRead);
+        bytesRead = in.read(buf);
+      }
+    } finally {
+      out.close();
+    }
+  }
+
+  private static Path checkDest(String srcName, FileSystem dstFS, Path dst)
+    throws IOException {
+    if (dstFS.exists(dst)) {
+      if (!dstFS.isDirectory(dst)) {
+        throw new IOException("Target " + dst + " already exists");
+      } else {
+        dst = new Path(dst, srcName);
+        if (dstFS.exists(dst)) {
+          throw new IOException("Target " + dst + " already exists");
         }
+      }
+    }
+    return dst;
+  }
 
-        //
-        // Copy the items
-        //
-        if (! fs.isDirectory(src)) {
-            //
-            // If the source is a file, then just copy the contents
-            //
-            copyContents(fs, src, dst, true, conf);
-        } else {
-            //
-            // If the source is a dir, then we need to copy all the subfiles.
-            //
-            fs.mkdirs(dst);
-            File contents[] = fs.listFiles(src);
-            for (int i = 0; i < contents.length; i++) {
-                recursiveCopy(fs, contents[i], new File(dst, contents[i].getName()), conf);
-            }
+  private static File checkDest(String srcName, File dst)
+    throws IOException {
+    if (dst.exists()) {
+      if (!dst.isDirectory()) {
+        throw new IOException("Target " + dst + " already exists");
+      } else {
+        dst = new File(dst, srcName);
+        if (dst.exists()) {
+          throw new IOException("Target " + dst + " already exists");
         }
+      }
     }
+    return dst;
+  }
+
 }
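
A minimal usage sketch of the new FileUtil.copy() overloads (the filesystem instance, paths, and file names below are hypothetical). Per checkDest(), a source is copied beneath an existing destination directory, and an already-existing nested target raises an IOException:

    import java.io.File;
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.fs.LocalFileSystem;
    import org.apache.hadoop.fs.Path;

    public class CopySketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = new LocalFileSystem(conf);   // stands in for any FileSystem

        // Local disk -> FileSystem, keeping the local source (deleteSource = false).
        FileUtil.copy(new File("/tmp/input"), fs, new Path("input"), false, conf);

        // FileSystem -> FileSystem; deleteSource = true turns the copy into a move.
        FileUtil.copy(fs, new Path("input"), fs, new Path("archive"), true, conf);

        // FileSystem -> local disk.
        FileUtil.copy(fs, new Path("archive"), new File("/tmp/restored"), false, conf);
      }
    }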

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalFileSystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalFileSystem.java?rev=394984&r1=394983&r2=394984&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalFileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalFileSystem.java Tue Apr 18 10:05:31 2006
@@ -29,8 +29,8 @@
  * @author Mike Cafarella
  *****************************************************************/
 public class LocalFileSystem extends FileSystem {
-    private File workingDir
-      = new File(System.getProperty("user.dir")).getAbsoluteFile();
+    private Path workingDir =
+      new Path(System.getProperty("user.dir"));
     TreeMap sharedLockDataSet = new TreeMap();
     TreeMap nonsharedLockDataSet = new TreeMap();
     TreeMap lockObjSet = new TreeMap();
@@ -52,8 +52,8 @@
      * Return 1x1 'localhost' cell if the file exists.
      * Return null if otherwise.
      */
-    public String[][] getFileCacheHints(File f, long start, long len) throws IOException {
-        if (! f.exists()) {
+    public String[][] getFileCacheHints(Path f, long start, long len) throws IOException {
+        if (! exists(f)) {
             return null;
         } else {
             String result[][] = new String[1][];
@@ -65,14 +65,22 @@
 
     public String getName() { return "local"; }
 
+    /** Convert a path to a File. */
+    public File pathToFile(Path path) {
+      if (!path.isAbsolute()) {
+        path = new Path(workingDir, path);
+      }
+      return new File(path.toString());
+    }
+
     /*******************************************************
      * For open()'s FSInputStream
      *******************************************************/
     class LocalFSFileInputStream extends FSInputStream {
         FileInputStream fis;
 
-        public LocalFSFileInputStream(File f) throws IOException {
-          this.fis = new FileInputStream(f);
+        public LocalFSFileInputStream(Path f) throws IOException {
+          this.fis = new FileInputStream(pathToFile(f));
         }
 
         public void seek(long pos) throws IOException {
@@ -109,10 +117,9 @@
         public long skip(long n) throws IOException { return fis.skip(n); }
     }
     
-    public FSInputStream openRaw(File f) throws IOException {
-        f = makeAbsolute(f);
-        if (! f.exists()) {
-            throw new FileNotFoundException(f.toString());
+    public FSInputStream openRaw(Path f) throws IOException {
+        if (! exists(f)) {
+            throw new FileNotFoundException(f.toString());
         }
         return new LocalFSFileInputStream(f);
     }
@@ -123,8 +130,8 @@
     class LocalFSFileOutputStream extends FSOutputStream {
       FileOutputStream fos;
 
-      public LocalFSFileOutputStream(File f) throws IOException {
-        this.fos = new FileOutputStream(f);
+      public LocalFSFileOutputStream(Path f) throws IOException {
+        this.fos = new FileOutputStream(pathToFile(f));
       }
 
       public long getPos() throws IOException {
@@ -153,90 +160,72 @@
       }
     }
 
-    private File makeAbsolute(File f) {
-      if (isAbsolute(f)) {
-        return f;
-      } else {
-        return new File(workingDir, f.toString()).getAbsoluteFile();
-      }
-    }
-    
-    public FSOutputStream createRaw(File f, boolean overwrite, short replication)
+    public FSOutputStream createRaw(Path f, boolean overwrite, short replication)
       throws IOException {
-        f = makeAbsolute(f);
-        if (f.exists() && ! overwrite) {
+        if (exists(f) && ! overwrite) {
             throw new IOException("File already exists:"+f);
         }
-        File parent = f.getParentFile();
+        Path parent = f.getParent();
         if (parent != null)
-          parent.mkdirs();
+          mkdirs(parent);
 
         return new LocalFSFileOutputStream(f);
     }
 
-    public boolean renameRaw(File src, File dst) throws IOException {
-        src = makeAbsolute(src);
-        dst = makeAbsolute(dst);
+    public boolean renameRaw(Path src, Path dst) throws IOException {
         if (useCopyForRename) {
-            FileUtil.copyContents(this, src, dst, true, getConf());
-            return fullyDelete(src);
-        } else return src.renameTo(dst);
+          return FileUtil.copy(this, src, this, dst, true, getConf());
+        } else return pathToFile(src).renameTo(pathToFile(dst));
     }
 
-    public boolean deleteRaw(File f) throws IOException {
-        f = makeAbsolute(f);
+    public boolean deleteRaw(Path p) throws IOException {
+        File f = pathToFile(p);
         if (f.isFile()) {
             return f.delete();
-        } else return fullyDelete(f);
-    }
-
-    public boolean exists(File f) throws IOException {
-        f = makeAbsolute(f);
-        return f.exists();
+        } else return FileUtil.fullyDelete(f);
     }
 
-    public boolean isDirectory(File f) throws IOException {
-        f = makeAbsolute(f);
-        return f.isDirectory();
+    public boolean exists(Path f) throws IOException {
+        return pathToFile(f).exists();
     }
 
-    public boolean isAbsolute(File f) {
-      return f.isAbsolute() ||
-        f.getPath().startsWith("/") ||
-        f.getPath().startsWith("\\");
+    public boolean isDirectory(Path f) throws IOException {
+        return pathToFile(f).isDirectory();
     }
 
-    public long getLength(File f) throws IOException {
-        f = makeAbsolute(f);
-        return f.length();
+    public long getLength(Path f) throws IOException {
+        return pathToFile(f).length();
     }
 
-    public File[] listFilesRaw(File f) throws IOException {
-        f = makeAbsolute(f);
-        return f.listFiles();
+    public Path[] listPathsRaw(Path f) throws IOException {
+        String[] names = pathToFile(f).list();
+        if (names == null) {
+          return null;
+        }
+        Path[] results = new Path[names.length];
+        for (int i = 0; i < names.length; i++) {
+          results[i] = new Path(f, names[i]);
+        }
+        return results;
     }
 
-    public void mkdirs(File f) throws IOException {
-        f = makeAbsolute(f);
-        f.mkdirs();
+    public boolean mkdirs(Path f) throws IOException {
+      return pathToFile(f).mkdirs();
     }
 
     /**
      * Set the working directory to the given directory.
-     * Sets both a local variable and the system property.
-     * Note that the system property is only used if the application explictly
-     * calls java.io.File.getAbsolutePath().
      */
-    public void setWorkingDirectory(File new_dir) {
-      workingDir = makeAbsolute(new_dir);
+    public void setWorkingDirectory(Path newDir) {
+      workingDir = newDir;
     }
     
-    public File getWorkingDirectory() {
+    public Path getWorkingDirectory() {
       return workingDir;
     }
     
-    public synchronized void lock(File f, boolean shared) throws IOException {
-        f = makeAbsolute(f);
+    public synchronized void lock(Path p, boolean shared) throws IOException {
+        File f = pathToFile(p);
         f.createNewFile();
 
         FileLock lockObj = null;
@@ -252,8 +241,8 @@
         lockObjSet.put(f, lockObj);
     }
 
-    public synchronized void release(File f) throws IOException {
-        f = makeAbsolute(f);
+    public synchronized void release(Path p) throws IOException {
+        File f = pathToFile(p);
         FileLock lockObj = (FileLock) lockObjSet.get(f);
         FileInputStream sharedLockData = (FileInputStream) sharedLockDataSet.get(f);
         FileOutputStream nonsharedLockData = (FileOutputStream) nonsharedLockDataSet.get(f);
@@ -277,52 +266,29 @@
     }
 
     // In the case of the local filesystem, we can just rename the file.
-    public void moveFromLocalFile(File src, File dst) throws IOException {
-        if (! src.equals(dst)) {
-            src = makeAbsolute(src);
-            dst = makeAbsolute(dst);
-            if (useCopyForRename) {
-                FileUtil.copyContents(this, src, dst, true, getConf());
-                fullyDelete(src);
-            } else src.renameTo(dst);
-        }
+    public void moveFromLocalFile(Path src, Path dst) throws IOException {
+      rename(src, dst);
     }
 
     // Similar to moveFromLocalFile(), except the source is kept intact.
-    public void copyFromLocalFile(File src, File dst) throws IOException {
-        if (! src.equals(dst)) {
-            src = makeAbsolute(src);
-            dst = makeAbsolute(dst);
-            FileUtil.copyContents(this, src, dst, true, getConf());
-        }
+    public void copyFromLocalFile(Path src, Path dst) throws IOException {
+      FileUtil.copy(this, src, this, dst, false, getConf());
     }
 
     // We can't delete the src file in this case.  Too bad.
-    public void copyToLocalFile(File src, File dst) throws IOException {
-        if (! src.equals(dst)) {
-            src = makeAbsolute(src);
-            dst = makeAbsolute(dst);
-            FileUtil.copyContents(this, src, dst, true, getConf());
-        }
+    public void copyToLocalFile(Path src, Path dst) throws IOException {
+      FileUtil.copy(this, src, this, dst, false, getConf());
     }
 
     // We can write output directly to the final location
-    public File startLocalOutput(File fsOutputFile, File tmpLocalFile) throws IOException {
-        return makeAbsolute(fsOutputFile);
+    public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
+      throws IOException {
+      return fsOutputFile;
     }
 
     // It's in the right place - nothing to do.
-    public void completeLocalOutput(File fsWorkingFile, File tmpLocalFile) throws IOException {
-    }
-
-    // We can read directly from the real local fs.
-    public File startLocalInput(File fsInputFile, File tmpLocalFile) throws IOException {
-        return makeAbsolute(fsInputFile);
-    }
-
-    // We're done reading.  Nothing to clean up.
-    public void completeLocalInput(File localFile) throws IOException {
-        // Ignore the file, it's at the right destination!
+    public void completeLocalOutput(Path fsWorkingFile, Path tmpLocalFile)
+      throws IOException {
     }
 
     public void close() throws IOException {}
@@ -331,39 +297,14 @@
         return "LocalFS";
     }
     
-    /**
-     * Implement our own version instead of using the one in FileUtil,
-     * to avoid infinite recursion.
-     * @param dir
-     * @return
-     * @throws IOException
-     */
-    private boolean fullyDelete(File dir) throws IOException {
-        dir = makeAbsolute(dir);
-        File contents[] = dir.listFiles();
-        if (contents != null) {
-            for (int i = 0; i < contents.length; i++) {
-                if (contents[i].isFile()) {
-                    if (! contents[i].delete()) {
-                        return false;
-                    }
-                } else {
-                    if (! fullyDelete(contents[i])) {
-                        return false;
-                    }
-                }
-            }
-        }
-        return dir.delete();
-    }
 
     /** Moves files to a bad file directory on the same device, so that their
      * storage will not be reused. */
-    public void reportChecksumFailure(File f, FSInputStream in,
+    public void reportChecksumFailure(Path p, FSInputStream in,
                                       long start, long length, int crc) {
       try {
         // canonicalize f   
-        f = makeAbsolute(f).getCanonicalFile();
+        File f = pathToFile(p).getCanonicalFile();
       
         // find highest writable parent dir of f on the same device
         String device = new DF(f.toString(), getConf()).getMount();
@@ -384,11 +325,11 @@
         f.renameTo(badFile);                      // rename it
 
         // move checksum file too
-        File checkFile = getChecksumFile(f);
+        File checkFile = pathToFile(getChecksumFile(p));
         checkFile.renameTo(new File(badDir, checkFile.getName()+suffix));
 
       } catch (IOException e) {
-        LOG.warning("Error moving bad file " + f + ": " + e);
+        LOG.warning("Error moving bad file " + p + ": " + e);
       }
     }
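
A minimal sketch of the new working-directory behavior (the directory and file names are hypothetical): relative Paths are resolved against the working directory held by the LocalFileSystem instance itself, via pathToFile(), rather than against the user.dir system property:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.LocalFileSystem;
    import org.apache.hadoop.fs.Path;

    public class WorkingDirSketch {
      public static void main(String[] args) throws IOException {
        LocalFileSystem local = new LocalFileSystem(new Configuration());
        local.setWorkingDirectory(new Path("/tmp/work"));

        // The relative Path below is resolved against the working directory set
        // above, so this should name /tmp/work/data/part-00000 on the local disk.
        java.io.File f = local.pathToFile(new Path("data/part-00000"));
        System.out.println(f);
      }
    }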
 


