hadoop-common-commits mailing list archives

From cutt...@apache.org
Subject svn commit: r374733 [2/4] - in /lucene/hadoop/trunk: ./ bin/ conf/ lib/ lib/jetty-ext/ src/java/ src/java/org/ src/java/org/apache/ src/java/org/apache/hadoop/ src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/...
Date Fri, 03 Feb 2006 19:45:51 GMT
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java Fri Feb  3 11:45:32 2006
@@ -13,9 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.ndfs;
+package org.apache.hadoop.dfs;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 import java.io.*;
 import java.util.*;
@@ -106,7 +106,7 @@
             if (getNode(target) != null) {
                 return null;
             } else {
-                String parentName = NDFSFile.getNDFSParent(target);
+                String parentName = DFSFile.getDFSParent(target);
                 if (parentName == null) {
                     return null;
                 }
@@ -462,7 +462,7 @@
         waitForReady();
 
         // Always do an implicit mkdirs for parent directory tree
-        mkdirs(NDFSFile.getNDFSParent(src.toString()));
+        mkdirs(DFSFile.getDFSParent(src.toString()));
         if (unprotectedAddFile(src, blocks)) {
             logEdit(OP_ADD, src, new ArrayWritable(Block.class, blocks));
             return true;
@@ -598,7 +598,7 @@
      * This function is admittedly very inefficient right now.  We'll
      * make it better later.
      */
-    public NDFSFileInfo[] getListing(UTF8 src) {
+    public DFSFileInfo[] getListing(UTF8 src) {
         String srcs = normalizePath(src);
 
         synchronized (rootDir) {
@@ -609,14 +609,14 @@
                 Vector contents = new Vector();
                 targetNode.listContents(contents);
 
-                NDFSFileInfo listing[] = new NDFSFileInfo[contents.size()];
+                DFSFileInfo listing[] = new DFSFileInfo[contents.size()];
                 int i = 0;
                 for (Iterator it = contents.iterator(); it.hasNext(); i++) {
                     INode cur = (INode) it.next();
                     UTF8 curName = new UTF8(cur.computeName());
-                    listing[i] = new NDFSFileInfo(curName, cur.computeFileLength(), cur.computeContentsLength(), isDir(curName));
-                    //listing[i] = new NDFSFileInfo(curName, cur.computeFileLength(), 0, isDir(curName));
-                    //listing[i] = new NDFSFileInfo(curName, cur.computeFileLength(), 0, false);
+                    listing[i] = new DFSFileInfo(curName, cur.computeFileLength(), cur.computeContentsLength(), isDir(curName));
+                    //listing[i] = new DFSFileInfo(curName, cur.computeFileLength(), 0, isDir(curName));
+                    //listing[i] = new DFSFileInfo(curName, cur.computeFileLength(), 0, false);
                 }
                 return listing;
             }
@@ -688,10 +688,10 @@
         v.add(src);
 
         // All its parents
-        String parent = NDFSFile.getNDFSParent(src);
+        String parent = DFSFile.getDFSParent(src);
         while (parent != null) {
             v.add(parent);
-            parent = NDFSFile.getNDFSParent(parent);
+            parent = DFSFile.getDFSParent(parent);
         }
 
         // Now go backwards through list of dirs, creating along

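The mkdirs hunk above shows the pattern: collect src plus every ancestor via DFSFile.getDFSParent, then walk the list backwards so parents are created before children. A minimal standalone sketch of that walk (parentOf mirrors getDFSParent, whose full body appears in the NDFSFile.java hunk below; the path is illustrative):

    import java.util.Vector;

    // Sketch of the parent-collection walk in FSDirectory.mkdirs above.
    // parentOf mirrors DFSFile.getDFSParent from this commit.
    public class MkdirsWalkSketch {
        static String parentOf(String path) {
            if (path == null || "/".equals(path)) return null;
            int index = path.lastIndexOf("/");
            if (index == -1) return null;
            return index == 0 ? "/" : path.substring(0, index);
        }

        public static void main(String[] args) {
            Vector<String> v = new Vector<String>();
            String src = "/user/mike/data";
            v.add(src);
            for (String p = parentOf(src); p != null; p = parentOf(p)) {
                v.add(p);
            }
            // Go backwards through the list, creating shallowest-first:
            // prints /, /user, /user/mike, /user/mike/data
            for (int i = v.size() - 1; i >= 0; i--) {
                System.out.println("mkdir " + v.get(i));
            }
        }
    }
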
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java Fri Feb  3 11:45:32 2006
@@ -13,10 +13,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.ndfs;
+package org.apache.hadoop.dfs;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 import java.io.*;
 import java.util.*;
@@ -32,7 +33,7 @@
  * 5)  LRU cache of updated-heartbeat machines
  ***************************************************/
 public class FSNamesystem implements FSConstants {
-    public static final Logger LOG = LogFormatter.getLogger("org.apache.nutch.fs.FSNamesystem");
+    public static final Logger LOG = LogFormatter.getLogger("org.apache.hadoop.fs.FSNamesystem");
 
    
 
@@ -128,7 +129,7 @@
     Daemon hbthread = null, lmthread = null;
     boolean fsRunning = true;
     long systemStart = 0;
-    private NutchConf nutchConf;
+    private Configuration conf;
 
     //  DESIRED_REPLICATION is how many copies we try to have at all times
     private int desiredReplication;
@@ -149,23 +150,23 @@
      * dir is where the filesystem directory state 
      * is stored
      */
-    public FSNamesystem(File dir, NutchConf nutchConf) throws IOException {
+    public FSNamesystem(File dir, Configuration conf) throws IOException {
         this.dir = new FSDirectory(dir);
         this.hbthread = new Daemon(new HeartbeatMonitor());
         this.lmthread = new Daemon(new LeaseMonitor());
         hbthread.start();
         lmthread.start();
         this.systemStart = System.currentTimeMillis();
-        this.nutchConf = nutchConf;
+        this.conf = conf;
         
-        this.desiredReplication = nutchConf.getInt("ndfs.replication", 3);
+        this.desiredReplication = conf.getInt("dfs.replication", 3);
         this.maxReplication = desiredReplication;
-        this.maxReplicationStreams = nutchConf.getInt("ndfs.max-repl-streams", 2);
+        this.maxReplicationStreams = conf.getInt("dfs.max-repl-streams", 2);
         this.minReplication = 1;
         this.heartBeatRecheck= 1000;
-        this.useAvailability = nutchConf.getBoolean("ndfs.availability.allocation", false);
+        this.useAvailability = conf.getBoolean("dfs.availability.allocation", false);
         this.allowSameHostTargets =
-           nutchConf.getBoolean("test.ndfs.same.host.targets.allowed", false);
+           conf.getBoolean("test.dfs.same.host.targets.allowed", false);
     }
 
     /** Close down this filesystem manager.
@@ -770,7 +771,7 @@
      * Get a listing of all files at 'src'.  The Object[] array
      * exists so we can return file attributes (soon to be implemented)
      */
-    public NDFSFileInfo[] getListing(UTF8 src) {
+    public DFSFileInfo[] getListing(UTF8 src) {
         return dir.getListing(src);
     }
 

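Besides the package move, this file renames every configuration key it reads (ndfs.replication to dfs.replication, ndfs.max-repl-streams to dfs.max-repl-streams, and so on) while keeping the defaults. A hedged sketch of the getInt/getBoolean lookup pattern, with a Properties-backed stub standing in for the real org.apache.hadoop.conf.Configuration, which loads its values from config files:

    import java.util.Properties;

    // Properties-backed stub standing in for Configuration, to show the
    // getInt/getBoolean defaulting used by the constructor above.
    class ConfStub {
        private final Properties props = new Properties();
        void set(String k, String v) { props.setProperty(k, v); }
        int getInt(String k, int dflt) {
            String v = props.getProperty(k);
            return v == null ? dflt : Integer.parseInt(v);
        }
        boolean getBoolean(String k, boolean dflt) {
            String v = props.getProperty(k);
            return v == null ? dflt : Boolean.parseBoolean(v);
        }
    }

    public class ReplicationConfigDemo {
        public static void main(String[] args) {
            ConfStub conf = new ConfStub();
            conf.set("dfs.replication", "5");
            // Unset keys fall back to the defaults used above.
            System.out.println(conf.getInt("dfs.replication", 3));       // 5
            System.out.println(conf.getInt("dfs.max-repl-streams", 2));  // 2
            System.out.println(
                conf.getBoolean("dfs.availability.allocation", false));  // false
        }
    }
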
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/LocatedBlock.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/LocatedBlock.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/LocatedBlock.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/LocatedBlock.java Fri Feb  3 11:45:32 2006
@@ -13,9 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.ndfs;
+package org.apache.hadoop.dfs;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSClient.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSClient.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSClient.java Fri Feb  3 11:45:32 2006
@@ -13,12 +13,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.ndfs;
+package org.apache.hadoop.dfs;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 import java.io.*;
 import java.net.*;
@@ -26,12 +27,12 @@
 import java.util.logging.*;
 
 /********************************************************
- * NDFSClient can connect to a Nutch Filesystem and perform basic file tasks.
+ * DFSClient can connect to a Nutch Filesystem and perform basic file tasks.
  * Connects to a namenode daemon.
  * @author Mike Cafarella, Tessa MacDuff
  ********************************************************/
-public class NDFSClient implements FSConstants {
-    public static final Logger LOG = LogFormatter.getLogger("org.apache.nutch.fs.NDFSClient");
+public class DFSClient implements FSConstants {
+    public static final Logger LOG = LogFormatter.getLogger("org.apache.hadoop.fs.DFSClient");
     static int MAX_BLOCK_ACQUIRE_FAILURES = 10;
     ClientProtocol namenode;
     boolean running = true;
@@ -40,11 +41,11 @@
     Daemon leaseChecker;
 
 
-    /** Create a new NDFSClient connected to the given namenode server.
+    /** Create a new DFSClient connected to the given namenode server.
      */
-    public NDFSClient(InetSocketAddress nameNodeAddr, NutchConf nutchConf) {
-        this.namenode = (ClientProtocol) RPC.getProxy(ClientProtocol.class, nameNodeAddr, nutchConf);
-        this.clientName = "NDFSClient_" + r.nextInt();
+    public DFSClient(InetSocketAddress nameNodeAddr, Configuration conf) {
+        this.namenode = (ClientProtocol) RPC.getProxy(ClientProtocol.class, nameNodeAddr, conf);
+        this.clientName = "DFSClient_" + r.nextInt();
         this.leaseChecker = new Daemon(new LeaseChecker());
         this.leaseChecker.start();
     }
@@ -76,11 +77,11 @@
      */
     public NFSInputStream open(UTF8 src) throws IOException {
         // Get block info from namenode
-        return new NDFSInputStream(src.toString());
+        return new DFSInputStream(src.toString());
     }
 
     public NFSOutputStream create(UTF8 src, boolean overwrite) throws IOException {
-        return new NDFSOutputStream(src, overwrite);
+        return new DFSOutputStream(src, overwrite);
     }
 
     /**
@@ -113,7 +114,7 @@
 
     /**
      */
-    public NDFSFileInfo[] listFiles(UTF8 src) throws IOException {
+    public DFSFileInfo[] listFiles(UTF8 src) throws IOException {
         return namenode.getListing(src.toString());
     }
 
@@ -220,10 +221,10 @@
     }
 
     /****************************************************************
-     * NDFSInputStream provides bytes from a named file.  It handles 
+     * DFSInputStream provides bytes from a named file.  It handles 
      * negotiation of the namenode and various datanodes as necessary.
      ****************************************************************/
-    class NDFSInputStream extends NFSInputStream {
+    class DFSInputStream extends NFSInputStream {
         boolean closed = false;
 
         private String src;
@@ -237,7 +238,7 @@
 
         /**
          */
-        public NDFSInputStream(String src) throws IOException {
+        public DFSInputStream(String src) throws IOException {
             this.src = src;
             openInfo();
             this.blockStream = null;
@@ -486,9 +487,9 @@
     }
 
     /****************************************************************
-     * NDFSOutputStream creates files from a stream of bytes.
+     * DFSOutputStream creates files from a stream of bytes.
      ****************************************************************/
-    class NDFSOutputStream extends NFSOutputStream {
+    class DFSOutputStream extends NFSOutputStream {
         boolean closed = false;
 
         private byte outBuf[] = new byte[BUFFER_SIZE];
@@ -510,13 +511,13 @@
         /**
          * Create a new output stream to the given DataNode.
          */
-        public NDFSOutputStream(UTF8 src, boolean overwrite) throws IOException {
+        public DFSOutputStream(UTF8 src, boolean overwrite) throws IOException {
             this.src = src;
             this.overwrite = overwrite;
             this.blockStream = null;
             this.blockReplyStream = null;
             this.blockStreamWorking = false;
-            this.backupFile = File.createTempFile("ndfsout", "bak");
+            this.backupFile = File.createTempFile("dfsout", "bak");
             this.backupStream = new BufferedOutputStream(new FileOutputStream(backupFile));
             nextBlockOutputStream(true);
         }
@@ -785,7 +786,7 @@
             // Delete local backup, start new one
             //
             backupFile.delete();
-            backupFile = File.createTempFile("ndfsout", "bak");
+            backupFile = File.createTempFile("dfsout", "bak");
             backupStream = new BufferedOutputStream(new FileOutputStream(backupFile));
         }
 

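DFSOutputStream stages writes in a local temp file, whose prefix changes from "ndfsout" to "dfsout" above, and replaces it after each flushed block. A self-contained sketch of that backup-file lifecycle (contents and cleanup are illustrative):

    import java.io.*;

    // Sketch of the local backup-file lifecycle in DFSOutputStream above:
    // buffer into a temp file, and on a block boundary delete it and start
    // a fresh one.
    public class BackupFileSketch {
        public static void main(String[] args) throws IOException {
            File backupFile = File.createTempFile("dfsout", "bak");
            OutputStream backupStream =
                new BufferedOutputStream(new FileOutputStream(backupFile));
            backupStream.write("one block of data".getBytes("UTF-8"));
            backupStream.close();

            // Block flushed to a datanode: delete local backup, start new one.
            backupFile.delete();
            backupFile = File.createTempFile("dfsout", "bak");
            backupStream = new BufferedOutputStream(new FileOutputStream(backupFile));
            backupStream.close();
            backupFile.delete();
        }
    }
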
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSFile.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSFile.java Fri Feb  3 11:45:32 2006
@@ -13,26 +13,26 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.ndfs;
+package org.apache.hadoop.dfs;
 
 import java.io.*;
 
 
 /*****************************************************************
- * NDFSFile is a traditional java File that's been annotated with
+ * DFSFile is a traditional java File that's been annotated with
  * some extra information.
  *
  * @author Mike Cafarella
  *****************************************************************/
-public class NDFSFile extends File {
-    NDFSFileInfo info;
+public class DFSFile extends File {
+    DFSFileInfo info;
 
-    /** Separator used in NDFS filenames. */
-    public static final String NDFS_FILE_SEPARATOR = "/";
+    /** Separator used in DFS filenames. */
+    public static final String DFS_FILE_SEPARATOR = "/";
     
     /**
      */
-    public NDFSFile(NDFSFileInfo info) {
+    public DFSFile(DFSFileInfo info) {
         super(info.getPath());
         this.info = info;
     }
@@ -79,20 +79,20 @@
     }
     
     /**
-     * Retrieving parent path from NDFS path string
-     * @param path - NDFS path 
-     * @return - parent path of NDFS path, or null if no parent exist.
+     * Retrieving parent path from DFS path string
+     * @param path - DFS path 
+     * @return - parent path of DFS path, or null if no parent exist.
      */
-    public static String getNDFSParent(String path) {
+    public static String getDFSParent(String path) {
         if (path == null)
             return null;
-        if (NDFS_FILE_SEPARATOR.equals(path))
+        if (DFS_FILE_SEPARATOR.equals(path))
             return null;
-        int index = path.lastIndexOf(NDFS_FILE_SEPARATOR); 
+        int index = path.lastIndexOf(DFS_FILE_SEPARATOR); 
         if (index == -1)
             return null;
         if (index == 0)
-            return NDFS_FILE_SEPARATOR;
+            return DFS_FILE_SEPARATOR;
         return path.substring(0, index);
     }
 }

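A runnable check of the branches getDFSParent handles, matching the method body shown above (run with assertions enabled, java -ea):

    import org.apache.hadoop.dfs.DFSFile;

    // Runnable check of getDFSParent's branches, as shown above.
    public class GetDFSParentCheck {
        public static void main(String[] args) {
            assert DFSFile.getDFSParent(null) == null;
            assert DFSFile.getDFSParent("/") == null;         // root: no parent
            assert "/".equals(DFSFile.getDFSParent("/a"));    // index == 0
            assert "/a".equals(DFSFile.getDFSParent("/a/b"));
            assert DFSFile.getDFSParent("noslash") == null;   // no separator
            System.out.println("getDFSParent edge cases hold");
        }
    }
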
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSFileInfo.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSFileInfo.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSFileInfo.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NDFSFileInfo.java Fri Feb  3 11:45:32 2006
@@ -13,19 +13,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.ndfs;
+package org.apache.hadoop.dfs;
 
-import org.apache.nutch.io.*;
+import org.apache.hadoop.io.*;
 
 import java.io.*;
 
 /******************************************************
- * NDFSFileInfo tracks info about remote files, including
+ * DFSFileInfo tracks info about remote files, including
  * name, size, etc.  
  * 
  * @author Mike Cafarella
  ******************************************************/
-public class NDFSFileInfo implements Writable {
+public class DFSFileInfo implements Writable {
     UTF8 path;
     long len;
     long contentsLen;
@@ -33,12 +33,12 @@
 
     /**
      */
-    public NDFSFileInfo() {
+    public DFSFileInfo() {
     }
 
     /**
      */
-    public NDFSFileInfo(UTF8 path, long len, long contentsLen, boolean isDir) {
+    public DFSFileInfo(UTF8 path, long len, long contentsLen, boolean isDir) {
         this.path = path;
         this.len = len;
         this.contentsLen = contentsLen;
@@ -60,7 +60,7 @@
     /**
      */
     public String getParent() {
-        return NDFSFile.getNDFSParent(path.toString());
+        return DFSFile.getDFSParent(path.toString());
     }
 
     /**

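DFSFileInfo is a Writable over four fields: path, len, contentsLen, isDir. A hedged round-trip sketch using plain java.io, with writeUTF standing in for the UTF8 type's encoding; the actual wire format in this commit may differ:

    import java.io.*;

    // Hedged round-trip of the four DFSFileInfo fields.
    public class FileInfoRoundTrip {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(buf);
            out.writeUTF("/user/mike/file");  // path
            out.writeLong(1024L);             // len
            out.writeLong(1024L);             // contentsLen
            out.writeBoolean(false);          // isDir
            out.close();

            DataInputStream in = new DataInputStream(
                    new ByteArrayInputStream(buf.toByteArray()));
            System.out.println(in.readUTF() + " len=" + in.readLong()
                    + " contentsLen=" + in.readLong()
                    + " isDir=" + in.readBoolean());
        }
    }
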
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java Fri Feb  3 11:45:32 2006
@@ -13,11 +13,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.ndfs;
+package org.apache.hadoop.dfs;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 import java.io.*;
 import java.net.*;
@@ -36,7 +37,7 @@
  * @author Mike Cafarella
  **********************************************************/
 public class NameNode implements ClientProtocol, DatanodeProtocol, FSConstants {
-    public static final Logger LOG = LogFormatter.getLogger("org.apache.nutch.ndfs.NameNode");
+    public static final Logger LOG = LogFormatter.getLogger("org.apache.hadoop.dfs.NameNode");
 
     private FSNamesystem namesystem;
     private Server server;
@@ -48,20 +49,20 @@
     /**
      * Create a NameNode at the default location
      */
-    public NameNode(NutchConf nutchConf) throws IOException {
-        this(new File(nutchConf.get("ndfs.name.dir",
-                                          "/tmp/nutch/ndfs/name")),
+    public NameNode(Configuration conf) throws IOException {
+        this(new File(conf.get("dfs.name.dir",
+                                          "/tmp/nutch/dfs/name")),
              DataNode.createSocketAddr
-             (nutchConf.get("fs.default.name", "local")).getPort(), nutchConf);
+             (conf.get("fs.default.name", "local")).getPort(), conf);
     }
 
     /**
      * Create a NameNode at the specified location and start it.
      */
-    public NameNode(File dir, int port, NutchConf nutchConf) throws IOException {
-        this.namesystem = new FSNamesystem(dir, nutchConf);
-        this.handlerCount = nutchConf.getInt("ndfs.namenode.handler.count", 10);
-        this.server = RPC.getServer(this, port, handlerCount, false, nutchConf);
+    public NameNode(File dir, int port, Configuration conf) throws IOException {
+        this.namesystem = new FSNamesystem(dir, conf);
+        this.handlerCount = conf.getInt("dfs.namenode.handler.count", 10);
+        this.server = RPC.getServer(this, port, handlerCount, false, conf);
         this.server.start();
     }
 
@@ -266,7 +267,7 @@
 
     /**
      */
-    public NDFSFileInfo[] getListing(String src) throws IOException {
+    public DFSFileInfo[] getListing(String src) throws IOException {
         return namesystem.getListing(new UTF8(src));
     }
 
@@ -345,7 +346,7 @@
     /**
      */
     public static void main(String argv[]) throws IOException, InterruptedException {
-        NameNode namenode = new NameNode(new NutchConf());
+        NameNode namenode = new NameNode(new Configuration());
         namenode.join();
     }
 }

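The default constructor derives its RPC port from fs.default.name (a host:port string) via DataNode.createSocketAddr. A sketch of that split; the helper is an illustration only, since the committed createSocketAddr is not shown in this diff and also has to handle the "local" default:

    import java.net.InetSocketAddress;

    // Illustrative stand-in for DataNode.createSocketAddr: split host:port
    // and build an InetSocketAddress.
    public class AddrParseSketch {
        static InetSocketAddress createSocketAddr(String target) {
            int colon = target.indexOf(':');
            String host = target.substring(0, colon);
            int port = Integer.parseInt(target.substring(colon + 1));
            return new InetSocketAddress(host, port);
        }

        public static void main(String[] args) {
            InetSocketAddress addr = createSocketAddr("namenode.example.com:8009");
            // The NameNode binds its RPC server to this port.
            System.out.println(addr.getHostName() + " -> port " + addr.getPort());
        }
    }
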
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ChecksumException.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ChecksumException.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ChecksumException.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ChecksumException.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSError.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSError.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSError.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FSError.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 /** Thrown for unexpected filesystem errors, presumed to reflect disk errors
  * in the native filesystem. */

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java Fri Feb  3 11:45:32 2006
@@ -14,11 +14,11 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.*;
 
-import org.apache.nutch.util.NutchConf;
+import org.apache.hadoop.conf.Configuration;
 
 /**
  * A collection of file-processing util methods
@@ -28,13 +28,13 @@
      * Delete a directory and all its contents.  If
      * we return false, the directory may be partially-deleted.
      */
-    public static boolean fullyDelete(File dir, NutchConf nutchConf) throws IOException {
-        return fullyDelete(new LocalFileSystem(nutchConf), dir);
+    public static boolean fullyDelete(File dir, Configuration conf) throws IOException {
+        return fullyDelete(new LocalFileSystem(conf), dir);
     }
     public static boolean fullyDelete(NutchFileSystem nfs, File dir) throws IOException {
         // 20041022, xing.
         // Currently nfs.detele(File) means fully delete for both
-        // LocalFileSystem.java and NDFSFileSystem.java. So we are okay now.
+        // LocalFileSystem.java and DistributedFileSystem.java. So we are okay now.
         // If implementation changes in future, it should be modified too.
         return nfs.delete(dir);
     }
@@ -43,7 +43,7 @@
      * Copy a file's contents to a new location.
      * Returns whether a target file was overwritten
      */
-    public static boolean copyContents(NutchFileSystem nfs, File src, File dst, boolean overwrite, NutchConf nutchConf) throws IOException {
+    public static boolean copyContents(NutchFileSystem nfs, File src, File dst, boolean overwrite, Configuration conf) throws IOException {
         if (nfs.exists(dst) && !overwrite) {
             return false;
         }
@@ -57,7 +57,7 @@
             NFSInputStream in = nfs.openRaw(src);
             try {
                 NFSOutputStream out = nfs.createRaw(dst, true);
-                byte buf[] = new byte[nutchConf.getInt("io.file.buffer.size", 4096)];
+                byte buf[] = new byte[conf.getInt("io.file.buffer.size", 4096)];
                 try {
                     int readBytes = in.read(buf);
 
@@ -77,7 +77,7 @@
             if (contents != null) {
                 for (int i = 0; i < contents.length; i++) {
                     File newDst = new File(dst, contents[i].getName());
-                    if (! copyContents(nfs, contents[i], newDst, overwrite, nutchConf)) {
+                    if (! copyContents(nfs, contents[i], newDst, overwrite, conf)) {
                         return false;
                     }
                 }
@@ -90,7 +90,7 @@
      * Copy a file and/or directory and all its contents (whether
      * data or other files/dirs)
      */
-    public static void recursiveCopy(NutchFileSystem nfs, File src, File dst, NutchConf nutchConf) throws IOException {
+    public static void recursiveCopy(NutchFileSystem nfs, File src, File dst, Configuration conf) throws IOException {
         //
         // Resolve the real target.
         //
@@ -107,7 +107,7 @@
             //
             // If the source is a file, then just copy the contents
             //
-            copyContents(nfs, src, dst, true, nutchConf);
+            copyContents(nfs, src, dst, true, conf);
         } else {
             //
             // If the source is a dir, then we need to copy all the subfiles.
@@ -115,7 +115,7 @@
             nfs.mkdirs(dst);
             File contents[] = nfs.listFiles(src);
             for (int i = 0; i < contents.length; i++) {
-                recursiveCopy(nfs, contents[i], new File(dst, contents[i].getName()), nutchConf);
+                recursiveCopy(nfs, contents[i], new File(dst, contents[i].getName()), conf);
             }
         }
     }

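The copy loop in copyContents now sizes its buffer from Configuration instead of NutchConf, but its shape is unchanged: read into a conf-sized buffer until EOF. A standalone sketch, with java.io streams standing in for NFSInputStream/NFSOutputStream and 4096 for conf.getInt("io.file.buffer.size", 4096):

    import java.io.*;

    // Standalone version of the copy loop in copyContents above.
    public class CopyLoopSketch {
        static void copy(InputStream in, OutputStream out) throws IOException {
            byte buf[] = new byte[4096];  // io.file.buffer.size default
            int readBytes = in.read(buf);
            while (readBytes >= 0) {
                out.write(buf, 0, readBytes);
                readBytes = in.read(buf);
            }
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            copy(new ByteArrayInputStream("hello".getBytes("UTF-8")), out);
            System.out.println(out.toString("UTF-8"));  // hello
        }
    }
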
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalFileSystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalFileSystem.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalFileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalFileSystem.java Fri Feb  3 11:45:32 2006
@@ -14,17 +14,17 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.util.*;
 import java.nio.channels.*;
 
-import org.apache.nutch.ndfs.NDFSFile;
-import org.apache.nutch.ndfs.DF;
-import org.apache.nutch.ndfs.NDFSFileInfo;
-import org.apache.nutch.util.NutchConf;
-import org.apache.nutch.io.UTF8;
+import org.apache.hadoop.dfs.DFSFile;
+import org.apache.hadoop.dfs.DF;
+import org.apache.hadoop.dfs.DFSFileInfo;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.UTF8;
 
 /****************************************************************
  * Implement the NutchFileSystem interface for the local disk.
@@ -42,8 +42,8 @@
     
     /**
      */
-    public LocalFileSystem(NutchConf nutchConf) throws IOException {
-        super(nutchConf);
+    public LocalFileSystem(Configuration conf) throws IOException {
+        super(conf);
         // if you find an OS which reliably supports non-POSIX
         // rename(2) across filesystems / volumes, you can
         // uncomment this.
@@ -176,7 +176,7 @@
      */
     public boolean renameRaw(File src, File dst) throws IOException {
         if (useCopyForRename) {
-            FileUtil.copyContents(this, src, dst, true, nutchConf);
+            FileUtil.copyContents(this, src, dst, true, conf);
             return fullyDelete(src);
         } else return src.renameTo(dst);
     }
@@ -214,7 +214,7 @@
         File[] files = f.listFiles();
         if (files == null) return null;
         // 20041022, xing, Watch out here:
-        // currently NDFSFile.java does not support those methods
+        // currently DFSFile.java does not support those methods
         //    public boolean canRead()
         //    public boolean canWrite()
         //    public boolean createNewFile()
@@ -222,12 +222,12 @@
         //    public void deleteOnExit()
         //    public boolean isHidden()
         // so you can not rely on returned list for these operations.
-        NDFSFile[] nfiles = new NDFSFile[files.length];
+        DFSFile[] nfiles = new DFSFile[files.length];
         for (int i = 0; i < files.length; i++) {
             long len = files[i].length();
             UTF8 name = new UTF8(files[i].toString());
-            NDFSFileInfo info = new NDFSFileInfo(name, len, len, files[i].isDirectory());
-            nfiles[i] = new NDFSFile(info);
+            DFSFileInfo info = new DFSFileInfo(name, len, len, files[i].isDirectory());
+            nfiles[i] = new DFSFile(info);
         }
         return nfiles;
     }
@@ -289,7 +289,7 @@
     public void moveFromLocalFile(File src, File dst) throws IOException {
         if (! src.equals(dst)) {
             if (useCopyForRename) {
-                FileUtil.copyContents(this, src, dst, true, this.nutchConf);
+                FileUtil.copyContents(this, src, dst, true, this.conf);
                 fullyDelete(src);
             } else src.renameTo(dst);
         }
@@ -300,7 +300,7 @@
      */
     public void copyFromLocalFile(File src, File dst) throws IOException {
         if (! src.equals(dst)) {
-            FileUtil.copyContents(this, src, dst, true, this.nutchConf);
+            FileUtil.copyContents(this, src, dst, true, this.conf);
         }
     }
 
@@ -309,7 +309,7 @@
      */
     public void copyToLocalFile(File src, File dst) throws IOException {
         if (! src.equals(dst)) {
-            FileUtil.copyContents(this, src, dst, true, this.nutchConf);
+            FileUtil.copyContents(this, src, dst, true, this.conf);
         }
     }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NDFSFileSystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NDFSFileSystem.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NDFSFileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NDFSFileSystem.java Fri Feb  3 11:45:32 2006
@@ -14,107 +14,107 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.net.*;
 import java.util.*;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.ndfs.*;
-import org.apache.nutch.util.NutchConf;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.dfs.*;
+import org.apache.hadoop.conf.Configuration;
 
 /****************************************************************
- * Implementation of the abstract NutchFileSystem for the NDFS system.
+ * Implementation of the abstract NutchFileSystem for the DFS system.
  * This is the distributed file system.  It can be distributed over
  * 1 or more machines 
  * @author Mike Cafarella
  *****************************************************************/
-public class NDFSFileSystem extends NutchFileSystem {
+public class DistributedFileSystem extends NutchFileSystem {
     private static final String HOME_DIR =
       "/user/" + System.getProperty("user.name") + "/";
 
     private Random r = new Random();
     private String name;
 
-    NDFSClient ndfs;
+    DFSClient dfs;
 
     /**
      * Create the ShareSet automatically, and then go on to
      * the regular constructor.
      */
-    public NDFSFileSystem(InetSocketAddress namenode, NutchConf nutchConf) throws IOException {
-      super(nutchConf);
-      this.ndfs = new NDFSClient(namenode, nutchConf);
+    public DistributedFileSystem(InetSocketAddress namenode, Configuration conf) throws IOException {
+      super(conf);
+      this.dfs = new DFSClient(namenode, conf);
       this.name = namenode.getHostName() + ":" + namenode.getPort();
     }
 
     public String getName() { return name; }
 
     private UTF8 getPath(File file) {
-      String path = getNDFSPath(file);
-      if (!path.startsWith(NDFSFile.NDFS_FILE_SEPARATOR)) {
-        path = getNDFSPath(new File(HOME_DIR, path)); // make absolute
+      String path = getDFSPath(file);
+      if (!path.startsWith(DFSFile.DFS_FILE_SEPARATOR)) {
+        path = getDFSPath(new File(HOME_DIR, path)); // make absolute
       }
       return new UTF8(path);
     }
 
     public String[][] getFileCacheHints(File f, long start, long len) throws IOException {
-      return ndfs.getHints(getPath(f), start, len);
+      return dfs.getHints(getPath(f), start, len);
     }
 
     public NFSInputStream openRaw(File f) throws IOException {
-      return ndfs.open(getPath(f));
+      return dfs.open(getPath(f));
     }
 
     public NFSOutputStream createRaw(File f, boolean overwrite)
       throws IOException {
-      return ndfs.create(getPath(f), overwrite);
+      return dfs.create(getPath(f), overwrite);
     }
 
     /**
      * Rename files/dirs
      */
     public boolean renameRaw(File src, File dst) throws IOException {
-      return ndfs.rename(getPath(src), getPath(dst));
+      return dfs.rename(getPath(src), getPath(dst));
     }
 
     /**
      * Get rid of File f, whether a true file or dir.
      */
     public boolean deleteRaw(File f) throws IOException {
-        return ndfs.delete(getPath(f));
+        return dfs.delete(getPath(f));
     }
 
     /**
      */
     public boolean exists(File f) throws IOException {
-        return ndfs.exists(getPath(f));
+        return dfs.exists(getPath(f));
     }
 
     /**
      */
     public boolean isDirectory(File f) throws IOException {
-        return ndfs.isDirectory(getPath(f));
+        return dfs.isDirectory(getPath(f));
     }
 
     /**
      */
     public long getLength(File f) throws IOException {
-        NDFSFileInfo info[] = ndfs.listFiles(getPath(f));
+        DFSFileInfo info[] = dfs.listFiles(getPath(f));
         return info[0].getLen();
     }
 
     /**
      */
     public File[] listFilesRaw(File f) throws IOException {
-        NDFSFileInfo info[] = ndfs.listFiles(getPath(f));
+        DFSFileInfo info[] = dfs.listFiles(getPath(f));
         if (info == null) {
             return new File[0];
         } else {
-            File results[] = new NDFSFile[info.length];
+            File results[] = new DFSFile[info.length];
             for (int i = 0; i < info.length; i++) {
-                results[i] = new NDFSFile(info[i]);
+                results[i] = new DFSFile(info[i]);
             }
             return results;
         }
@@ -123,21 +123,21 @@
     /**
      */
     public void mkdirs(File f) throws IOException {
-        ndfs.mkdirs(getPath(f));
+        dfs.mkdirs(getPath(f));
     }
 
     /**
      * Obtain a filesystem lock at File f.
      */
     public void lock(File f, boolean shared) throws IOException {
-        ndfs.lock(getPath(f), ! shared);
+        dfs.lock(getPath(f), ! shared);
     }
 
     /**
      * Release a held lock
      */
     public void release(File f) throws IOException {
-        ndfs.release(getPath(f));
+        dfs.release(getPath(f));
     }
 
     /**
@@ -173,7 +173,7 @@
                 doFromLocalFile(contents[i], new File(dst, contents[i].getName()), deleteSource);
             }
         } else {
-            byte buf[] = new byte[this.nutchConf.getInt("io.file.buffer.size", 4096)];
+            byte buf[] = new byte[this.conf.getInt("io.file.buffer.size", 4096)];
             InputStream in = new BufferedInputStream(new FileInputStream(src));
             try {
                 OutputStream out = create(dst);
@@ -218,10 +218,10 @@
                 copyToLocalFile(contents[i], new File(dst, contents[i].getName()));
             }
         } else {
-            byte buf[] = new byte[this.nutchConf.getInt("io.file.buffer.size", 4096)];
+            byte buf[] = new byte[this.conf.getInt("io.file.buffer.size", 4096)];
             InputStream in = open(src);
             try {
-                OutputStream out = NutchFileSystem.getNamed("local", this.nutchConf).create(dst);
+                OutputStream out = NutchFileSystem.getNamed("local", this.conf).create(dst);
                 try {
                     int bytesRead = in.read(buf);
                     while (bytesRead >= 0) {
@@ -249,14 +249,14 @@
     }
 
     /**
-     * Move completed local data to NDFS destination
+     * Move completed local data to DFS destination
      */
     public void completeLocalOutput(File nfsOutputFile, File tmpLocalFile) throws IOException {
         moveFromLocalFile(tmpLocalFile, nfsOutputFile);
     }
 
     /**
-     * Fetch remote NDFS file, place at tmpLocalFile
+     * Fetch remote DFS file, place at tmpLocalFile
      */
     public File startLocalInput(File nfsInputFile, File tmpLocalFile) throws IOException {
         copyToLocalFile(nfsInputFile, tmpLocalFile);
@@ -268,29 +268,29 @@
      */
     public void completeLocalInput(File localFile) throws IOException {
         // Get rid of the local copy - we don't need it anymore.
-        FileUtil.fullyDelete(localFile, this.nutchConf);
+        FileUtil.fullyDelete(localFile, this.conf);
     }
 
     /**
      * Shut down the FS.  Not necessary for regular filesystem.
      */
     public void close() throws IOException {
-        ndfs.close();
+        dfs.close();
     }
 
     /**
      */
     public String toString() {
-        return "NDFS[" + ndfs + "]";
+        return "DFS[" + dfs + "]";
     }
 
     /**
      */
-    public NDFSClient getClient() {
-        return ndfs;
+    public DFSClient getClient() {
+        return dfs;
     }
     
-    private String getNDFSPath(File f) {
+    private String getDFSPath(File f) {
       List l = new ArrayList();
       l.add(f.getName());
       File parent = f.getParentFile();
@@ -301,7 +301,7 @@
       StringBuffer path = new StringBuffer();
       path.append(l.get(l.size() - 1));
       for (int i = l.size() - 2; i >= 0; i--) {
-        path.append(NDFSFile.NDFS_FILE_SEPARATOR);
+        path.append(DFSFile.DFS_FILE_SEPARATOR);
         path.append(l.get(i));
       }
       return path.toString();

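getDFSPath rebuilds a slash-separated DFS path by walking a File's parent chain, and getPath prefixes HOME_DIR when the result is relative. A self-contained sketch of the same walk on a Unix-style path (the input path is illustrative):

    import java.io.File;
    import java.util.ArrayList;
    import java.util.List;

    // Sketch of getDFSPath/getPath above: collect name components from the
    // parent chain, join with "/", and absolutize relative paths under
    // HOME_DIR ("/user/<user.name>/").
    public class DFSPathSketch {
        static String getDFSPath(File f) {
            List<String> l = new ArrayList<String>();
            l.add(f.getName());
            for (File p = f.getParentFile(); p != null; p = p.getParentFile()) {
                l.add(p.getName());
            }
            StringBuffer path = new StringBuffer();
            path.append(l.get(l.size() - 1));
            for (int i = l.size() - 2; i >= 0; i--) {
                path.append("/");
                path.append(l.get(i));
            }
            return path.toString();
        }

        public static void main(String[] args) {
            String homeDir = "/user/" + System.getProperty("user.name") + "/";
            String path = getDFSPath(new File("data/part-0"));
            if (!path.startsWith("/")) {
                path = getDFSPath(new File(homeDir, path)); // make absolute
            }
            System.out.println(path); // e.g. /user/<name>/data/part-0
        }
    }
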
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NDFSShell.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NDFSShell.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NDFSShell.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NDFSShell.java Fri Feb  3 11:45:32 2006
@@ -13,48 +13,47 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
-import org.apache.nutch.io.*;
-import org.apache.nutch.ipc.*;
-import org.apache.nutch.util.*;
-import org.apache.nutch.ndfs.*;
+import org.apache.hadoop.io.*;
+import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.dfs.*;
 
 import java.io.*;
 import java.net.*;
 import java.util.*;
 
 /**************************************************
- * This class provides some NDFS administrative access.
+ * This class provides some DFS administrative access.
  *
  * @author Mike Cafarella
  **************************************************/
-public class NDFSShell {
+public class DFSShell {
     NutchFileSystem nfs;
 
     /**
      */
-    public NDFSShell(NutchFileSystem nfs) {
+    public DFSShell(NutchFileSystem nfs) {
         this.nfs = nfs;
     }
 
-
     /**
-     * Add a local file to the indicated name in NDFS. src is kept.
+     * Add a local file to the indicated name in DFS. src is kept.
      */
     void copyFromLocal(File src, String dstf) throws IOException {
         nfs.copyFromLocalFile(src, new File(dstf));
     }
 
     /**
-     * Add a local file to the indicated name in NDFS. src is removed.
+     * Add a local file to the indicated name in DFS. src is removed.
      */
     void moveFromLocal(File src, String dstf) throws IOException {
         nfs.moveFromLocalFile(src, new File(dstf));
     }
 
     /**
-     * Obtain the indicated NDFS file and copy to the local name.
+     * Obtain the indicated DFS file and copy to the local name.
      * srcf is kept.
      */
     void copyToLocal(String srcf, File dst) throws IOException {
@@ -62,7 +61,7 @@
     }
 
     /**
-     * Obtain the indicated NDFS file and copy to the local name.
+     * Obtain the indicated DFS file and copy to the local name.
      * srcf is removed.
      */
     void moveToLocal(String srcf, File dst) throws IOException {
@@ -70,7 +69,7 @@
     }
 
     /**
-     * Get a listing of all files in NDFS at the indicated name
+     * Get a listing of all files in DFS at the indicated name
      */
     public void ls(String src) throws IOException {
         File items[] = nfs.listFiles(new File(src));
@@ -94,7 +93,7 @@
         } else {
             System.out.println("Found " + items.length + " items");
             for (int i = 0; i < items.length; i++) {
-                NDFSFile cur = (NDFSFile) items[i];
+                DFSFile cur = (DFSFile) items[i];
                 System.out.println(cur.getPath() + "\t" + cur.getContentsLength());
             }
         }
@@ -109,7 +108,7 @@
     }
     
     /**
-     * Rename an NDFS file
+     * Rename an DFS file
      */
     public void rename(String srcf, String dstf) throws IOException {
         if (nfs.rename(new File(srcf), new File(dstf))) {
@@ -120,10 +119,10 @@
     }
 
     /**
-     * Copy an NDFS file
+     * Copy an DFS file
      */
-    public void copy(String srcf, String dstf, NutchConf nutchConf) throws IOException {
-        if (FileUtil.copyContents(nfs, new File(srcf), new File(dstf), true, nutchConf)) {
+    public void copy(String srcf, String dstf, Configuration conf) throws IOException {
+        if (FileUtil.copyContents(nfs, new File(srcf), new File(dstf), true, conf)) {
             System.out.println("Copied " + srcf + " to " + dstf);
         } else {
             System.out.println("Copy failed");
@@ -131,7 +130,7 @@
     }
 
     /**
-     * Delete an NDFS file
+     * Delete an DFS file
      */
     public void delete(String srcf) throws IOException {
         if (nfs.delete(new File(srcf))) {
@@ -173,17 +172,17 @@
      * Gives a report on how the NutchFileSystem is doing
      */
     public void report() throws IOException {
-        if (nfs instanceof NDFSFileSystem) {
-            NDFSFileSystem ndfsfs = (NDFSFileSystem) nfs;
-            NDFSClient ndfs = ndfsfs.getClient();
-            long total = ndfs.totalRawCapacity();
-            long used = ndfs.totalRawUsed();
-            DatanodeInfo info[] = ndfs.datanodeReport();
+        if (nfs instanceof DistributedFileSystem) {
+            DistributedFileSystem dfsfs = (DistributedFileSystem) nfs;
+            DFSClient dfs = dfsfs.getClient();
+            long total = dfs.totalRawCapacity();
+            long used = dfs.totalRawUsed();
+            DatanodeInfo info[] = dfs.datanodeReport();
 
             long totalEffectiveBytes = 0;
             File topItems[] = nfs.listFiles(new File("/"));
             for (int i = 0; i < topItems.length; i++) {
-                NDFSFile cur = (NDFSFile) topItems[i];
+                DFSFile cur = (DFSFile) topItems[i];
                 totalEffectiveBytes += cur.getContentsLength();
             }
 
@@ -216,7 +215,7 @@
      */
     public static void main(String argv[]) throws IOException {
         if (argv.length < 1) {
-            System.out.println("Usage: java NDFSShell [-local | -ndfs <namenode:port>]" +
+            System.out.println("Usage: java DFSShell [-local | -dfs <namenode:port>]" +
                     " [-ls <path>] [-du <path>] [-mv <src> <dst>] [-cp <src> <dst>] [-rm <src>]" +
                     " [-put <localsrc> <dst>] [-copyFromLocal <localsrc> <dst>] [-moveFromLocal <localsrc> <dst>]" + 
                     " [-get <src> <localdst>] [-copyToLocal <src> <localdst>] [-moveToLocal <src> <localdst>]" +
@@ -224,11 +223,11 @@
             return;
         }
 
-        NutchConf nutchConf = new NutchConf();
+        Configuration conf = new Configuration();
         int i = 0;
-        NutchFileSystem nfs = NutchFileSystem.parseArgs(argv, i, nutchConf);
+        NutchFileSystem nfs = NutchFileSystem.parseArgs(argv, i, conf);
         try {
-            NDFSShell tc = new NDFSShell(nfs);
+            DFSShell tc = new DFSShell(nfs);
 
             String cmd = argv[i++];
             if ("-put".equals(cmd) || "-copyFromLocal".equals(cmd)) {
@@ -245,7 +244,7 @@
             } else if ("-mv".equals(cmd)) {
                 tc.rename(argv[i++], argv[i++]);
             } else if ("-cp".equals(cmd)) {
-                tc.copy(argv[i++], argv[i++], nutchConf);
+                tc.copy(argv[i++], argv[i++], conf);
             } else if ("-rm".equals(cmd)) {
                 tc.delete(argv[i++]);
             } else if ("-du".equals(cmd)) {

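The usage string above documents the renamed flag (-dfs replaces -ndfs). A sketch of invoking the shell programmatically; the namenode address and listing path are assumptions:

    import java.io.IOException;
    import org.apache.hadoop.fs.DFSShell;

    // Equivalent to: java DFSShell -dfs namenode.example.com:8009 -ls /user/mike
    // The -dfs and -ls flags come from the usage string above.
    public class DFSShellDemo {
        public static void main(String[] args) throws IOException {
            DFSShell.main(new String[] {
                "-dfs", "namenode.example.com:8009", "-ls", "/user/mike"
            });
        }
    }
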
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSDataInputStream.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSDataInputStream.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSDataInputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSDataInputStream.java Fri Feb  3 11:45:32 2006
@@ -13,19 +13,20 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.util.Arrays;
 import java.util.logging.*;
 import java.util.zip.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 /** Utility that wraps a {@link NFSInputStream} in a {@link DataInputStream}
  * and buffers input through a {@link BufferedInputStream}. */
 public class NFSDataInputStream extends DataInputStream {
   private static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.fs.DataInputStream");
+    LogFormatter.getLogger("org.apache.hadoop.fs.DataInputStream");
 
   private static final byte[] VERSION = NFSDataOutputStream.CHECKSUM_VERSION;
   private static final int HEADER_LENGTH = 8;
@@ -40,7 +41,7 @@
     private Checksum sum = new CRC32();
     private int inSum;
 
-    public Checker(NutchFileSystem fs, File file, NutchConf nutchConf)
+    public Checker(NutchFileSystem fs, File file, Configuration conf)
       throws IOException {
       super(fs.openRaw(file));
       
@@ -48,7 +49,7 @@
       this.file = file;
       File sumFile = fs.getChecksumFile(file);
       try {
-        this.sums = new NFSDataInputStream(fs.openRaw(sumFile), nutchConf);
+        this.sums = new NFSDataInputStream(fs.openRaw(sumFile), conf);
         byte[] version = new byte[VERSION.length];
         sums.readFully(version);
         if (!Arrays.equals(version, VERSION))
@@ -212,23 +213,23 @@
 }
   
   
-  public NFSDataInputStream(NutchFileSystem fs, File file, int bufferSize, NutchConf nutchConf)
+  public NFSDataInputStream(NutchFileSystem fs, File file, int bufferSize, Configuration conf)
       throws IOException {
     super(null);
-    this.in = new Buffer(new PositionCache(new Checker(fs, file, nutchConf)), bufferSize);
+    this.in = new Buffer(new PositionCache(new Checker(fs, file, conf)), bufferSize);
   }
   
   
-  public NFSDataInputStream(NutchFileSystem fs, File file, NutchConf nutchConf)
+  public NFSDataInputStream(NutchFileSystem fs, File file, Configuration conf)
     throws IOException {
     super(null);
-    int bufferSize = nutchConf.getInt("io.file.buffer.size", 4096);
-    this.in = new Buffer(new PositionCache(new Checker(fs, file, nutchConf)), bufferSize);
+    int bufferSize = conf.getInt("io.file.buffer.size", 4096);
+    this.in = new Buffer(new PositionCache(new Checker(fs, file, conf)), bufferSize);
   }
     
   /** Construct without checksums. */
-  public NFSDataInputStream(NFSInputStream in, NutchConf nutchConf) throws IOException {
-    this(in, nutchConf.getInt("io.file.buffer.size", 4096));
+  public NFSDataInputStream(NFSInputStream in, Configuration conf) throws IOException {
+    this(in, conf.getInt("io.file.buffer.size", 4096));
   }
   /** Construct without checksums. */
   public NFSDataInputStream(NFSInputStream in, int bufferSize)

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSDataOutputStream.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSDataOutputStream.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSDataOutputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSDataOutputStream.java Fri Feb  3 11:45:32 2006
@@ -13,12 +13,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.util.zip.Checksum;
 import java.util.zip.CRC32;
-import org.apache.nutch.util.NutchConf;
+import org.apache.hadoop.conf.Configuration;
 
 /** Utility that wraps a {@link NFSOutputStream} in a {@link DataOutputStream},
  * buffers output through a {@link BufferedOutputStream} and creates a checksum
@@ -34,12 +34,12 @@
     private int inSum;
     private int bytesPerSum;
 
-    public Summer(NutchFileSystem fs, File file, boolean overwrite, NutchConf nutchConf)
+    public Summer(NutchFileSystem fs, File file, boolean overwrite, Configuration conf)
       throws IOException {
       super(fs.createRaw(file, overwrite));
-      this.bytesPerSum = nutchConf.getInt("io.bytes.per.checksum", 512);
+      this.bytesPerSum = conf.getInt("io.bytes.per.checksum", 512);
       this.sums =
-        new NFSDataOutputStream(fs.createRaw(fs.getChecksumFile(file), true), nutchConf);
+        new NFSDataOutputStream(fs.createRaw(fs.getChecksumFile(file), true), conf);
 
       sums.write(CHECKSUM_VERSION, 0, CHECKSUM_VERSION.length);
       sums.writeInt(this.bytesPerSum);
@@ -122,15 +122,15 @@
   }
 
   public NFSDataOutputStream(NutchFileSystem fs, File file,
-                             boolean overwrite, NutchConf nutchConf)
+                             boolean overwrite, Configuration conf)
     throws IOException {
-    super(new Buffer(new PositionCache(new Summer(fs, file, overwrite, nutchConf)),
-            nutchConf.getInt("io.file.buffer.size", 4096)));
+    super(new Buffer(new PositionCache(new Summer(fs, file, overwrite, conf)),
+            conf.getInt("io.file.buffer.size", 4096)));
   }
 
   /** Construct without checksums. */
-  public NFSDataOutputStream(NFSOutputStream out, NutchConf nutchConf) throws IOException {
-    this(out, nutchConf.getInt("io.file.buffer.size", 4096));
+  public NFSDataOutputStream(NFSOutputStream out, Configuration conf) throws IOException {
+    this(out, conf.getInt("io.file.buffer.size", 4096));
   }
 
   /** Construct without checksums. */

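Summer writes a small header (CHECKSUM_VERSION bytes, then bytesPerSum as an int, 512 by default) followed by one CRC32 per bytesPerSum bytes of data. A self-contained sketch of that layout; the version bytes and the width of each stored sum are assumptions, not the committed format:

    import java.io.*;
    import java.util.zip.CRC32;

    // Sketch of the checksum-file layout Summer produces: version header,
    // bytesPerSum, then one CRC32 per chunk of data.
    public class ChecksumLayoutSketch {
        public static void main(String[] args) throws IOException {
            byte[] VERSION = new byte[] {'c', 'r', 'c', 0};  // placeholder
            int bytesPerSum = 512;          // io.bytes.per.checksum default
            byte[] data = new byte[1300];   // spans three chunks

            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            DataOutputStream sums = new DataOutputStream(buf);
            sums.write(VERSION, 0, VERSION.length);
            sums.writeInt(bytesPerSum);
            CRC32 sum = new CRC32();
            for (int off = 0; off < data.length; off += bytesPerSum) {
                sum.reset();
                sum.update(data, off, Math.min(bytesPerSum, data.length - off));
                sums.writeLong(sum.getValue());  // sum width assumed
            }
            sums.close();

            // Reading back, as the Checker on the input side does: verify
            // the version header, then consume bytesPerSum and the sums.
            DataInputStream in = new DataInputStream(
                    new ByteArrayInputStream(buf.toByteArray()));
            byte[] version = new byte[VERSION.length];
            in.readFully(version);
            System.out.println("bytesPerSum=" + in.readInt());
        }
    }
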
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSInputStream.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSInputStream.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSInputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSInputStream.java Fri Feb  3 11:45:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSOutputStream.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSOutputStream.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSOutputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NFSOutputStream.java Fri Feb  3 11:45:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NutchFileSystem.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NutchFileSystem.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NutchFileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/NutchFileSystem.java Fri Feb  3 11:45:32 2006
@@ -13,15 +13,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.net.*;
 import java.util.*;
 import java.util.logging.*;
 
-import org.apache.nutch.ndfs.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.dfs.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 /****************************************************************
  * An abstract base class for a fairly simple
@@ -38,38 +39,38 @@
  * knowledge and local instances of ShareGroup.
  * <p>
  * The local implementation is {@link LocalFileSystem} and distributed
- * implementation is {@link NDFSFileSystem}.
+ * implementation is {@link DistributedFileSystem}.
  * @author Mike Cafarella
  *****************************************************************/
 public abstract class NutchFileSystem {
-    public static final Logger LOG = LogFormatter.getLogger("org.apache.nutch.util.NutchFileSystem");
+    public static final Logger LOG = LogFormatter.getLogger("org.apache.hadoop.dfs.DistributedFileSystem");
 
     private static final HashMap NAME_TO_FS = new HashMap();
     /**
      * Parse the cmd-line args, starting at i.  Remove consumed args
      * from array.  We expect param in the form:
-     * '-local | -ndfs <namenode:port>'
+     * '-local | -dfs <namenode:port>'
      *
      * @deprecated use fs.default.name config option instead
      */
-    public static NutchFileSystem parseArgs(String argv[], int i, NutchConf nutchConf) throws IOException {
+    public static NutchFileSystem parseArgs(String argv[], int i, Configuration conf) throws IOException {
         /**
         if (argv.length - i < 1) {
-            throw new IOException("Must indicate filesystem type for NDFS");
+            throw new IOException("Must indicate filesystem type for DFS");
         }
         */
         int orig = i;
         NutchFileSystem nfs = null;
         String cmd = argv[i];
-        if ("-ndfs".equals(cmd)) {
+        if ("-dfs".equals(cmd)) {
             i++;
             InetSocketAddress addr = DataNode.createSocketAddr(argv[i++]);
-            nfs = new NDFSFileSystem(addr, nutchConf);
+            nfs = new DistributedFileSystem(addr, conf);
         } else if ("-local".equals(cmd)) {
             i++;
-            nfs = new LocalFileSystem(nutchConf);
+            nfs = new LocalFileSystem(conf);
         } else {
-            nfs = get(nutchConf);                          // using default
+            nfs = get(conf);                          // using default
             LOG.info("No FS indicated, using default:"+nfs.getName());
 
         }
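
As an aside for readers of this patch (illustration only, not part of the commit): a minimal sketch of driving the renamed flag parsing above. The namenode address is made up, and parseArgs is already deprecated here in favor of the fs.default.name config option.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.NutchFileSystem;

    public class ParseArgsDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // "-dfs host:port" selects a DistributedFileSystem; "-local" a LocalFileSystem.
        String[] argv = { "-dfs", "namenode.example.com:9000" };
        NutchFileSystem nfs = NutchFileSystem.parseArgs(argv, 0, conf);
        System.out.println("selected: " + nfs.getName());
      }
    }
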
@@ -81,25 +82,25 @@
     }
 
     /** Returns the configured filesystem implementation.*/
-    public static NutchFileSystem get(NutchConf conf) throws IOException {
+    public static NutchFileSystem get(Configuration conf) throws IOException {
       return getNamed(conf.get("fs.default.name", "local"), conf);
     }
 
-    protected NutchConf nutchConf;
+    protected Configuration conf;
     /** Returns a name for this filesystem, suitable to pass to {@link
     * NutchFileSystem#getNamed(String)}.*/
     public abstract String getName();
   
     /** Returns a named filesystem.  Names are either the string "local" or a
-     * host:port pair, naming an NDFS name server.*/
-    public static NutchFileSystem getNamed(String name, NutchConf nutchConf) throws IOException {
+     * host:port pair, naming a DFS name server.*/
+    public static NutchFileSystem getNamed(String name, Configuration conf) throws IOException {
       NutchFileSystem fs = (NutchFileSystem)NAME_TO_FS.get(name);
-      int ioFileBufferSize = nutchConf.getInt("io.file.buffer.size", 4096);
+      int ioFileBufferSize = conf.getInt("io.file.buffer.size", 4096);
       if (fs == null) {
         if ("local".equals(name)) {
-          fs = new LocalFileSystem(nutchConf);
+          fs = new LocalFileSystem(conf);
         } else {
-          fs = new NDFSFileSystem(DataNode.createSocketAddr(name), nutchConf);
+          fs = new DistributedFileSystem(DataNode.createSocketAddr(name), conf);
         }
         NAME_TO_FS.put(name, fs);
       }
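
Illustration only (not in the commit): the same lookup without command-line flags, using the renamed get/getNamed shown above. "local" maps to LocalFileSystem, a host:port pair maps to DistributedFileSystem, and results are cached in NAME_TO_FS.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.NutchFileSystem;

    public class GetNamedDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        NutchFileSystem local = NutchFileSystem.getNamed("local", conf);
        // Or let fs.default.name decide; it defaults to "local":
        NutchFileSystem byConf = NutchFileSystem.get(conf);
        System.out.println(local.getName() + " / " + byConf.getName());
      }
    }
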
@@ -122,8 +123,8 @@
     ///////////////////////////////////////////////////////////////
     /**
      */
-    public NutchFileSystem(NutchConf nutchConf) {
-        this.nutchConf = nutchConf;
+    public NutchFileSystem(Configuration conf) {
+        this.conf = conf;
     }
 
     /**
@@ -131,7 +132,7 @@
      * where portions of the given file can be found.  For a nonexistent 
      * file or regions, null will be returned.
      *
-     * This call is most helpful with NDFS, where it returns 
+     * This call is most helpful with DFS, where it returns 
      * hostnames of machines that contain the given file.
      *
     * The NutchFileSystem will simply return an element containing 'localhost'.
@@ -146,7 +147,7 @@
      * @param bufferSize the size of the buffer to be used.
      */
     public NFSDataInputStream open(File f, int bufferSize) throws IOException {
-      return new NFSDataInputStream(this, f, bufferSize, this.nutchConf);
+      return new NFSDataInputStream(this, f, bufferSize, this.conf);
     }
     
     /**
@@ -157,12 +158,12 @@
      * @param bufferSize the size of the buffer to be used.
      */
     public NFSDataInputStream open(File f) throws IOException {
-      return new NFSDataInputStream(this, f, nutchConf);
+      return new NFSDataInputStream(this, f, conf);
     }
 
     /**
      * Opens an InputStream for the indicated File, whether local
-     * or via NDFS.
+     * or via DFS.
      */
     public abstract NFSInputStream openRaw(File f) throws IOException;
 
@@ -171,7 +172,7 @@
      * Files are overwritten by default.
      */
     public NFSDataOutputStream create(File f) throws IOException {
-      return create(f, true,this.nutchConf.getInt("io.file.buffer.size", 4096));
+      return create(f, true,this.conf.getInt("io.file.buffer.size", 4096));
     }
 
     /**
@@ -183,7 +184,7 @@
      */
     public NFSDataOutputStream create(File f, boolean overwrite,
                                       int bufferSize) throws IOException {
-      return new NFSDataOutputStream(this, f, overwrite, this.nutchConf);
+      return new NFSDataOutputStream(this, f, overwrite, this.conf);
     }
 
     /** Opens an OutputStream at the indicated File.
@@ -213,7 +214,7 @@
 
     /**
      * Renames File src to File dst.  Can take place on local fs
-     * or remote NDFS.
+     * or remote DFS.
      */
     public boolean rename(File src, File dst) throws IOException {
       if (isDirectory(src)) {
@@ -233,7 +234,7 @@
 
     /**
      * Renames File src to File dst.  Can take place on local fs
-     * or remote NDFS.
+     * or remote DFS.
      */
     public abstract boolean renameRaw(File src, File dst) throws IOException;
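
Illustration only (not in the commit): the create/rename/open methods above, exercised under the renamed Configuration API. File names are arbitrary, and the sketch assumes NFSDataInputStream behaves as a DataInputStream.

    import java.io.File;
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.*;

    public class CreateRenameDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        NutchFileSystem nfs = NutchFileSystem.get(conf);
        NFSDataOutputStream out = nfs.create(new File("demo.tmp"));  // overwrites by default
        out.write("hello".getBytes());
        out.close();
        nfs.rename(new File("demo.tmp"), new File("demo.dat"));      // local fs or remote DFS
        NFSDataInputStream in = nfs.open(new File("demo.dat"));
        byte[] buf = new byte[5];
        in.readFully(buf);
        System.out.println(new String(buf));
        in.close();
      }
    }
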
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Seekable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Seekable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Seekable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Seekable.java Fri Feb  3 11:45:32 2006
@@ -13,7 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.nutch.fs;
+package org.apache.hadoop.fs;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayFile.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayFile.java Fri Feb  3 11:45:32 2006
@@ -14,11 +14,11 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.conf.*;
 
 /** A dense file-based mapping from integers to values. */
 public class ArrayFile extends MapFile {
@@ -46,8 +46,8 @@
     private LongWritable key = new LongWritable();
 
     /** Construct an array reader for the named file.*/
-    public Reader(NutchFileSystem nfs, String file, NutchConf nutchConf) throws IOException {
-      super(nfs, file, nutchConf);
+    public Reader(NutchFileSystem nfs, String file, Configuration conf) throws IOException {
+      super(nfs, file, conf);
     }
 
     /** Positions the reader before its <code>n</code>th value. */

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ArrayWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 import java.lang.reflect.Array;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BooleanWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BooleanWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BooleanWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BooleanWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.IOException;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/CompressedWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/CompressedWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/CompressedWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/CompressedWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.IOException;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/DataInputBuffer.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/DataInputBuffer.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/DataInputBuffer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/DataInputBuffer.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/DataOutputBuffer.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/DataOutputBuffer.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/DataOutputBuffer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/DataOutputBuffer.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/FloatWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/FloatWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/FloatWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/FloatWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/IntWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/IntWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/IntWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/IntWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/LongWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/LongWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/LongWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/LongWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MD5Hash.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MD5Hash.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MD5Hash.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MD5Hash.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.IOException;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java Fri Feb  3 11:45:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 import java.util.Arrays;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.conf.*;
 
 /** A file-based map from keys to values.
  * 
@@ -44,9 +44,6 @@
   /** The name of the data file. */
   public static final String DATA_FILE_NAME = "data";
 
-
-
-
   protected MapFile() {}                          // no public ctor
 
   /** Writes a new map. */
@@ -193,20 +190,20 @@
     public Class getValueClass() { return data.getValueClass(); }
 
     /** Construct a map reader for the named map.*/
-    public Reader(NutchFileSystem nfs, String dirName, NutchConf nutchConf) throws IOException {
-      this(nfs, dirName, null, nutchConf);
-      INDEX_SKIP = nutchConf.getInt("io.map.index.skip", 0);
+    public Reader(NutchFileSystem nfs, String dirName, Configuration conf) throws IOException {
+      this(nfs, dirName, null, conf);
+      INDEX_SKIP = conf.getInt("io.map.index.skip", 0);
     }
 
     /** Construct a map reader for the named map using the named comparator.*/
-    public Reader(NutchFileSystem nfs, String dirName, WritableComparator comparator, NutchConf nutchConf)
+    public Reader(NutchFileSystem nfs, String dirName, WritableComparator comparator, Configuration conf)
       throws IOException {
       File dir = new File(dirName);
       File dataFile = new File(dir, DATA_FILE_NAME);
       File indexFile = new File(dir, INDEX_FILE_NAME);
 
       // open the data
-      this.data = new SequenceFile.Reader(nfs, dataFile.getPath(),  nutchConf);
+      this.data = new SequenceFile.Reader(nfs, dataFile.getPath(),  conf);
       this.firstPosition = data.getPosition();
 
       if (comparator == null)
@@ -217,7 +214,7 @@
       this.getKey = this.comparator.newKey();
 
       // open the index
-      this.index = new SequenceFile.Reader(nfs, indexFile.getPath(), nutchConf);
+      this.index = new SequenceFile.Reader(nfs, indexFile.getPath(), conf);
     }
 
     private void readIndex() throws IOException {
@@ -420,7 +417,7 @@
    * @throws Exception
    */
   public static long fix(NutchFileSystem nfs, File dir,
-          Class keyClass, Class valueClass, boolean dryrun, NutchConf nutchConf) throws Exception {
+          Class keyClass, Class valueClass, boolean dryrun, Configuration conf) throws Exception {
     String dr = (dryrun ? "[DRY RUN ] " : "");
     File data = new File(dir, DATA_FILE_NAME);
     File index = new File(dir, INDEX_FILE_NAME);
@@ -433,7 +430,7 @@
       // no fixing needed
       return -1;
     }
-    SequenceFile.Reader dataReader = new SequenceFile.Reader(nfs, data.toString(), nutchConf);
+    SequenceFile.Reader dataReader = new SequenceFile.Reader(nfs, data.toString(), conf);
     if (!dataReader.getKeyClass().equals(keyClass)) {
       throw new Exception(dr + "Wrong key class in " + dir + ", expected" + keyClass.getName() +
               ", got " + dataReader.getKeyClass().getName());
@@ -478,7 +475,7 @@
     String in = args[0];
     String out = args[1];
 
-    NutchConf conf = new NutchConf();
+    Configuration conf = new Configuration();
     int ioFileBufferSize = conf.getInt("io.file.buffer.size", 4096);
     NutchFileSystem nfs = new LocalFileSystem(conf);
     MapFile.Reader reader = new MapFile.Reader(nfs, in, conf);
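
The main() fragment above already shows the renamed Reader; for completeness, a hedged sketch of a full write/read round trip. The Writer constructor, append() and get() are assumed from the unshown part of this class, and the directory name is made up.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.*;
    import org.apache.hadoop.io.*;

    public class MapFileDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        NutchFileSystem nfs = new LocalFileSystem(conf);
        MapFile.Writer writer = new MapFile.Writer(nfs, "demo.map", UTF8.class, UTF8.class);
        writer.append(new UTF8("k1"), new UTF8("v1"));  // keys must arrive in sorted order
        writer.close();
        MapFile.Reader reader = new MapFile.Reader(nfs, "demo.map", conf);
        UTF8 val = new UTF8();
        reader.get(new UTF8("k1"), val);  // binary search in the index, then seek in "data"
        System.out.println(val);
        reader.close();
      }
    }
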

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/NullWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/NullWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/NullWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/NullWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.lang.reflect.Proxy;
 import java.lang.reflect.Method;
@@ -25,17 +25,17 @@
 import java.io.*;
 import java.util.*;
 
-import org.apache.nutch.util.NutchConf;
-import org.apache.nutch.util.NutchConfigurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configurable;
 
 /** A polymorphic Writable that writes an instance with its class name.
  * Handles arrays, strings and primitive types without a Writable wrapper.
  */
-public class ObjectWritable implements Writable, NutchConfigurable {
+public class ObjectWritable implements Writable, Configurable {
 
   private Class declaredClass;
   private Object instance;
-  private NutchConf nutchConf;
+  private Configuration conf;
 
   public ObjectWritable() {}
   
@@ -61,7 +61,7 @@
   }
   
   public void readFields(DataInput in) throws IOException {
-    readObject(in, this, this.nutchConf);
+    readObject(in, this, this.conf);
   }
   
   public void write(DataOutput out) throws IOException {
@@ -169,14 +169,14 @@
   
   /** Read a {@link Writable}, {@link String}, primitive type, or an array of
    * the preceding. */
-  public static Object readObject(DataInput in, NutchConf nutchConf)
+  public static Object readObject(DataInput in, Configuration conf)
     throws IOException {
-    return readObject(in, null, nutchConf);
+    return readObject(in, null, conf);
   }
     
   /** Read a {@link Writable}, {@link String}, primitive type, or an array of
    * the preceding. */
-  public static Object readObject(DataInput in, ObjectWritable objectWritable, NutchConf nutchConf)
+  public static Object readObject(DataInput in, ObjectWritable objectWritable, Configuration conf)
     throws IOException {
     String className = UTF8.readString(in);
     Class declaredClass = (Class)PRIMITIVE_NAMES.get(className);
@@ -224,7 +224,7 @@
       int length = in.readInt();
       instance = Array.newInstance(declaredClass.getComponentType(), length);
       for (int i = 0; i < length; i++) {
-        Array.set(instance, i, readObject(in, nutchConf));
+        Array.set(instance, i, readObject(in, conf));
       }
       
     } else if (declaredClass == String.class) {        // String
@@ -233,8 +233,8 @@
     } else {                                      // Writable
       try {
         Writable writable = (Writable)declaredClass.newInstance();
-        if(writable instanceof NutchConfigurable) {
-          ((NutchConfigurable) writable).setConf(nutchConf);
+        if(writable instanceof Configurable) {
+          ((Configurable) writable).setConf(conf);
         }
         writable.readFields(in);
         instance = writable;
@@ -254,12 +254,12 @@
       
   }
 
-  public void setConf(NutchConf conf) {
-    this.nutchConf = conf;
+  public void setConf(Configuration conf) {
+    this.conf = conf;
   }
 
-  public NutchConf getConf() {
-    return this.nutchConf;
+  public Configuration getConf() {
+    return this.conf;
   }
   
 }
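
Illustration only: a round trip through the class as renamed. readObject(in, conf) appears in the hunk above; the static writeObject(out, instance, declaredClass, conf) counterpart is assumed, as are the buffer classes from this same package.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.*;

    public class ObjectWritableDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        DataOutputBuffer out = new DataOutputBuffer();
        // Records the class name, then the instance's own serialization (assumed signature).
        ObjectWritable.writeObject(out, new UTF8("hello"), UTF8.class, conf);
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        UTF8 restored = (UTF8) ObjectWritable.readObject(in, conf);
        System.out.println(restored);
      }
    }
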

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 import java.util.*;
@@ -25,13 +25,14 @@
 import java.rmi.server.UID;
 import java.security.MessageDigest;
 import org.apache.lucene.util.PriorityQueue;
-import org.apache.nutch.fs.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.LogFormatter;
 
 /** Support for flat files of binary key/value pairs. */
 public class SequenceFile {
   public static final Logger LOG =
-    LogFormatter.getLogger("org.apache.nutch.io.SequenceFile");
+    LogFormatter.getLogger("org.apache.hadoop.io.SequenceFile");
 
   private SequenceFile() {}                         // no public ctor
 
@@ -222,12 +223,12 @@
     private byte[] inflateIn = new byte[1024];
     private DataOutputBuffer inflateOut = new DataOutputBuffer();
     private Inflater inflater = new Inflater();
-    private NutchConf nutchConf;
+    private Configuration conf;
 
     /** Open the named file. */
-    public Reader(NutchFileSystem nfs, String file, NutchConf nutchConf) throws IOException {
-      this(nfs, file, nutchConf.getInt("io.file.buffer.size", 4096));
-      this.nutchConf = nutchConf;
+    public Reader(NutchFileSystem nfs, String file, Configuration conf) throws IOException {
+      this(nfs, file, conf.getInt("io.file.buffer.size", 4096));
+      this.conf = conf;
     }
 
     private Reader(NutchFileSystem nfs, String name, int bufferSize) throws IOException {
@@ -341,8 +342,8 @@
           }
           inBuf.reset(inflateOut.getData(), inflateOut.getLength());
         }
-        if(val instanceof NutchConfigurable) {
-          ((NutchConfigurable) val).setConf(this.nutchConf);
+        if(val instanceof Configurable) {
+          ((Configurable) val).setConf(this.conf);
         }
         val.readFields(inBuf);
 
@@ -390,9 +391,9 @@
 
     private void handleChecksumException(ChecksumException e)
       throws IOException {
-      if (this.nutchConf.getBoolean("io.skip.checksum.errors", false)) {
+      if (this.conf.getBoolean("io.skip.checksum.errors", false)) {
         LOG.warning("Bad checksum at "+getPosition()+". Skipping entries.");
-        sync(getPosition()+this.nutchConf.getInt("io.bytes.per.checksum", 512));
+        sync(getPosition()+this.conf.getInt("io.bytes.per.checksum", 512));
       } else {
         throw e;
       }
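
Illustration only: reading a sequence file under the renamed classes. next(key, val) is assumed from the unshown part of the Reader, and the file name is made up. With io.skip.checksum.errors set to true, handleChecksumException above logs a warning and syncs past the damage instead of rethrowing.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.*;
    import org.apache.hadoop.io.*;

    public class SeqReadDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        NutchFileSystem nfs = NutchFileSystem.getNamed("local", conf);
        SequenceFile.Reader reader = new SequenceFile.Reader(nfs, "demo.seq", conf);
        UTF8 key = new UTF8();
        UTF8 val = new UTF8();
        while (reader.next(key, val)) {        // assumed accessor
          System.out.println(key + "\t" + val);
        }
        reader.close();
      }
    }
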
@@ -469,22 +470,22 @@
     private Class keyClass;
     private Class valClass;
 
-    private NutchConf nutchConf;
+    private Configuration conf;
 
     /** Sort and merge files containing the named classes. */
-    public Sorter(NutchFileSystem nfs, Class keyClass, Class valClass, NutchConf nutchConf)  {
-      this(nfs, new WritableComparator(keyClass), valClass, nutchConf);
+    public Sorter(NutchFileSystem nfs, Class keyClass, Class valClass, Configuration conf)  {
+      this(nfs, new WritableComparator(keyClass), valClass, conf);
     }
 
     /** Sort and merge using an arbitrary {@link WritableComparator}. */
-    public Sorter(NutchFileSystem nfs, WritableComparator comparator, Class valClass, NutchConf nutchConf) {
+    public Sorter(NutchFileSystem nfs, WritableComparator comparator, Class valClass, Configuration conf) {
       this.nfs = nfs;
       this.comparator = comparator;
       this.keyClass = comparator.getKeyClass();
       this.valClass = valClass;
-      this.memory = nutchConf.getInt("io.sort.mb", 100) * 1024 * 1024;
-      this.factor = nutchConf.getInt("io.sort.factor", 100);
-      this.nutchConf = nutchConf;
+      this.memory = conf.getInt("io.sort.mb", 100) * 1024 * 1024;
+      this.factor = conf.getInt("io.sort.factor", 100);
+      this.conf = conf;
     }
 
     /** Set the number of streams to merge at once.*/
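
Illustration only: the two knobs the Sorter constructor reads above, plus a hedged sketch of driving a sort. setInt is assumed to exist on Configuration as it did on NutchConf, sort(in, out) is assumed as the entry point behind sortPass(), and file names are made up.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.*;
    import org.apache.hadoop.io.*;

    public class SeqSortDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.setInt("io.sort.mb", 50);      // in-memory sort buffer, megabytes
        conf.setInt("io.sort.factor", 10);  // streams merged at once
        NutchFileSystem nfs = NutchFileSystem.getNamed("local", conf);
        SequenceFile.Sorter sorter =
            new SequenceFile.Sorter(nfs, UTF8.class, UTF8.class, conf);
        sorter.sort("unsorted.seq", "sorted.seq");  // assumed entry point
      }
    }
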
@@ -518,7 +519,7 @@
 
     private int sortPass() throws IOException {
       LOG.fine("running sort pass");
-      SortPass sortPass = new SortPass(this.nutchConf);         // make the SortPass
+      SortPass sortPass = new SortPass(this.conf);         // make the SortPass
       try {
         return sortPass.run();                    // run it
       } finally {
@@ -541,8 +542,8 @@
       private NFSDataOutputStream out;
         private String outName;
 
-      public SortPass(NutchConf nutchConf) throws IOException {
-        in = new Reader(nfs, inFile, nutchConf);
+      public SortPass(Configuration conf) throws IOException {
+        in = new Reader(nfs, inFile, conf);
       }
       
       public int run() throws IOException {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SetFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SetFile.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SetFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SetFile.java Fri Feb  3 11:45:32 2006
@@ -14,12 +14,12 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 
-import org.apache.nutch.fs.*;
-import org.apache.nutch.util.*;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.conf.*;
 
 /** A file-based set of keys. */
 public class SetFile extends MapFile {
@@ -51,14 +51,14 @@
   public static class Reader extends MapFile.Reader {
 
     /** Construct a set reader for the named set.*/
-    public Reader(NutchFileSystem nfs, String dirName, NutchConf nutchConf) throws IOException {
-      super(nfs, dirName, nutchConf);
+    public Reader(NutchFileSystem nfs, String dirName, Configuration conf) throws IOException {
+      super(nfs, dirName, conf);
     }
 
     /** Construct a set reader for the named set using the named comparator.*/
-    public Reader(NutchFileSystem nfs, String dirName, WritableComparator comparator, NutchConf nutchConf)
+    public Reader(NutchFileSystem nfs, String dirName, WritableComparator comparator, Configuration conf)
       throws IOException {
-      super(nfs, dirName, comparator, nutchConf);
+      super(nfs, dirName, comparator, conf);
     }
 
     // javadoc inherited

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/TwoDArrayWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/TwoDArrayWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/TwoDArrayWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/TwoDArrayWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 import java.lang.reflect.Array;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/UTF8.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/UTF8.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/UTF8.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/UTF8.java Fri Feb  3 11:45:32 2006
@@ -14,14 +14,14 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.IOException;
 import java.io.DataInput;
 import java.io.DataOutput;
 
 import java.util.logging.Logger;
-import org.apache.nutch.util.LogFormatter;
+import org.apache.hadoop.util.LogFormatter;
 
 /** A WritableComparable for strings that uses the UTF8 encoding.
  * 
@@ -30,7 +30,7 @@
  * @author Doug Cutting
  */
 public class UTF8 implements WritableComparable {
-  private static final Logger LOG= LogFormatter.getLogger("org.apache.nutch.io.UTF8");
+  private static final Logger LOG= LogFormatter.getLogger("org.apache.hadoop.io.UTF8");
   private static final DataOutputBuffer OBUF = new DataOutputBuffer();
   private static final DataInputBuffer IBUF = new DataInputBuffer();
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionMismatchException.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionMismatchException.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionMismatchException.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionMismatchException.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.IOException;
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/VersionedWritable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.DataOutput;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Writable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Writable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Writable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Writable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.DataOutput;
 import java.io.DataInput;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparable.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparable.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 /** An interface which extends both {@link Writable} and {@link Comparable}.
  *

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java?rev=374733&r1=374710&r2=374733&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java Fri Feb  3 11:45:32 2006
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package org.apache.nutch.io;
+package org.apache.hadoop.io;
 
 import java.io.*;
 import java.util.*;


