From: omal...@apache.org
Subject: svn commit: r761837 - in /hadoop/core/trunk: ./ src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/ src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/ src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/fs/ftp/ src/core/org/apa...
Date: Fri, 03 Apr 2009 23:17:28 GMT
Author: omalley
Date: Fri Apr  3 23:17:27 2009
New Revision: 761837

URL: http://svn.apache.org/viewvc?rev=761837&view=rev
Log:
HADOOP-5450. Add application-specific data types to streaming's typed bytes
interface. (Klaas Bosteels via omalley)
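
The reserved range works end to end with just two calls: the new writeBytes(byte[], int) overload on the producer side, and the existing read() on the consumer side, which now hands back the payload as a Buffer for any code in [50, 200]. A minimal round-trip sketch; the public DataOutput/DataInput stream constructors and the org.apache.hadoop.record.Buffer import are assumptions, since neither appears in this diff:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.record.Buffer;
    import org.apache.hadoop.typedbytes.TypedBytesInput;
    import org.apache.hadoop.typedbytes.TypedBytesOutput;

    public class AppSpecificRoundTrip {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // Emit three payload bytes under application-specific typecode 100.
        new TypedBytesOutput(new DataOutputStream(baos))
            .writeBytes(new byte[] { 1, 2, 3 }, 100);

        DataInputStream dis =
            new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        // Codes in [50, 200] come back as a Buffer over the raw payload.
        Buffer buf = (Buffer) new TypedBytesInput(dis).read();
        System.out.println(buf.getCount());   // prints 3
      }
    }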

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/Type.java
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesOutput.java
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableInput.java
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableOutput.java
    hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/package.html
    hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Fri Apr  3 23:17:27 2009
@@ -74,6 +74,9 @@
     HADOOP-5257. HDFS servers may start and stop external components through
     a plugin interface. (Carlos Valiente via dhruba)
 
+    HADOOP-5450. Add application-specific data types to streaming's typed bytes
+    interface. (Klaas Bosteels via omalley)
+
   IMPROVEMENTS
 
     HADOOP-4565. Added CombineFileInputFormat to use data locality information

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/Type.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/Type.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/Type.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/Type.java Fri Apr  3 23:17:27 2009
@@ -23,6 +23,7 @@
  */
 public enum Type {
 
+  // codes for supported types (< 50):
   BYTES(0),
   BYTE(1),
   BOOL(2),
@@ -34,6 +35,11 @@
   VECTOR(8),
   LIST(9),
   MAP(10),
+  
+  // application-specific codes (50-200):
+  WRITABLE(50),
+  
+  // low-level codes (> 200):
   MARKER(255);
 
   final int code;

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesInput.java Fri Apr  3 23:17:27 2009
@@ -101,6 +101,8 @@
       return readMap();
     } else if (code == Type.MARKER.code) {
       return null;
+    } else if (50 <= code && code <= 200) { // application-specific typecodes
+      return new Buffer(readBytes());
     } else {
       throw new RuntimeException("unknown type");
     }
@@ -146,6 +148,8 @@
       return readRawMap();
     } else if (code == Type.MARKER.code) {
       return null;
+    } else if (50 <= code && code <= 200) { // application-specific typecodes
+      return readRawBytes();
     } else {
       throw new RuntimeException("unknown type");
     }
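
Both hunks branch on the same range but keep different amounts of data: read() strips the 5-byte header (one typecode byte plus a four-byte length) and wraps only the payload, while readRaw() preserves the framing, which is why the TestIO change below compares against an offset of 5. A sketch under the same constructor assumptions as above:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.record.Buffer;
    import org.apache.hadoop.typedbytes.TypedBytesInput;
    import org.apache.hadoop.typedbytes.TypedBytesOutput;

    public class ReadVersusReadRaw {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        new TypedBytesOutput(new DataOutputStream(baos))
            .writeBytes(new byte[] { 9, 9 }, 150);
        byte[] frame = baos.toByteArray();        // 7 bytes: code + length + payload

        TypedBytesInput in = new TypedBytesInput(
            new DataInputStream(new ByteArrayInputStream(frame)));
        Buffer payload = (Buffer) in.read();      // payload only
        System.out.println(payload.getCount());   // prints 2

        in = new TypedBytesInput(
            new DataInputStream(new ByteArrayInputStream(frame)));
        System.out.println(in.readRaw().length);  // prints 7: framing kept
      }
    }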

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesOutput.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesOutput.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesOutput.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesOutput.java Fri Apr  3 23:17:27 2009
@@ -124,16 +124,27 @@
   }
 
   /**
-   * Writes a bytes array as a typed bytes sequence.
+   * Writes a bytes array as a typed bytes sequence, using a given typecode.
    * 
    * @param bytes the bytes array to be written
+   * @param code the typecode to use
    * @throws IOException
    */
-  public void writeBytes(byte[] bytes) throws IOException {
-    out.write(Type.BYTES.code);
+  public void writeBytes(byte[] bytes, int code) throws IOException {
+    out.write(code);
     out.writeInt(bytes.length);
     out.write(bytes);
   }
+  
+  /**
+   * Writes a bytes array as a typed bytes sequence.
+   * 
+   * @param bytes the bytes array to be written
+   * @throws IOException
+   */
+  public void writeBytes(byte[] bytes) throws IOException {
+    writeBytes(bytes, Type.BYTES.code);
+  }
 
   /**
    * Writes a byte as a typed bytes sequence.
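
On the wire, writeBytes(bytes, code) produces a fixed frame: one typecode byte, a four-byte big-endian length, then the payload. A hand-rolled equivalent using only java.io, to make the layout concrete:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class TypedBytesFrame {
      // Mirrors writeBytes(byte[], int): [code:1][length:4][payload:length].
      static byte[] encode(int code, byte[] payload) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        dos.write(code);               // 1-byte typecode
        dos.writeInt(payload.length);  // 4-byte big-endian length
        dos.write(payload);            // raw payload bytes
        return baos.toByteArray();
      }

      public static void main(String[] args) throws IOException {
        System.out.println(encode(100, new byte[] { 1, 2, 3 }).length); // 8 = 5 + 3
      }
    }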

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableInput.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableInput.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableInput.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableInput.java Fri Apr  3 23:17:27 2009
@@ -18,9 +18,13 @@
 
 package org.apache.hadoop.typedbytes;
 
+import java.io.ByteArrayInputStream;
 import java.io.DataInput;
+import java.io.DataInputStream;
 import java.io.IOException;
 
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.ByteWritable;
@@ -36,17 +40,22 @@
 import org.apache.hadoop.io.VLongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * Provides functionality for reading typed bytes as Writable objects.
  * 
  * @see TypedBytesInput
  */
-public class TypedBytesWritableInput {
+public class TypedBytesWritableInput implements Configurable {
 
   private TypedBytesInput in;
+  private Configuration conf;
 
-  private TypedBytesWritableInput() {}
+  private TypedBytesWritableInput() {
+    conf = new Configuration();
+  }
 
   private void setTypedBytesInput(TypedBytesInput in) {
     this.in = in;
@@ -86,6 +95,7 @@
 
   /** Creates a new instance of TypedBytesWritableInput. */
   public TypedBytesWritableInput(TypedBytesInput in) {
+    this();
     this.in = in;
   }
 
@@ -120,6 +130,8 @@
       return readArray();
     case MAP:
       return readMap();
+    case WRITABLE:
+      return readWritable();
     default:
       throw new RuntimeException("unknown type");
     }
@@ -151,6 +163,8 @@
       return ArrayWritable.class;
     case MAP:
       return MapWritable.class;
+    case WRITABLE:
+      return Writable.class;
     default:
       throw new RuntimeException("unknown type");
     }
@@ -331,5 +345,36 @@
   public SortedMapWritable readSortedMap() throws IOException {
     return readSortedMap(null);
   }
+  
+  public Writable readWritable(Writable writable) throws IOException {
+    ByteArrayInputStream bais = new ByteArrayInputStream(in.readBytes());
+    DataInputStream dis = new DataInputStream(bais);
+    String className = WritableUtils.readString(dis);
+    if (writable == null) {
+      try {
+        Class<? extends Writable> cls = 
+          conf.getClassByName(className).asSubclass(Writable.class);
+        writable = (Writable) ReflectionUtils.newInstance(cls, conf);
+      } catch (ClassNotFoundException e) {
+        throw new IOException(e);
+      }
+    } else if (!writable.getClass().getName().equals(className)) {
+      throw new IOException("wrong Writable class given");
+    }
+    writable.readFields(dis);
+    return writable;
+  }
+
+  public Writable readWritable() throws IOException {
+    return readWritable(null);
+  }
 
+  public Configuration getConf() {
+    return conf;
+  }
+
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+  
 }
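
readWritable() reconstructs the embedded class through conf.getClassByName(), which is the reason the class now implements Configurable: jobs can point it at a Configuration whose class loader knows their application classes. A round-trip sketch; the TypedBytesOutput(DataOutput) constructor is an assumption, while the TypedBytesWritableInput(DataInput) form is the one TestIO uses below:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.typedbytes.TypedBytesOutput;
    import org.apache.hadoop.typedbytes.TypedBytesWritableInput;
    import org.apache.hadoop.typedbytes.TypedBytesWritableOutput;

    public class WritableRoundTrip {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        new TypedBytesWritableOutput(new TypedBytesOutput(dos))
            .writeWritable(new Text("hello"));

        DataInputStream dis =
            new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        // read() sees typecode 50, dispatches to readWritable(), and the
        // class name in the payload is resolved and instantiated reflectively.
        Writable w = new TypedBytesWritableInput(dis).read();
        System.out.println(w);  // prints hello
      }
    }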

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableOutput.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableOutput.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableOutput.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/TypedBytesWritableOutput.java Fri Apr  3 23:17:27 2009
@@ -40,6 +40,7 @@
 import org.apache.hadoop.io.VLongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.record.Record;
 
 /**
@@ -91,6 +92,7 @@
 
   /** Creates a new instance of TypedBytesWritableOutput. */
   public TypedBytesWritableOutput(TypedBytesOutput out) {
+    this();
     this.out = out;
   }
 
@@ -209,13 +211,12 @@
   }
 
   public void writeWritable(Writable w) throws IOException {
-    out.writeVectorHeader(2);
-    out.writeString(w.getClass().getName());
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     DataOutputStream dos = new DataOutputStream(baos);
+    WritableUtils.writeString(dos, w.getClass().getName());
     w.write(dos);
     dos.close();
-    out.writeBytes(baos.toByteArray());
+    out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
   }
 
 }
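
The rewrite also changes the record's shape: a Writable used to travel as a two-element vector (class name string plus a BYTES record), but is now a single record under typecode 50 whose payload holds the WritableUtils-encoded class name followed by the instance's own bytes. A quick check of the leading byte, under the same constructor assumptions as the previous sketch:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.typedbytes.TypedBytesOutput;
    import org.apache.hadoop.typedbytes.TypedBytesWritableOutput;

    public class LeadingByteCheck {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        new TypedBytesWritableOutput(
            new TypedBytesOutput(new DataOutputStream(baos)))
            .writeWritable(new IntWritable(42));
        // First byte is Type.WRITABLE.code, then the 4-byte payload length.
        System.out.println(baos.toByteArray()[0]);  // prints 50
      }
    }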

Modified: hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/package.html
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/package.html?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/package.html (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/java/org/apache/hadoop/typedbytes/package.html Fri Apr  3 23:17:27 2009
@@ -41,6 +41,8 @@
 <tr><td><i>10</i></td><td>A map.</td></tr>
 </table>
 </p>
+The type codes <i>50</i> to <i>200</i> are treated as aliases for <i>0</i>, and can thus be used for
+application-specific serialization.
 
 <h3>Subsequent Bytes</h3>
 

Modified: hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java (original)
+++ hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/typedbytes/TestIO.java Fri Apr  3 23:17:27 2009
@@ -40,6 +40,7 @@
 import org.apache.hadoop.io.DoubleWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.ObjectWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.VIntWritable;
 import org.apache.hadoop.io.VLongWritable;
@@ -80,6 +81,7 @@
       (byte) 123, true, 12345, 123456789L, (float) 1.2, 1.234,
       "random string", vector, list, map 
     };
+    byte[] appSpecificBytes = new byte[] { 1, 2, 3 };
 
     FileOutputStream ostream = new FileOutputStream(tmpfile);
     DataOutputStream dostream = new DataOutputStream(ostream);
@@ -87,6 +89,7 @@
     for (Object obj : objects) {
       out.write(obj);
     }
+    out.writeBytes(appSpecificBytes, 100);
     dostream.close();
     ostream.close();
 
@@ -96,6 +99,7 @@
     for (Object obj : objects) {
       assertEquals(obj, in.read());
     }
+    assertEquals(new Buffer(appSpecificBytes), in.read());
     distream.close();
     istream.close();
 
@@ -114,6 +118,9 @@
       dis = new DataInputStream(bais);
       assertEquals(obj, (new TypedBytesInput(dis)).read());
     }
+    byte[] rawBytes = in.readRaw();
+    assertEquals(new Buffer(appSpecificBytes),
+      new Buffer(rawBytes, 5, rawBytes.length - 5));
     distream.close();
     istream.close();
   }
@@ -164,7 +171,8 @@
       new ByteWritable((byte) 123), new BooleanWritable(true),
       new VIntWritable(12345), new VLongWritable(123456789L),
       new FloatWritable((float) 1.2), new DoubleWritable(1.234),
-      new Text("random string")
+      new Text("random string"),
+      new ObjectWritable("test")
     };
     TypedBytesWritable tbw = new TypedBytesWritable();
     tbw.setValue("typed bytes text");
@@ -201,7 +209,7 @@
 
     TypedBytesWritableInput in = new TypedBytesWritableInput(distream);
     for (Writable w : writables) {
-      assertEquals(w, in.read());
+      assertEquals(w.toString(), in.read().toString());
     }
 
     assertEquals(tbw.getValue().toString(), in.read().toString());

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java Fri Apr  3 23:17:27 2009
@@ -23,6 +23,7 @@
 import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.IdentityHashMap;
 import java.util.Iterator;
@@ -79,10 +80,6 @@
     statisticsTable =
       new IdentityHashMap<Class<? extends FileSystem>, Statistics>();
   
-  /** Recording statistics per FileSystem URI scheme */
-  private static final Map<String, Statistics> statsByUriScheme = 
-    new HashMap<String, Statistics>();
-
   /**
    * The statistics for this file system.
    */
@@ -128,8 +125,9 @@
    *   for this FileSystem
    * @param conf the configuration
    */
-  public abstract void initialize(URI name, Configuration conf)
-    throws IOException;
+  public void initialize(URI name, Configuration conf) throws IOException {
+    statistics = getStatistics(name.getScheme(), getClass());    
+  }
 
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
   public abstract URI getUri();
@@ -303,7 +301,6 @@
 
   protected FileSystem() {
     super(null);
-    statistics = getStatistics(this.getClass());
   }
 
   /** Check that a Path belongs to this FileSystem. */
@@ -1407,7 +1404,6 @@
     }
     FileSystem fs = (FileSystem)ReflectionUtils.newInstance(clazz, conf);
     fs.initialize(uri, conf);
-    statsByUriScheme.put(uri.getScheme(), fs.statistics);
     return fs;
   }
 
@@ -1537,9 +1533,14 @@
   }
   
   public static final class Statistics {
+    private final String scheme;
     private AtomicLong bytesRead = new AtomicLong();
     private AtomicLong bytesWritten = new AtomicLong();
     
+    public Statistics(String scheme) {
+      this.scheme = scheme;
+    }
+
     /**
      * Increment the bytes read in the statistics
      * @param newBytes the additional bytes read
@@ -1576,32 +1577,65 @@
       return bytesRead + " bytes read and " + bytesWritten + 
              " bytes written";
     }
+    
+    /**
+     * Reset the counts of bytes to 0.
+     */
+    public void reset() {
+      bytesWritten.set(0);
+      bytesRead.set(0);
+    }
+    
+    /**
+     * Get the uri scheme associated with this statistics object.
+     * @return the uri scheme
+     */
+    public String getScheme() {
+      return scheme;
+    }
   }
   
   /**
    * Get the Map of Statistics object indexed by URI Scheme.
    * @return a Map having a key as URI scheme and value as Statistics object
+   * @deprecated use {@link #getAllStatistics()} instead
    */
   public static synchronized Map<String, Statistics> getStatistics() {
-    return statsByUriScheme;
+    Map<String, Statistics> result = new HashMap<String, Statistics>();
+    for(Statistics stat: statisticsTable.values()) {
+      result.put(stat.getScheme(), stat);
+    }
+    return result;
+  }
+
+  /**
+   * Return the Statistics objects for all known FileSystem classes
+   */
+  public static synchronized List<Statistics> getAllStatistics() {
+    return new ArrayList<Statistics>(statisticsTable.values());
   }
   
   /**
    * Get the statistics for a particular file system
-   * @deprecated Consider using {@link #getStatistics()} instead.
    * @param cls the class to lookup
    * @return a statistics object
    */
   public static synchronized 
-  Statistics getStatistics(Class<? extends FileSystem> cls) {
+  Statistics getStatistics(String scheme, Class<? extends FileSystem> cls) {
     Statistics result = statisticsTable.get(cls);
     if (result == null) {
-      result = new Statistics();
+      result = new Statistics(scheme);
       statisticsTable.put(cls, result);
     }
     return result;
   }
   
+  public static synchronized void clearStatistics() {
+    for(Statistics stat: statisticsTable.values()) {
+      stat.reset();
+    }
+  }
+
   public static synchronized
   void printStatistics() throws IOException {
     for (Map.Entry<Class<? extends FileSystem>, Statistics> pair: 
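
With the scheme stored on each Statistics object, per-scheme reporting needs no side table: getAllStatistics() lists every registered object, getScheme() labels it, and clearStatistics() zeroes the counters (as Child.java does below before each task). A usage sketch:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class StatsReport {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem.get(URI.create("file:///"), conf); // registers "file" statistics

        for (FileSystem.Statistics stat : FileSystem.getAllStatistics()) {
          // toString() reports "<n> bytes read and <m> bytes written".
          System.out.println(stat.getScheme() + ": " + stat);
        }
        FileSystem.clearStatistics();  // reset all counters to zero
      }
    }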

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java Fri Apr  3 23:17:27 2009
@@ -59,7 +59,8 @@
 
   public URI getUri() { return NAME; }
   
-  public void initialize(URI uri, Configuration conf) {
+  public void initialize(URI uri, Configuration conf) throws IOException {
+    super.initialize(uri, conf);
     setConf(conf);
   }
   

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java Fri Apr  3 23:17:27 2009
@@ -56,6 +56,7 @@
 
   @Override
   public void initialize(URI uri, Configuration conf) throws IOException { // get
+    super.initialize(uri, conf);
     // get host information from uri (overrides info in conf)
     String host = uri.getHost();
     host = (host == null) ? conf.get("fs.ftp.host", null) : host;
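
Since initialize() is no longer abstract and the base version assigns the per-scheme statistics object, every FileSystem implementation, in tree or out of tree, must chain to it first; that is the one-line change repeated across this and the remaining files. The pattern, shown with a hypothetical subclass:

    import java.io.IOException;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    // Hypothetical out-of-tree implementation, showing only the new contract.
    public abstract class MyFileSystem extends FileSystem {
      @Override
      public void initialize(URI uri, Configuration conf) throws IOException {
        super.initialize(uri, conf); // sets this.statistics for uri.getScheme()
        setConf(conf);
        // ...implementation-specific setup...
      }
    }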

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java Fri Apr  3 23:17:27 2009
@@ -62,27 +62,29 @@
 
     @Override
     public void initialize(URI uri, Configuration conf) throws IOException {
-        try {
-	    if (kfsImpl == null) {
-                if (uri.getHost() == null) {
-                    kfsImpl = new KFSImpl(conf.get("fs.kfs.metaServerHost", ""),
-                                          conf.getInt("fs.kfs.metaServerPort", -1),
-                                          statistics);
-                } else {
-                    kfsImpl = new KFSImpl(uri.getHost(), uri.getPort(), statistics);
-                }
-	    }
-
-            this.localFs = FileSystem.getLocal(conf);
-            this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
-            this.workingDir = new Path("/user", System.getProperty("user.name")).makeQualified(this);
-            setConf(conf);
-            
-        } catch (Exception e) {
-            e.printStackTrace();
-            System.out.println("Unable to initialize KFS");
-            System.exit(-1);
+      super.initialize(uri, conf);
+      try {
+        if (kfsImpl == null) {
+          if (uri.getHost() == null) {
+            kfsImpl = new KFSImpl(conf.get("fs.kfs.metaServerHost", ""),
+                                  conf.getInt("fs.kfs.metaServerPort", -1),
+                                  statistics);
+          } else {
+            kfsImpl = new KFSImpl(uri.getHost(), uri.getPort(), statistics);
+          }
         }
+
+        this.localFs = FileSystem.getLocal(conf);
+        this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
+        this.workingDir = new Path("/user", System.getProperty("user.name")
+                                   ).makeQualified(this);
+        setConf(conf);
+
+      } catch (Exception e) {
+        e.printStackTrace();
+        System.out.println("Unable to initialize KFS");
+        System.exit(-1);
+      }
     }
 
     @Override

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java Fri Apr  3 23:17:27 2009
@@ -70,6 +70,7 @@
 
   @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
+    super.initialize(uri, conf);
     if (store == null) {
       store = createDefaultStore(conf);
     }

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Fri Apr  3 23:17:27 2009
@@ -209,6 +209,7 @@
   
   @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
+    super.initialize(uri, conf);
     if (store == null) {
       store = createDefaultStore(conf);
     }

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java Fri Apr  3 23:17:27 2009
@@ -80,7 +80,7 @@
   private int socketTimeout;
   private int datanodeWriteTimeout;
   final int writePacketSize;
-  private FileSystem.Statistics stats;
+  private final FileSystem.Statistics stats;
   private int maxBlockAcquireFailures;
     
  
@@ -145,7 +145,7 @@
    * Create a new DFSClient connected to the default namenode.
    */
   public DFSClient(Configuration conf) throws IOException {
-    this(NameNode.getAddress(conf), conf);
+    this(NameNode.getAddress(conf), conf, null);
   }
 
   /** 
@@ -188,8 +188,7 @@
 
   public DFSClient(InetSocketAddress nameNodeAddr, 
                    Configuration conf) throws IOException {
-    this(nameNodeAddr, conf, 
-         FileSystem.getStatistics(DistributedFileSystem.class));
+    this(nameNodeAddr, conf, null);
   }
 
   private void checkOpen() throws IOException {

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java Fri Apr  3 23:17:27 2009
@@ -24,7 +24,6 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -35,7 +34,7 @@
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.DFSClient.DFSOutputStream;
 import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.util.*;
+import org.apache.hadoop.util.Progressable;
 
 
 /****************************************************************
@@ -70,6 +69,7 @@
 
   @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
+    super.initialize(uri, conf);
     setConf(conf);
 
     String host = uri.getHost();

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Fri Apr  3 23:17:27 2009
@@ -81,6 +81,7 @@
 
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
+    super.initialize(name, conf);
     setConf(conf);
     try {
       this.ugi = UnixUserGroupInformation.login(conf, true);

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java Fri Apr  3 23:17:27 2009
@@ -130,7 +130,9 @@
         task = myTask.getTask();
         taskid = task.getTaskID();
         isCleanup = task.isTaskCleanupTask();
-        
+        // reset the statistics for the task
+        FileSystem.clearStatistics();
+
         //create the index file so that the log files 
         //are viewable immediately
         TaskLog.syncLogs(firstTaskid, taskid, isCleanup);

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java?rev=761837&r1=761836&r2=761837&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Task.java Fri Apr  3 23:17:27 2009
@@ -37,6 +37,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
@@ -634,15 +635,14 @@
      new HashMap<String, FileSystemStatisticUpdater>();
   
   private synchronized void updateCounters() {
-    for(Map.Entry<String, FileSystem.Statistics> entry : 
-      FileSystem.getStatistics().entrySet()) {
-      String uriScheme = entry.getKey();
+    for(Statistics stat: FileSystem.getAllStatistics()) {
+      String uriScheme = stat.getScheme();
       FileSystemStatisticUpdater updater = statisticUpdaters.get(uriScheme);
       if(updater==null) {//new FileSystem has been found in the cache
-        updater = new FileSystemStatisticUpdater(uriScheme, entry.getValue());
+        updater = new FileSystemStatisticUpdater(uriScheme, stat);
         statisticUpdaters.put(uriScheme, updater);
       }
-      updater.updateCounters();
+      updater.updateCounters();      
     }
   }
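
updateCounters() now walks the Statistics objects directly instead of a scheme-keyed map, lazily creating one updater per scheme. The same shape in miniature, with a plain map standing in for the private FileSystemStatisticUpdater (getBytesRead() is the pre-existing accessor):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileSystem.Statistics;

    public class CounterSketch {
      private final Map<String, Long> readBytesByScheme =
        new HashMap<String, Long>();

      public synchronized void update() {
        for (Statistics stat : FileSystem.getAllStatistics()) {
          // One entry per scheme, created on first sighting.
          readBytesByScheme.put(stat.getScheme(), stat.getBytesRead());
        }
      }
    }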
 


