hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r397288 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/io/ObjectWritable.java src/test/org/apache/hadoop/ipc/TestRPC.java
Date Wed, 26 Apr 2006 20:24:21 GMT
Author: cutting
Date: Wed Apr 26 13:24:18 2006
New Revision: 397288

URL: http://svn.apache.org/viewcvs?rev=397288&view=rev
Log:
Fix HADOOP-166.  RPCs can now pass subclasses of declared types as parameters.  Note this
change is incompatible for any application that stores ObjectWritables in a file.  Nutch only
stores ObjectWritable in temporary intermediate files, so this is not a problem for Nutch.

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/CHANGES.txt?rev=397288&r1=397287&r2=397288&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Apr 26 13:24:18 2006
@@ -93,6 +93,13 @@
 25. Fix HADOOP-160.  Remove some uneeded synchronization around
     time-consuming operations in the TaskTracker.  (omalley via cutting)
 
+26. Fix HADOOP-166.  RPCs failed when passed subclasses of a declared
+    parameter type.  This is fixed by changing ObjectWritable to store
+    both the declared type and the instance type for Writables.  Note
+    that this incompatibly changes the format of ObjectWritable and
+    will render unreadable any ObjectWritables stored in files.
+    Nutch only uses ObjectWritable in intermediate files, so this
+    should not be a problem for Nutch.  (Stefan & cutting)
 
 Release 0.1.1 - 2006-04-08
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java?rev=397288&r1=397287&r2=397288&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java Wed Apr 26 13:24:18 2006
@@ -77,51 +77,16 @@
     PRIMITIVE_NAMES.put("void", Void.TYPE);
   }
 
-  private static class NullInstance implements Writable {
-    private Class declaredClass;
-    public NullInstance() {}
-    public NullInstance(Class declaredClass) {
-      this.declaredClass = declaredClass;
-    }
-    public void readFields(DataInput in) throws IOException {
-      String className = UTF8.readString(in);
-      declaredClass = (Class)PRIMITIVE_NAMES.get(className);
-      if (declaredClass == null) {
-        try {
-          declaredClass = Class.forName(className);
-        } catch (ClassNotFoundException e) {
-          throw new RuntimeException(e.toString());
-        }
-      }
-    }
-    public void write(DataOutput out) throws IOException {
-      UTF8.writeString(out, declaredClass.getName());
-    }
-  }
-
   /** Write a {@link Writable}, {@link String}, primitive type, or an array of
    * the preceding. */
   public static void writeObject(DataOutput out, Object instance,
                                  Class declaredClass) throws IOException {
 
     if (instance == null) {                       // null
-      instance = new NullInstance(declaredClass);
-      declaredClass = NullInstance.class;
-    }
-
-    if (instance instanceof Writable) {           // Writable
-
-      // write instance's class, to support subclasses of the declared class
-      UTF8.writeString(out, instance.getClass().getName());
-      
-      ((Writable)instance).write(out);
-
-      return;
+      instance = NullWritable.get();
     }
 
-    // write declared class for primitives, as they can't be subclassed, and
-    // the class of the instance may be a wrapper
-    UTF8.writeString(out, declaredClass.getName());
+    UTF8.writeString(out, declaredClass.getName()); // always write declared
 
     if (declaredClass.isArray()) {                // array
       int length = Array.getLength(instance);
@@ -157,6 +122,10 @@
         throw new IllegalArgumentException("Not a primitive: "+declaredClass);
       }
       
+    } else if (Writable.class.isAssignableFrom(declaredClass)) { // Writable
+      UTF8.writeString(out, instance.getClass().getName());
+      ((Writable)instance).write(out);
+
     } else {
       throw new IOException("Can't write: "+instance+" as "+declaredClass);
     }
@@ -186,13 +155,7 @@
 
     Object instance;
     
-    if (declaredClass == NullInstance.class) {         // null
-      NullInstance wrapper = new NullInstance();
-      wrapper.readFields(in);
-      declaredClass = wrapper.declaredClass;
-      instance = null;
-
-    } else if (declaredClass.isPrimitive()) {          // primitive types
+    if (declaredClass.isPrimitive()) {            // primitive types
 
       if (declaredClass == Boolean.TYPE) {             // boolean
         instance = Boolean.valueOf(in.readBoolean());
@@ -227,12 +190,23 @@
       instance = UTF8.readString(in);
       
     } else {                                      // Writable
-      Writable writable = WritableFactories.newInstance(declaredClass);
-      if(writable instanceof Configurable) {
-        ((Configurable) writable).setConf(conf);
+      Class instanceClass = null;
+      try {
+        instanceClass = Class.forName(UTF8.readString(in));
+      } catch (ClassNotFoundException e) {
+        throw new RuntimeException(e.toString());
+      }
+      
+      if (instanceClass == NullWritable.class) {  // null
+        instance = null;
+      } else {
+        Writable writable = WritableFactories.newInstance(instanceClass);
+        if(writable instanceof Configurable) {
+          ((Configurable) writable).setConf(conf);
+        }
+        writable.readFields(in);
+        instance = writable;
       }
-      writable.readFields(in);
-      instance = writable;
     }
 
     if (objectWritable != null) {                 // store values

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java?rev=397288&r1=397287&r2=397288&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java Wed Apr 26 13:24:18 2006
@@ -28,6 +28,8 @@
 
 import org.apache.hadoop.util.LogFormatter;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.UTF8;
+import org.apache.hadoop.io.Writable;
 
 /** Unit tests for RPC. */
 public class TestRPC extends TestCase {
@@ -52,6 +54,7 @@
     void ping() throws IOException;
     String echo(String value) throws IOException;
     String[] echo(String[] value) throws IOException;
+    Writable echo(Writable value) throws IOException;
     int add(int v1, int v2) throws IOException;
     int add(int[] values) throws IOException;
     int error() throws IOException;
@@ -66,6 +69,9 @@
 
     public String[] echo(String[] values) throws IOException { return values; }
 
+    public Writable echo(Writable writable) {
+      return writable;
+    }
     public int add(int v1, int v2) {
       return v1 + v2;
     }
@@ -105,6 +111,9 @@
 
     String[] stringResults = proxy.echo(new String[]{"foo","bar"});
     assertTrue(Arrays.equals(stringResults, new String[]{"foo","bar"}));
+
+    UTF8 utf8Result = (UTF8)proxy.echo(new UTF8("hello world"));
+    assertEquals(utf8Result, new UTF8("hello world"));
 
     int intResult = proxy.add(1, 2);
     assertEquals(intResult, 3);



Mime
View raw message