avro-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r819010 - in /hadoop/avro/trunk: ./ src/java/org/apache/avro/ src/java/org/apache/avro/ipc/ src/java/org/apache/avro/reflect/ src/java/org/apache/avro/specific/ src/test/java/org/apache/avro/ src/test/schemata/
Date Fri, 25 Sep 2009 21:10:57 GMT
Author: cutting
Date: Fri Sep 25 21:10:53 2009
New Revision: 819010

URL: http://svn.apache.org/viewvc?rev=819010&view=rev
Log:
AVRO-120.  Improved package and namespace handling.

Added:
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceReflect.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java
    hadoop/avro/trunk/src/test/schemata/namespace.avpr
Modified:
    hadoop/avro/trunk/CHANGES.txt
    hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java
    hadoop/avro/trunk/src/java/org/apache/avro/Schema.java
    hadoop/avro/trunk/src/java/org/apache/avro/ipc/Requestor.java
    hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java
    hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java
    hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java
    hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java
    hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectRequestor.java
    hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/ProtocolTask.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SchemaTask.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumWriter.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificRequestor.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java

Modified: hadoop/avro/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Fri Sep 25 21:10:53 2009
@@ -8,6 +8,25 @@
     stateless transports like UDP and HTTP.  Add a UDP transport.
     (cutting)
 
+    AVRO-120. Improved package and namespace handling, including:
+
+     * Removed explicit package-name specification from specific and
+       reflect public APIs.  Package names are now determined either
+       by namespace declarations or by Java classes, as appropriate.
+
+     * Changed the specific compiler to generate separate java files
+       per class, rather than nested classes.  This permits generated
+       classes to be in packages declared in their schema namespaces.
+
+     * Fixed namespace defaulting.  The default namespace is declared in
+       the outermost schema or protocol.  Nested schemas can now
+       declare different namespaces than the default.
+
+     * Names may now be specified with a dotted notation, e.g.,
+       "foo.bar.Baz", to indicate the name "Baz" in namespace
+       "foo.bar".  This permits one to refer to schemas in a namespace
+       other than the default.
+
   NEW FEATURES
 
     AVRO-121.  Permit reflect and specific datum readers to read data

Modified: hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/Protocol.java Fri Sep 25 21:10:53 2009
@@ -27,6 +27,7 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Collection;
 
 import org.apache.avro.Schema.Field;
 import org.codehaus.jackson.JsonNode;
@@ -156,7 +157,17 @@
   public String getNamespace() { return namespace; }
 
   /** The types of this protocol. */
-  public LinkedHashMap<String,Schema> getTypes() { return types; }
+  public Collection<Schema> getTypes() { return types.values(); }
+
+  /** Returns the named type. */
+  public Schema getType(String name) { return types.get(name); }
+
+  /** Set the types of this protocol. */
+  public void setTypes(Collection<Schema> newTypes) {
+    types = new Schema.Names();
+    for (Schema s : newTypes)
+      types.add(s);
+  }
 
   /** The messages of this protocol. */
   public Map<String,Message> getMessages() { return messages; }
@@ -194,13 +205,15 @@
     }
   }
   void toJson(JsonGenerator gen) throws IOException {
+    types.space(namespace);
+
     gen.writeStartObject();
     gen.writeStringField("protocol", name);
     gen.writeStringField("namespace", namespace);
     
     gen.writeArrayFieldStart("types");
     for (Schema type : types.values())
-      type.toJson(types.except(type.getName()), gen);
+      type.toJson(types.except(type), gen);
     gen.writeEndArray();
     
     gen.writeObjectFieldStart("messages");

Modified: hadoop/avro/trunk/src/java/org/apache/avro/Schema.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/Schema.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/Schema.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/Schema.java Fri Sep 25 21:10:53 2009
@@ -263,40 +263,74 @@
     public int hashCode() { return schema.hashCode(); }
   }
 
+  private static class Name {
+    private String name;
+    private String space;
+    public Name(Schema s) { this(s.getName(), s.getNamespace()); }
+    public Name(String name, String space) {
+      if (name == null) return;                   // anonymous
+      int lastDot = name.lastIndexOf('.');
+      if (lastDot < 0) {                          // unqualified name
+        this.space = space;                       // use default space
+        this.name = name;
+      } else {                                    // qualified name
+        this.space = name.substring(0, lastDot);  // get space from name
+        this.name = name.substring(lastDot+1, name.length());
+      }
+    }
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      if (!(o instanceof Name)) return false;
+      Name that = (Name)o;
+      return that == null ? false
+        : (name==null ? that.name==null : name.equals(that.name))
+        && (space==null ? that.space==null : space.equals(that.space));
+    }
+    public int hashCode() {
+      return (name==null ? 0 : name.hashCode())
+        + (space==null ? 0 : space.hashCode());
+    }
+    public String toString() { return "name="+name + " namespace="+space; }
+    public void writeName(Names names, JsonGenerator gen) throws IOException {
+      if (name != null) gen.writeStringField("name", name);
+      if (space != null) {
+        if (!space.equals(names.space()))
+          gen.writeStringField("namespace", space);
+        if (names.space() == null)                // default namespace
+          names.space(space);
+      }
+    }
+  }
+
   private static abstract class NamedSchema extends Schema {
-    protected final String name; 
-    protected final String space; 
+    private final Name name;
     public NamedSchema(Type type, String name, String space) {
       super(type);
-      this.name = name;
-      this.space = space;
+      this.name = new Name(name, space);
     }
-    public String getName() { return name; }
-    public String getNamespace() { return space; }
+    public String getName() { return name.name; }
+    public String getNamespace() { return name.space; }
     public boolean writeNameRef(Names names, JsonGenerator gen)
       throws IOException {
       if (this.equals(names.get(name))) {
-        gen.writeString(name);
+        if (name.space == null || name.space.equals(names.space()))
+          gen.writeString(name.name);
+        else {
+          gen.writeString(name.space+"."+name.name);
+        }
         return true;
-      } else if (name != null) {
+      } else if (name.name != null) {
         names.put(name, this);
       }
       return false;
     }
     public void writeName(Names names, JsonGenerator gen) throws IOException {
-      if (name != null)  gen.writeStringField("name", name);
-      if (space != null) gen.writeStringField("namespace", space);
+      name.writeName(names, gen);
     }
     public boolean equalNames(NamedSchema that) {
-      return that == null ? false
-        : (name==null ? that.name==null : name.equals(that.name))
-        && (space==null ? that.space==null : space.equals(that.space));
-    }
-    public int hashCode() {
-      return getType().hashCode()
-        + (name==null ? 0 : name.hashCode())
-        + (space==null ? 0 : space.hashCode());
+      return this.name.equals(that.name);
     }
+    public int hashCode() { return getType().hashCode() + name.hashCode(); }
   }
 
   private static class SeenPair {
@@ -594,7 +628,7 @@
     return parse(parseJson(jsonSchema), new Names());
   }
 
-  static final Names PRIMITIVES = new Names(null);
+  static final Map<String,Schema> PRIMITIVES = new HashMap<String,Schema>();
   static {
     PRIMITIVES.put("string",  STRING_SCHEMA);
     PRIMITIVES.put("bytes",   BYTES_SCHEMA);
@@ -606,35 +640,46 @@
     PRIMITIVES.put("null",    NULL_SCHEMA);
   }
 
-  static class Names extends LinkedHashMap<String, Schema> {
-    private Names defaults = PRIMITIVES;
+  static class Names extends LinkedHashMap<Name, Schema> {
     private String space;                         // default namespace
 
-    public Names(Names defaults) { this.defaults = defaults; }
-    public Names() { this(PRIMITIVES); }
+    public Names() {}
+    public Names(String space) { this.space = space; }
+
+    public String space() { return space; }
+    public void space(String space) { this.space = space; }
 
     @Override
-    public Schema get(Object name) {
-      if (containsKey(name))
-        return super.get(name);
-      if (defaults != null)
-        return defaults.get(name);
-      return null;
+    public Schema get(Object o) {
+      Name name;
+      if (o instanceof String) {
+        Schema primitive = PRIMITIVES.get((String)o);
+        if (primitive != null) return primitive;
+        name = new Name((String)o, space);
+      } else {
+        name = (Name)o;
+      }
+      return super.get(name);
+    }
+    public void add(Schema schema) {
+      put(((NamedSchema)schema).name, schema);
     }
     @Override
-    public Schema put(String name, Schema schema) {
-      if (get(name) != null)
+    public Schema put(Name name, Schema schema) {
+      if (containsKey(name))
         throw new SchemaParseException("Can't redefine: "+name);
       return super.put(name, schema);
     }
-    public Names except(String name) {
-      Names result = new Names(this);
-      result.clear(name);
-      return result;
+    public Names except(final Schema schema) {
+      final Names parent = this;
+      return new Names(space) {
+        public Schema get(Object o) {
+          if (this.containsKey(o)) return this.get(o);
+          if (((NamedSchema)schema).name.equals(o)) return null;
+          return parent.get(o);
+        }
+      };
     }
-    public String space() { return space; }
-    public void space(String space) { this.space = space; }
-    private void clear(String name) { super.put(name, null); }
   }
 
   /** @see #parse(String) */
@@ -656,6 +701,8 @@
         name = nameNode != null ? nameNode.getTextValue() : null;
         JsonNode spaceNode = schema.get("namespace");
         space = spaceNode!=null?spaceNode.getTextValue():names.space();
+        if (names.space() == null && space != null)
+          names.space(space);                     // set default namespace
         if (name == null)
           throw new SchemaParseException("No name in schema: "+schema);
       }
@@ -663,7 +710,7 @@
         LinkedHashMap<String,Field> fields = new LinkedHashMap<String,Field>();
         RecordSchema result =
           new RecordSchema(name, space, type.equals("error"));
-        if (name != null) names.put(name, result);
+        if (name != null) names.add(result);
         JsonNode fieldsNode = schema.get("fields");
         if (fieldsNode == null || !fieldsNode.isArray())
           throw new SchemaParseException("Record has no fields: "+schema);
@@ -692,7 +739,7 @@
         for (JsonNode n : symbolsNode)
           symbols.add(n.getTextValue());
         Schema result = new EnumSchema(name, space, symbols);
-        if (name != null) names.put(name, result);
+        if (name != null) names.add(result);
         return result;
       } else if (type.equals("array")) {          // array
         return new ArraySchema(parse(schema.get("items"), names));
@@ -701,7 +748,7 @@
       } else if (type.equals("fixed")) {          // fixed
         Schema result = new FixedSchema(name, space,
                                         schema.get("size").getIntValue());
-        if (name != null) names.put(name, result);
+        if (name != null) names.add(result);
         return result;
       } else
         throw new SchemaParseException("Type not yet supported: "+type);
@@ -726,4 +773,3 @@
   }
 
 }
-

Modified: hadoop/avro/trunk/src/java/org/apache/avro/ipc/Requestor.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/ipc/Requestor.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/ipc/Requestor.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/ipc/Requestor.java Fri Sep 25 21:10:53 2009
@@ -144,10 +144,10 @@
     Collections.synchronizedMap(new HashMap<MD5,Protocol>());
 
   private static final SpecificDatumWriter HANDSHAKE_WRITER =
-    new SpecificDatumWriter(HandshakeRequest._SCHEMA);
+    new SpecificDatumWriter(HandshakeRequest.class);
 
   private static final SpecificDatumReader HANDSHAKE_READER =
-    new SpecificDatumReader(HandshakeResponse._SCHEMA);
+    new SpecificDatumReader(HandshakeResponse.class);
 
   private void writeHandshake(Encoder out) throws IOException {
     MD5 localHash = new MD5();

Modified: hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/ipc/Responder.java Fri Sep 25 21:10:53 2009
@@ -151,9 +151,9 @@
   }
 
   private SpecificDatumWriter handshakeWriter =
-    new SpecificDatumWriter(HandshakeResponse._SCHEMA);
+    new SpecificDatumWriter(HandshakeResponse.class);
   private SpecificDatumReader handshakeReader =
-    new SpecificDatumReader(HandshakeRequest._SCHEMA);
+    new SpecificDatumReader(HandshakeRequest.class);
 
   @SuppressWarnings("unchecked")
   private Protocol handshake(Decoder in, Encoder out)

Modified: hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java Fri Sep 25 21:10:53 2009
@@ -29,6 +29,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.WeakHashMap;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.avro.AvroRuntimeException;
 import org.apache.avro.AvroTypeException;
@@ -98,7 +99,7 @@
       for (Map.Entry<String, Schema> entry : schema.getFieldSchemas()) {
         try {
           if (!validate(entry.getValue(),
-                        ReflectData.getField(c, entry.getKey()).get(datum)))
+                        getField(c, entry.getKey()).get(datum)))
           return false;
         } catch (IllegalAccessException e) {
           throw new AvroRuntimeException(e);
@@ -142,6 +143,58 @@
     }
   }
 
+  private Map<String,Map<String,Class>> classCache =
+    new ConcurrentHashMap<String,Map<String,Class>>();
+
+  /** Return the class that implements this schema. */
+  public Class getClass(Schema schema) {
+    switch (schema.getType()) {
+    case FIXED:
+    case RECORD:
+    case ENUM:
+      String namespace = schema.getNamespace();
+      Map<String,Class> spaceCache = classCache.get(namespace);
+      if (spaceCache == null) {
+        spaceCache = new ConcurrentHashMap<String,Class>();
+        classCache.put(namespace, spaceCache);
+      }
+      String name = schema.getName();
+      Class c = spaceCache.get(name);
+      if (c == null) {
+        try {
+          c = Class.forName(getClassName(schema));
+          spaceCache.put(name, c);
+        } catch (ClassNotFoundException e) {
+          throw new AvroRuntimeException(e);
+        }
+      }
+      return c;
+    case ARRAY:   return GenericArray.class;
+    case MAP:     return Map.class;
+    case UNION:   return Object.class;
+    case STRING:  return Utf8.class;
+    case BYTES:   return ByteBuffer.class;
+    case INT:     return Integer.TYPE;
+    case LONG:    return Long.TYPE;
+    case FLOAT:   return Float.TYPE;
+    case DOUBLE:  return Double.TYPE;
+    case BOOLEAN: return Boolean.TYPE;
+    case NULL:    return Void.TYPE;
+    default: throw new AvroRuntimeException("Unknown type: "+schema);
+    }
+
+  }
+
+  /** Returns the Java class name indicated by a schema's name and namespace. */
+  public String getClassName(Schema schema) {
+    String namespace = schema.getNamespace();
+    String name = schema.getName();
+    if (namespace == null)
+      return name;
+    String dot = namespace.endsWith("$") ? "" : ".";
+    return namespace + dot + name;
+  }
+
   private final WeakHashMap<java.lang.reflect.Type,Schema> schemaCache =
     new WeakHashMap<java.lang.reflect.Type,Schema>();
 
@@ -187,10 +240,8 @@
     else if (type instanceof ParameterizedType) {
       ParameterizedType ptype = (ParameterizedType)type;
       Class raw = (Class)ptype.getRawType();
-      System.out.println("ptype = "+ptype+" raw = "+raw);
       java.lang.reflect.Type[] params = ptype.getActualTypeArguments();
       for (int i = 0; i < params.length; i++)
-        System.out.println("param ="+params[i]);
       if (GenericArray.class.isAssignableFrom(raw)) { // array
         if (params.length != 1)
           throw new AvroTypeException("No array type specified.");
@@ -206,8 +257,10 @@
       Class c = (Class)type;
       String name = c.getSimpleName();
       String space = c.getPackage().getName();
-      
-      Schema schema = names.get(name);
+      if (c.getEnclosingClass() != null)          // nested class
+        space = c.getEnclosingClass().getName() + "$";
+      String fullName = c.getName();
+      Schema schema = names.get(fullName);
       if (schema == null) {
 
         if (c.isEnum()) {                         // enum
@@ -216,14 +269,14 @@
           for (int i = 0; i < constants.length; i++)
             symbols.add(constants[i].name());
           schema = Schema.createEnum(name, space, symbols);
-          names.put(name, schema);
+          names.put(fullName, schema);
           return schema;
         }
                                                   // fixed
         if (GenericFixed.class.isAssignableFrom(c)) {
           int size = ((FixedSize)c.getAnnotation(FixedSize.class)).value();
           schema = Schema.createFixed(name, space, size);
-          names.put(name, schema);
+          names.put(fullName, schema);
           return schema;
         }
                                                   // record
@@ -231,8 +284,8 @@
           new LinkedHashMap<String,Schema.Field>();
         schema = Schema.createRecord(name, space,
                                      Throwable.class.isAssignableFrom(c));
-        if (!names.containsKey(name))
-          names.put(name, schema);
+        if (!names.containsKey(fullName))
+          names.put(fullName, schema);
         for (Field field : c.getDeclaredFields())
           if ((field.getModifiers()&(Modifier.TRANSIENT|Modifier.STATIC))==0) {
             Schema fieldSchema = createFieldSchema(field, names);
@@ -259,27 +312,25 @@
   public Protocol getProtocol(Class iface) {
     Protocol protocol =
       new Protocol(iface.getSimpleName(), iface.getPackage().getName()); 
+    Map<String,Schema> names = new LinkedHashMap<String,Schema>();
     for (Method method : iface.getDeclaredMethods())
       if ((method.getModifiers() & Modifier.STATIC) == 0)
         protocol.getMessages().put(method.getName(),
-                                   getMessage(method, protocol));
+                                   getMessage(method, protocol, names));
 
     // reverse types, since they were defined in reference order
-    List<Map.Entry<String,Schema>> names =
-      new ArrayList<Map.Entry<String,Schema>>();
-    names.addAll(protocol.getTypes().entrySet());
-    Collections.reverse(names);
-    protocol.getTypes().clear();
-    for (Map.Entry<String,Schema> name : names)
-      protocol.getTypes().put(name.getKey(), name.getValue());
+    List<Schema> types = new ArrayList<Schema>();
+    types.addAll(names.values());
+    Collections.reverse(types);
+    protocol.setTypes(types);
 
     return protocol;
   }
 
   private final Paranamer paranamer = new CachingParanamer();
 
-  private Message getMessage(Method method, Protocol protocol) {
-    Map<String,Schema> names = protocol.getTypes();
+  private Message getMessage(Method method, Protocol protocol,
+                             Map<String,Schema> names) {
     LinkedHashMap<String,Schema.Field> fields =
       new LinkedHashMap<String,Schema.Field>();
     String[] paramNames = paranamer.lookupParameterNames(method);
@@ -302,4 +353,3 @@
   }
 
 }
-

Modified: hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java Fri Sep 25 21:10:53 2009
@@ -30,19 +30,18 @@
  * Java reflection.
  */
 public class ReflectDatumReader extends GenericDatumReader<Object> {
-  protected String packageName;
+  public ReflectDatumReader() {}
 
-  public ReflectDatumReader(String packageName) {
-    this.packageName = packageName;
+  public ReflectDatumReader(Class c) {
+    this(ReflectData.get().getSchema(c));
   }
 
-  public ReflectDatumReader(Schema root, String packageName) {
-    this(packageName);
+  public ReflectDatumReader(Schema root) {
     setSchema(root);
   }
 
   protected Object newRecord(Object old, Schema schema) {
-    Class c = getClass(schema);
+    Class c = ReflectData.get().getClass(schema);
     return (c.isInstance(old) ? old : newInstance(c));
   }
 
@@ -68,11 +67,11 @@
 
   @SuppressWarnings("unchecked")
   protected Object createEnum(String symbol, Schema schema) {
-    return Enum.valueOf(getClass(schema), symbol);
+    return Enum.valueOf(ReflectData.get().getClass(schema), symbol);
   }
 
   protected Object createFixed(Object old, Schema schema) {
-    Class c = getClass(schema);
+    Class c = ReflectData.get().getClass(schema);
     return c.isInstance(old) ? old : newInstance(c);
   }
 
@@ -80,22 +79,6 @@
   private static final Map<Class,Constructor> CTOR_CACHE =
     new ConcurrentHashMap<Class,Constructor>();
 
-  private Map<String,Class> classCache = new ConcurrentHashMap<String,Class>();
-
-  private Class getClass(Schema schema) {
-    String name = schema.getName();
-    Class c = classCache.get(name);
-    if (c == null) {
-      try {
-        c = Class.forName(packageName + name);
-        classCache.put(name, c);
-      } catch (ClassNotFoundException e) {
-        throw new AvroRuntimeException(e);
-      }
-    }
-    return c;
-  }
-
   /** Create a new instance of the named class. */
   @SuppressWarnings("unchecked")
   protected static Object newInstance(Class c) {

Modified: hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java Fri Sep 25 21:10:53 2009
@@ -33,6 +33,14 @@
     this(ReflectData.get());
   }
 
+  public ReflectDatumWriter(Class c) {
+    this(c, ReflectData.get());
+  }
+
+  public ReflectDatumWriter(Class c, ReflectData data) {
+    this(data.getSchema(c), data);
+  }
+
   public ReflectDatumWriter(Schema root) {
     this(root, ReflectData.get());
   }

Modified: hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectRequestor.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectRequestor.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectRequestor.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectRequestor.java Fri Sep 25 21:10:53 2009
@@ -36,7 +36,6 @@
 
 /** A {@link Requestor} for existing interfaces via Java reflection. */
 public class ReflectRequestor extends Requestor implements InvocationHandler {
-  protected String packageName;
   
   public ReflectRequestor(Class<?> iface, Transceiver transceiver)
     throws IOException {
@@ -56,7 +55,6 @@
   protected ReflectRequestor(Protocol protocol, Transceiver transceiver, ReflectData reflectData)
     throws IOException {
     super(protocol, transceiver);
-    this.packageName = protocol.getNamespace()+"."+protocol.getName()+"$";
   }
 
   public Object invoke(Object proxy, Method method, Object[] args)
@@ -69,7 +67,7 @@
   }
 
   protected DatumReader<Object> getDatumReader(Schema schema) {
-    return new ReflectDatumReader(schema, packageName);
+    return new ReflectDatumReader(schema);
   }
 
   public void writeRequest(Schema schema, Object request, Encoder out)

Modified: hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java Fri Sep 25 21:10:53 2009
@@ -21,25 +21,21 @@
 import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
-import java.nio.ByteBuffer;
 import java.util.Map;
 
 import org.apache.avro.AvroRuntimeException;
 import org.apache.avro.Schema;
 import org.apache.avro.Protocol.Message;
-import org.apache.avro.generic.GenericArray;
 import org.apache.avro.io.DatumReader;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.Decoder;
 import org.apache.avro.io.Encoder;
 import org.apache.avro.ipc.AvroRemoteException;
 import org.apache.avro.ipc.Responder;
-import org.apache.avro.util.Utf8;
 
 /** {@link Responder} for existing interfaces via Java reflection.*/
 public class ReflectResponder extends Responder {
   private Object impl;
-  protected String packageName;
 
   public ReflectResponder(Class iface, Object impl) {
     this(iface, impl, ReflectData.get());
@@ -48,7 +44,6 @@
   public ReflectResponder(Class iface, Object impl, ReflectData reflectData) {
     super(reflectData.getProtocol(iface));
     this.impl = impl;
-    this.packageName = getLocal().getNamespace()+"."+getLocal().getName()+"$";
   }
 
   protected DatumWriter<Object> getDatumWriter(Schema schema) {
@@ -56,7 +51,7 @@
   }
 
   protected DatumReader<Object> getDatumReader(Schema schema) {
-    return new ReflectDatumReader(schema, packageName);
+    return new ReflectDatumReader(schema);
   }
 
   /** Reads a request message. */
@@ -86,7 +81,7 @@
     int i = 0;
     try {
       for (Map.Entry<String,Schema> param: message.getRequest().getFieldSchemas())
-        paramTypes[i++] = paramType(param.getValue());
+        paramTypes[i++] = ReflectData.get().getClass(param.getValue());
       Method method = impl.getClass().getMethod(message.getName(), paramTypes);
       return method.invoke(impl, (Object[])request);
     } catch (InvocationTargetException e) {
@@ -94,8 +89,6 @@
       if (target instanceof AvroRemoteException)
         throw (AvroRemoteException)target;
       else throw new AvroRuntimeException(e);
-    } catch (ClassNotFoundException e) {
-      throw new AvroRuntimeException(e);
     } catch (NoSuchMethodException e) {
       throw new AvroRuntimeException(e);
     } catch (IllegalAccessException e) {
@@ -103,27 +96,5 @@
     }
   }
 
-  private Class paramType(Schema schema) throws ClassNotFoundException {
-    switch (schema.getType()) {
-    case FIXED:
-    case RECORD:
-    case ENUM:    return Class.forName(packageName+schema.getName());
-    case ARRAY:   return GenericArray.class;
-    case MAP:     return Map.class;
-    case UNION:   return Object.class;
-    case STRING:  return Utf8.class;
-    case BYTES:   return ByteBuffer.class;
-    case INT:     return Integer.TYPE;
-    case LONG:    return Long.TYPE;
-    case FLOAT:   return Float.TYPE;
-    case DOUBLE:  return Double.TYPE;
-    case BOOLEAN: return Boolean.TYPE;
-    case NULL:    return Void.TYPE;
-    default: throw new AvroRuntimeException("Unknown type: "+schema);
-    }
-
-  }
-
-
 }
 

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/ProtocolTask.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/ProtocolTask.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/ProtocolTask.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/ProtocolTask.java Fri Sep 25 21:10:53 2009
@@ -18,9 +18,7 @@
 package org.apache.avro.specific;
 
 import java.io.File;
-import java.io.FileWriter;
 import java.io.IOException;
-import java.io.Writer;
 import java.util.ArrayList;
 
 import org.apache.avro.AvroRuntimeException;
@@ -65,37 +63,13 @@
     }
   }
   
-  protected SpecificCompiler doCompile(File file) throws IOException {
-    return SpecificCompiler.compileProtocol(file);
+  protected void doCompile(File file, File dir) throws IOException {
+    SpecificCompiler.compileProtocol(file, dir);
   }
 
   private void compile(File file) {
     try {
-      SpecificCompiler compiler = doCompile(file);
-      String namespace = compiler.getNamespace();
-      String text = compiler.getCode();
-      String name = file.getName();
-      name = name.substring(0, name.indexOf('.'))+".java";
-      name = SpecificCompiler.cap(name);
-      File outputFile;
-      if (namespace == null || namespace.length() == 0) {
-        outputFile = new File(dest, name);
-      } else {
-        File packageDir =
-            new File(dest, namespace.replace('.', File.separatorChar));
-        if (!packageDir.exists()) {
-            if (!packageDir.mkdirs()) {
-                throw new BuildException("Unable to create " + packageDir);
-            }
-        }
-        outputFile = new File(packageDir, name);
-      }
-      Writer out = new FileWriter(outputFile);
-      try {
-        out.write(text);
-      } finally {
-        out.close();
-      }
+      doCompile(file, dest);
     } catch (AvroRuntimeException e) {
       throw new BuildException(e);
     } catch (IOException e) {

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/SchemaTask.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SchemaTask.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SchemaTask.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SchemaTask.java Fri Sep 25 21:10:53 2009
@@ -22,8 +22,8 @@
 
 /** Ant task to generate Java interface and classes for a protocol. */
 public class SchemaTask extends ProtocolTask {
-  protected SpecificCompiler doCompile(File file) throws IOException {
-    return SpecificCompiler.compileSchema(file);
+  protected void doCompile(File src, File dest) throws IOException {
+    SpecificCompiler.compileSchema(src, dest);
   }
 }
 

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificCompiler.java Fri Sep 25 21:10:53 2009
@@ -18,6 +18,9 @@
 package org.apache.avro.specific;
 
 import java.io.File;
+import java.io.FileOutputStream;
+import java.io.Writer;
+import java.io.OutputStreamWriter;
 import java.io.IOException;
 import java.util.Map;
 import java.util.Set;
@@ -27,69 +30,106 @@
 import org.apache.avro.Protocol;
 import org.apache.avro.Schema;
 import org.apache.avro.Protocol.Message;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
 
 /** Generate specific Java interfaces and classes for protocols and schemas. */
 public class SpecificCompiler {
-  private static final ObjectMapper MAPPER = new ObjectMapper();
-  private static final JsonFactory FACTORY = new JsonFactory();
+  private File dest;
+  private Writer out;
+  private Set<Schema> queue = new HashSet<Schema>();
+
+  private SpecificCompiler(File dest) {
+    this.dest = dest;                             // root directory for output
+  }
+
+  /** Generates Java interface and classes for a protocol.
+   * @param src the source Avro protocol file
+   * @param dest the directory to place generated files in
+   */
+  public static void compileProtocol(File src, File dest) throws IOException {
+    SpecificCompiler compiler = new SpecificCompiler(dest);
+    Protocol protocol = Protocol.parse(src);
+    for (Schema s : protocol.getTypes())          // enqueue types
+      compiler.enqueue(s);
+    compiler.compileInterface(protocol);          // generate interface
+    compiler.compile();                           // generate classes for types
+  }
+
+  /** Generates Java classes for a schema. */
+  public static void compileSchema(File src, File dest) throws IOException {
+    SpecificCompiler compiler = new SpecificCompiler(dest);
+    compiler.enqueue(Schema.parse(src));          // enqueue types
+    compiler.compile();                           // generate classes for types
+  }
+
+  /** Recursively enqueue schemas that need a class generated. */
+  private void enqueue(Schema schema) throws IOException {
+    if (queue.contains(schema)) return;
+    switch (schema.getType()) {
+    case RECORD:
+      queue.add(schema);
+      for (Map.Entry<String, Schema> field : schema.getFieldSchemas())
+        enqueue(field.getValue());
+      break;
+    case MAP:
+      enqueue(schema.getValueType());
+      break;
+    case ARRAY:
+      enqueue(schema.getElementType());
+      break;
+    case UNION:
+      for (Schema s : schema.getTypes())
+        enqueue(s);
+      break;
+    case ENUM:
+    case FIXED:
+      queue.add(schema);
+      break;
+    case STRING: case BYTES:
+    case INT: case LONG:
+    case FLOAT: case DOUBLE:
+    case BOOLEAN: case NULL:
+      break;
+    default: throw new RuntimeException("Unknown type: "+schema);
+    }
+  }
 
-  private String namespace;
-  private StringBuilder buffer = new StringBuilder();
-  private Set<String> compiledTypes = new HashSet<String>();
-
-  private SpecificCompiler() {}                        // no public ctor
-
-  /** Returns generated Java interface for a protocol. */
-  public static SpecificCompiler compileProtocol(File file) throws IOException {
-    SpecificCompiler compiler = new SpecificCompiler();
-    Protocol protocol = Protocol.parse(file);
-    compiler.compile(protocol);
-    return compiler;
-  }
-
-  /** Returns generated Java class for a schema. */
-  public static SpecificCompiler compileSchema(File file) throws IOException {
-    SpecificCompiler compiler = new SpecificCompiler();
-    Schema schema = Schema.parse(file);
-    compiler.header(schema.getNamespace());
-    compiler.namespace = schema.getNamespace();
-    compiler.compile(schema, schema.getName(), 0);
-    return compiler;
-  }
-
-  /** Return namespace for compiled code. */
-  public String getNamespace() { return namespace; }
-
-  /** Return generated code. */
-  public String getCode() { return buffer.toString(); }
-  
-  private void compile(Protocol protocol) {
-    namespace = protocol.getNamespace();
-    header(namespace);
-
-    // define an interface
-    line(0, "public interface "+protocol.getName()+" {");
-
-    // nest type classes
-    for (Schema schema : protocol.getTypes().values())
-      compile(schema, schema.getName(), 1);
-
-    // define methods
-    buffer.append("\n");
-    for (Map.Entry<String,Message> entry : protocol.getMessages().entrySet()) {
-      String name = entry.getKey();
-      Message message = entry.getValue();
-      Schema request = message.getRequest();
-      Schema response = message.getResponse();
-      line(1, type(response, name+"Return")+" "+name+"("+params(request)+")");
-      line(2,"throws AvroRemoteException"+errors(message.getErrors())+";");
+  /** Generate Java classes for enqueued schemas. */
+  private void compile() throws IOException {
+    for (Schema schema : queue)
+      compile(schema);
+  }
+
+  private void compileInterface(Protocol protocol) throws IOException {
+    startFile(protocol.getName(), protocol.getNamespace());
+    try {
+      line(0, "public interface "+protocol.getName()+" {");
+
+      out.append("\n");
+      for (Map.Entry<String,Message> e : protocol.getMessages().entrySet()) {
+        String name = e.getKey();
+        Message message = e.getValue();
+        Schema request = message.getRequest();
+        Schema response = message.getResponse();
+        line(1, type(response)+" "+name+"("+params(request)+")");
+        line(2,"throws AvroRemoteException"+errors(message.getErrors())+";");
+      }
+      line(0, "}");
+    } finally {
+      out.close();
     }
-    line(0, "}");
   }
 
-  private void header(String namespace) {
+  private void startFile(String name, String space) throws IOException {
+    File dir = new File(dest, space.replace('.', File.separatorChar));
+    if (!dir.exists())
+      if (!dir.mkdirs())
+        throw new IOException("Unable to create " + dir);
+    name = cap(name) + ".java";
+    out = new OutputStreamWriter(new FileOutputStream(new File(dir, name)));
+    header(space);
+  }
+
+  private void header(String namespace) throws IOException {
     if(namespace != null) {
       line(0, "package "+namespace+";\n");
     }
@@ -107,15 +147,20 @@
     line(0, "import org.apache.avro.specific.SpecificRecord;");
     line(0, "import org.apache.avro.specific.SpecificFixed;");
     line(0, "import org.apache.avro.reflect.FixedSize;");
-    buffer.append("\n");
+    for (Schema s : queue)
+      if (namespace == null
+          ? (s.getNamespace() != null)
+          : !namespace.equals(s.getNamespace()))
+        line(0, "import "+SpecificData.get().getClassName(s)+";");
+    line(0, "");
   }
 
-  private String params(Schema request) {
+  private String params(Schema request) throws IOException {
     StringBuilder b = new StringBuilder();
     int count = 0;
     for (Map.Entry<String, Schema> param : request.getFieldSchemas()) {
       String paramName = param.getKey();
-      b.append(type(param.getValue(), paramName));
+      b.append(type(param.getValue()));
       b.append(" ");
       b.append(paramName);
       if (++count < request.getFields().size())
@@ -124,7 +169,7 @@
     return b.toString();
   }
 
-  private String errors(Schema errs) {
+  private String errors(Schema errs) throws IOException {
     StringBuilder b = new StringBuilder();
     for (Schema error : errs.getTypes().subList(1, errs.getTypes().size())) {
       b.append(", ");
@@ -133,113 +178,88 @@
     return b.toString();
   }
 
-  private void compile(Schema schema, String name, int d) {
-    String type = type(schema, name);
-    if (compiledTypes.contains(type)) return; else compiledTypes.add(type);
-    switch (schema.getType()) {
-    case RECORD:
-      buffer.append("\n");
-      line(d, ((d==0)?"public ":"")
-           +((d>1)?"static ":"")+"class "+type
-           +(schema.isError()
-             ? " extends SpecificExceptionBase"
-             : " extends SpecificRecordBase")
-           +" implements SpecificRecord {");
-      // schema definition
-      line(d+1, "public static final Schema _SCHEMA = Schema.parse(\""
-           +esc(schema)+"\");");
-      // field declations
-      for (Map.Entry<String, Schema> field : schema.getFieldSchemas()) {
-        String fieldName = field.getKey();
-        line(d+1,"public "+unbox(field.getValue(),fieldName)+" "+fieldName+";");
-      }
-      // schema method
-      line(d+1, "public Schema getSchema() { return _SCHEMA; }");
-      // get method
-      line(d+1, "public Object get(int _field) {");
-      line(d+2, "switch (_field) {");
-      int i = 0;
-      for (Map.Entry<String, Schema> field : schema.getFieldSchemas())
-        line(d+2, "case "+(i++)+": return "+field.getKey()+";");
-      line(d+2, "default: throw new AvroRuntimeException(\"Bad index\");");
-      line(d+2, "}");
-      line(d+1, "}");
-      // set method
-      line(d+1, "@SuppressWarnings(value=\"unchecked\")");
-      line(d+1, "public void set(int _field, Object _value) {");
-      line(d+2, "switch (_field) {");
-      i = 0;
-      for (Map.Entry<String, Schema> field : schema.getFieldSchemas())
-        line(d+2, "case "+(i++)+": "+field.getKey()+" = ("+
-             type(field.getValue(),field.getKey())+")_value; break;");
-      line(d+2, "default: throw new AvroRuntimeException(\"Bad index\");");
-      line(d+2, "}");
-      line(d+1, "}");
-      line(d, "}");
-
-      // nested classes
-      if (d == 0)
+  private void compile(Schema schema) throws IOException {
+    startFile(schema.getName(), schema.getNamespace());
+    try {
+      switch (schema.getType()) {
+      case RECORD:
+        line(0, "public class "+type(schema)+
+             (schema.isError()
+              ? " extends SpecificExceptionBase"
+               : " extends SpecificRecordBase")
+             +" implements SpecificRecord {");
+        // schema definition
+        line(1, "public static final Schema _SCHEMA = Schema.parse(\""
+             +esc(schema)+"\");");
+        // field declarations
         for (Map.Entry<String, Schema> field : schema.getFieldSchemas())
-          compile(field.getValue(), null, d+1);
-
-      break;
-    case ENUM:
-      buffer.append("\n");
-      line(d, ((d==0)?"public ":"")+"enum "+type+" { ");
-      StringBuilder b = new StringBuilder();
-      int count = 0;
-      for (String symbol : schema.getEnumSymbols()) {
-        b.append(symbol);
-        if (++count < schema.getEnumSymbols().size())
-          b.append(", ");
+          line(1,"public "+unbox(field.getValue())+" "+field.getKey()+";");
+        // schema method
+        line(1, "public Schema getSchema() { return _SCHEMA; }");
+        // get method
+        line(1, "public Object get(int _field) {");
+        line(2, "switch (_field) {");
+        int i = 0;
+        for (Map.Entry<String, Schema> field : schema.getFieldSchemas())
+          line(2, "case "+(i++)+": return "+field.getKey()+";");
+        line(2, "default: throw new AvroRuntimeException(\"Bad index\");");
+        line(2, "}");
+        line(1, "}");
+        // set method
+        line(1, "@SuppressWarnings(value=\"unchecked\")");
+        line(1, "public void set(int _field, Object _value) {");
+        line(2, "switch (_field) {");
+        i = 0;
+        for (Map.Entry<String, Schema> field : schema.getFieldSchemas())
+          line(2, "case "+(i++)+": "+field.getKey()+" = ("+
+               type(field.getValue())+")_value; break;");
+        line(2, "default: throw new AvroRuntimeException(\"Bad index\");");
+        line(2, "}");
+        line(1, "}");
+        line(0, "}");
+        break;
+      case ENUM:
+        line(0, "public enum "+type(schema)+" { ");
+        StringBuilder b = new StringBuilder();
+        int count = 0;
+        for (String symbol : schema.getEnumSymbols()) {
+          b.append(symbol);
+          if (++count < schema.getEnumSymbols().size())
+            b.append(", ");
+        }
+        line(1, b.toString());
+        line(0, "}");
+        break;
+      case FIXED:
+        line(0, "@FixedSize("+schema.getFixedSize()+")");
+        line(0, "public class "+type(schema)+" extends SpecificFixed {}");
+        break;
+      case MAP: case ARRAY: case UNION: case STRING: case BYTES:
+      case INT: case LONG: case FLOAT: case DOUBLE: case BOOLEAN: case NULL:
+        break;
+      default: throw new RuntimeException("Unknown type: "+schema);
       }
-      line(d+1, b.toString());
-      line(d, "}");
-      break;
-    case ARRAY:
-      compile(schema.getElementType(), name+"Element", d);
-      break;
-    case MAP:
-      compile(schema.getValueType(), name+"Value", d);
-      break;
-    case FIXED:
-      buffer.append("\n");
-      line(d, "@FixedSize("+schema.getFixedSize()+")");
-      line(d, ((d==0)?"public ":"")
-           +((d>1)?"static ":"")+"class "+type
-           +" extends SpecificFixed {}");
-      break;
-    case UNION:
-      int choice = 0;
-      for (Schema t : schema.getTypes())
-        compile(t, name+"Choice"+choice++, d);
-      break;
-
-    case STRING: case BYTES:
-    case INT: case LONG:
-    case FLOAT: case DOUBLE:
-    case BOOLEAN: case NULL:
-      break;
-    default: throw new RuntimeException("Unknown type: "+schema);
+    } finally {
+      out.close();
     }
   }
 
   private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);
 
-  private String type(Schema schema, String name) {
+  private String type(Schema schema) {
     switch (schema.getType()) {
     case RECORD:
     case ENUM:
     case FIXED:
-      return schema.getName() == null ? cap(name) : schema.getName();
+      return schema.getName();
     case ARRAY:
-      return "GenericArray<"+type(schema.getElementType(),name+"Element")+">";
+      return "GenericArray<"+type(schema.getElementType())+">";
     case MAP:
-      return "Map<Utf8,"+type(schema.getValueType(),name+"Value")+">";
+      return "Map<Utf8,"+type(schema.getValueType())+">";
     case UNION:
       List<Schema> types = schema.getTypes();     // elide unions with null
       if ((types.size() == 2) && types.contains(NULL_SCHEMA))
-        return type(types.get(types.get(0).equals(NULL_SCHEMA) ? 1 : 0), name);
+        return type(types.get(types.get(0).equals(NULL_SCHEMA) ? 1 : 0));
       return "Object";
     case STRING:  return "Utf8";
     case BYTES:   return "ByteBuffer";
@@ -253,23 +273,23 @@
     }
   }
 
-  private String unbox(Schema schema, String name) {
+  private String unbox(Schema schema) {
     switch (schema.getType()) {
     case INT:     return "int";
     case LONG:    return "long";
     case FLOAT:   return "float";
     case DOUBLE:  return "double";
     case BOOLEAN: return "boolean";
-    default:      return type(schema, name);
+    default:      return type(schema);
     }
   }
 
-  private void line(int indent, String text) {
+  private void line(int indent, String text) throws IOException {
     for (int i = 0; i < indent; i ++) {
-      buffer.append("  ");
+      out.append("  ");
     }
-    buffer.append(text);
-    buffer.append("\n");
+    out.append(text);
+    out.append("\n");
   }
 
   static String cap(String name) {
@@ -281,7 +301,8 @@
   }
 
   public static void main(String[] args) throws Exception {
-    System.out.println(compileProtocol(new File(args[0])).getCode());
+    //compileSchema(new File(args[0]), new File(args[1]));
+    compileProtocol(new File(args[0]), new File(args[1]));
   }
 
 }

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java Fri Sep 25 21:10:53 2009
@@ -18,8 +18,10 @@
 package org.apache.avro.specific;
 
 import java.util.Iterator;
+import java.util.Map;
 
 import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
 import org.apache.avro.Schema.Field;
 import org.apache.avro.reflect.ReflectData;
 
@@ -34,6 +36,29 @@
   public static SpecificData get() { return INSTANCE; }
 
   @Override
+  protected Schema createSchema(java.lang.reflect.Type type,
+                             Map<String,Schema> names) {
+    if (type instanceof Class) {
+      Class c = (Class)type;
+      String name = c.getSimpleName();
+      Schema schema = names.get(name);
+      if (schema != null) return schema;
+      if (SpecificRecord.class.isAssignableFrom(c)) {
+        try {
+          schema = (Schema)((Class)type).getDeclaredField("_SCHEMA").get(null);
+        } catch (NoSuchFieldException e) {
+          throw new AvroRuntimeException(e);
+        } catch (IllegalAccessException e) {
+          throw new AvroRuntimeException(e);
+        }
+        names.put(name, schema);
+        return schema;
+      }
+    }
+    return super.createSchema(type, names);
+  }
+
+  @Override
   protected boolean isRecord(Object datum) {
     return datum instanceof SpecificRecord;
   }
@@ -66,6 +91,8 @@
     }
   }
 
-
 }
 
+
+
+

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java Fri Sep 25 21:10:53 2009
@@ -22,16 +22,14 @@
 
 /** {@link org.apache.avro.io.DatumReader DatumReader} for generated Java classes. */
 public class SpecificDatumReader extends ReflectDatumReader {
-  public SpecificDatumReader(String packageName) {
-    super(packageName);
-  }
+  public SpecificDatumReader() {}
 
-  public SpecificDatumReader(Schema root, String packageName) {
-    super(root, packageName);
+  public SpecificDatumReader(Class c) {
+    this(SpecificData.get().getSchema(c));
   }
 
-  public SpecificDatumReader(Schema root) {
-    super(root, root.getNamespace()+".");
+  public SpecificDatumReader(Schema schema) {
+    super(schema);
   }
 
   protected void addField(Object record, String name, int position, Object o) {

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumWriter.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumWriter.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumWriter.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumWriter.java Fri Sep 25 21:10:53 2009
@@ -24,8 +24,12 @@
 public class SpecificDatumWriter extends ReflectDatumWriter {
   public SpecificDatumWriter() {}
 
-  public SpecificDatumWriter(Schema root) {
-    super(root);
+  public SpecificDatumWriter(Class c) {
+    super(SpecificData.get().getSchema(c), SpecificData.get());
+  }
+  
+  public SpecificDatumWriter(Schema schema) {
+    super(schema, SpecificData.get());
   }
   
   protected Object getField(Object record, String name, int position) {

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificRequestor.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificRequestor.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificRequestor.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificRequestor.java Fri Sep 25 21:10:53 2009
@@ -52,7 +52,7 @@
   }
 
   protected DatumReader<Object> getDatumReader(Schema schema) {
-    return new SpecificDatumReader(schema, packageName);
+    return new SpecificDatumReader(schema);
   }
 
   /** Create a proxy instance whose methods invoke RPCs. */

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java Fri Sep 25 21:10:53 2009
@@ -34,7 +34,7 @@
   }
 
   protected DatumReader<Object> getDatumReader(Schema schema) {
-    return new SpecificDatumReader(schema, packageName);
+    return new SpecificDatumReader(schema);
   }
 
 }

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java Fri Sep 25 21:10:53 2009
@@ -38,7 +38,10 @@
 import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.util.Utf8;
 
+import org.apache.avro.test.TestRecord;
 import org.apache.avro.test.Simple;
+import org.apache.avro.test.Kind;
+import org.apache.avro.test.MD5;
 
 public class TestCompare {
 
@@ -153,22 +156,22 @@
 
   @Test
   public void testSpecificRecord() throws Exception {
-    Simple.TestRecord s1 = new Simple.TestRecord();
-    Simple.TestRecord s2 = new Simple.TestRecord();
+    TestRecord s1 = new TestRecord();
+    TestRecord s2 = new TestRecord();
     s1.name = new Utf8("foo");
-    s1.kind = Simple.Kind.BAZ;
-    s1.hash = new Simple.MD5();
+    s1.kind = Kind.BAZ;
+    s1.hash = new MD5();
     s1.hash.bytes(new byte[] {0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5});
     s2.name = new Utf8("bar");
-    s2.kind = Simple.Kind.BAR;
-    s2.hash = new Simple.MD5();
+    s2.kind = Kind.BAR;
+    s2.hash = new MD5();
     s2.hash.bytes(new byte[] {0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,6});
-    check(Simple.TestRecord._SCHEMA, s1, s2, true,
-          new SpecificDatumWriter(Simple.TestRecord._SCHEMA),
+    Schema schema = SpecificData.get().getSchema(TestRecord.class);
+
+    check(schema, s1, s2, true, new SpecificDatumWriter(schema),
           SpecificData.get());
-    s2.kind = Simple.Kind.BAZ;
-    check(Simple.TestRecord._SCHEMA, s1, s2, true,
-          new SpecificDatumWriter(Simple.TestRecord._SCHEMA),
+    s2.kind = Kind.BAZ;
+    check(schema, s1, s2, true, new SpecificDatumWriter(schema),
           SpecificData.get());
   }  
 

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java Fri Sep 25 21:10:53 2009
@@ -124,12 +124,12 @@
 
   @Test
     public void testGeneratedSpecific() throws IOException {
-      readFiles(new SpecificDatumReader("org.apache.avro."));
+      readFiles(new SpecificDatumReader());
     }
 
   @Test
     public void testGeneratedReflect() throws IOException {
-      readFiles(new ReflectDatumReader("org.apache.avro."));
+      readFiles(new ReflectDatumReader());
     }
 
     private void readFiles(DatumReader<Object> datumReader) throws IOException {

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java Fri Sep 25 21:10:53 2009
@@ -63,7 +63,7 @@
     write(writer, new FooRecord(20), check);
     writer.close();
 
-    ReflectDatumReader din = new ReflectDatumReader("org.apache.avro.");
+    ReflectDatumReader din = new ReflectDatumReader();
     SeekableFileInput sin = new SeekableFileInput(FILE);
     DataFileReader<Object> reader = new DataFileReader<Object>(sin, din);
     Object datum = null;
@@ -110,7 +110,7 @@
 
     writer.close();
 
-    ReflectDatumReader din = new ReflectDatumReader("org.apache.avro.");
+    ReflectDatumReader din = new ReflectDatumReader();
     SeekableFileInput sin = new SeekableFileInput(FILE);
     DataFileReader<Object> reader = new DataFileReader<Object>(sin, din);
     Object datum = null;
@@ -142,7 +142,7 @@
     write(writer, new BarRecord("Two beers please"), check);
     writer.close();
 
-    ReflectDatumReader din = new ReflectDatumReader("org.apache.avro.");
+    ReflectDatumReader din = new ReflectDatumReader();
     SeekableFileInput sin = new SeekableFileInput(FILE);
     DataFileReader<Object> reader = new DataFileReader<Object>(sin, din);
     Object datum = null;

Added: hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceReflect.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceReflect.java?rev=819010&view=auto
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceReflect.java (added)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceReflect.java Fri Sep 25 21:10:53 2009
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.reflect.ReflectRequestor;
+import org.apache.avro.reflect.ReflectResponder;
+import org.apache.avro.test.namespace.TestNamespace;
+import org.junit.Before;
+
+import java.net.InetSocketAddress;
+
+public class TestNamespaceReflect extends TestNamespaceSpecific {
+
+  @Before
+  public void testStartServer() throws Exception {
+    server = new SocketServer(new ReflectResponder(TestNamespace.class, new TestImpl()),
+                              new InetSocketAddress(0));
+    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    proxy = (TestNamespace)ReflectRequestor.getClient(TestNamespace.class, client);
+  }
+
+}

Added: hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java?rev=819010&view=auto
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java (added)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java Fri Sep 25 21:10:53 2009
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.specific.SpecificRequestor;
+import org.apache.avro.specific.SpecificResponder;
+import org.apache.avro.test.namespace.TestNamespace;
+import org.apache.avro.test.util.MD5;
+import org.apache.avro.test.errors.TestError;
+import org.apache.avro.test.namespace.TestRecord;
+import org.apache.avro.util.Utf8;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+
+/**
+ * Tests that protocol/schema namespace declarations (AVRO-120) are honored
+ * end-to-end over RPC using the specific (generated) implementation.  The
+ * protocol under test (src/test/schemata/namespace.avpr) deliberately
+ * spreads its types across three namespaces — org.apache.avro.test.namespace
+ * (TestNamespace, TestRecord), org.apache.avro.test.util (MD5), and
+ * org.apache.avro.test.errors (TestError) — so a successful round trip
+ * proves per-type namespaces map to the right Java packages.
+ */
+public class TestNamespaceSpecific {
+  private static final Logger LOG
+    = LoggerFactory.getLogger(TestNamespaceSpecific.class);
+
+  /** Server-side implementation of the TestNamespace protocol. */
+  public static class TestImpl implements TestNamespace {
+    // echo simply returns its argument; equality is asserted client-side.
+    public TestRecord echo(TestRecord record) { return record; }
+    public Void error() throws AvroRemoteException {
+      // Always fail with a declared error so the client can verify that
+      // TestError (in the ...errors namespace) propagates across the wire.
+      TestError error = new TestError();
+      error.message = new Utf8("an error");
+      throw error;
+    }
+  }
+
+  // Static and protected so subclasses (e.g. TestNamespaceReflect) can
+  // re-point them at a differently-configured server in their own setup.
+  protected static SocketServer server;
+  protected static Transceiver client;
+  protected static TestNamespace proxy;
+
+  /**
+   * Starts a specific-implementation server on an ephemeral port and
+   * connects a client proxy to it before each test.
+   */
+  @Before
+  public void testStartServer() throws Exception {
+    server = new SocketServer(new SpecificResponder(TestNamespace.class, new TestImpl()),
+                              new InetSocketAddress(0));
+    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    proxy = (TestNamespace)SpecificRequestor.getClient(TestNamespace.class, client);
+  }
+
+  /** Round-trips a record whose only field is a fixed(16) from another namespace. */
+  @Test
+  public void testEcho() throws IOException {
+    TestRecord record = new TestRecord();
+    record.hash = new MD5();
+    System.arraycopy(new byte[]{0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5}, 0,
+                     record.hash.bytes(), 0, 16);
+    TestRecord echoed = proxy.echo(record);
+    assertEquals(record, echoed);
+    assertEquals(record.hashCode(), echoed.hashCode());
+  }
+
+  /** Verifies a declared error from a third namespace is thrown client-side. */
+  @Test
+  public void testError() throws IOException {
+    TestError error = null;
+    try {
+      proxy.error();
+    } catch (TestError e) {
+      error = e;
+    }
+    assertNotNull(error);
+    assertEquals("an error", error.message.toString());
+  }
+
+  /** Closes the client connection and shuts the server down after each test. */
+  @After
+  public void testStopServer() throws IOException {
+    client.close();
+    server.close();
+  }
+}

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java Fri Sep 25 21:10:53 2009
@@ -81,7 +81,7 @@
 
       if ("error".equals(message.getName())) {
         GenericRecord error =
-          new GenericData.Record(PROTOCOL.getTypes().get("TestError"));
+          new GenericData.Record(PROTOCOL.getType("TestError"));
         error.put("message", new Utf8("an error"));
         throw new AvroRemoteException(error);
       }
@@ -114,7 +114,7 @@
   @Test
   public void testEcho() throws IOException {
     GenericRecord record =
-      new GenericData.Record(PROTOCOL.getTypes().get("TestRecord"));
+      new GenericData.Record(PROTOCOL.getType("TestRecord"));
     record.put("name", new Utf8("foo"));
     record.put("kind", "BAR");
     record.put("hash", new GenericData.Fixed

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java Fri Sep 25 21:10:53 2009
@@ -25,10 +25,10 @@
 import org.apache.avro.specific.SpecificRequestor;
 import org.apache.avro.specific.SpecificResponder;
 import org.apache.avro.test.Simple;
-import org.apache.avro.test.Simple.Kind;
-import org.apache.avro.test.Simple.MD5;
-import org.apache.avro.test.Simple.TestError;
-import org.apache.avro.test.Simple.TestRecord;
+import org.apache.avro.test.Kind;
+import org.apache.avro.test.MD5;
+import org.apache.avro.test.TestError;
+import org.apache.avro.test.TestRecord;
 import org.apache.avro.util.Utf8;
 import org.junit.After;
 import org.junit.Before;

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java?rev=819010&r1=819009&r2=819010&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java Fri Sep 25 21:10:53 2009
@@ -31,7 +31,7 @@
 import org.apache.avro.reflect.ReflectDatumReader;
 import org.apache.avro.reflect.ReflectDatumWriter;
 import org.apache.avro.test.Simple;
-import org.apache.avro.test.Simple.TestRecord;
+import org.apache.avro.test.TestRecord;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -52,7 +52,7 @@
 
   @Test
   public void testSchema() throws IOException {
-    assertEquals(PROTOCOL.getTypes().get("TestRecord"),
+    assertEquals(PROTOCOL.getType("TestRecord"),
                  ReflectData.get().getSchema(TestRecord.class));
   }
 
@@ -64,14 +64,13 @@
   @Test
   public void testRecord() throws IOException {
     Schema schm = ReflectData.get().getSchema(SampleRecord.class);
-    String prefix = getPrefix(SampleRecord.class);
     ReflectDatumWriter writer = new ReflectDatumWriter(schm);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     SampleRecord record = new SampleRecord();
     record.x = 5;
     record.y = 10;
     writer.write(record, new BinaryEncoder(out));
-    ReflectDatumReader reader = new ReflectDatumReader(schm, prefix);
+    ReflectDatumReader reader = new ReflectDatumReader(schm);
     Object decoded =
       reader.read(null, new BinaryDecoder
                   (new ByteArrayInputStream(out.toByteArray())));
@@ -82,7 +81,6 @@
   public void testRecordWithNull() throws IOException {
     ReflectData reflectData = ReflectData.AllowNull.get();
     Schema schm = reflectData.getSchema(AnotherSampleRecord.class);
-    String prefix = getPrefix(AnotherSampleRecord.class);
     ReflectDatumWriter writer = new ReflectDatumWriter(schm);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     // keep record.a null and see if that works
@@ -90,7 +88,7 @@
     writer.write(a, new BinaryEncoder(out));
     AnotherSampleRecord b = new AnotherSampleRecord(10);
     writer.write(b, new BinaryEncoder(out));
-    ReflectDatumReader reader = new ReflectDatumReader(schm, prefix);
+    ReflectDatumReader reader = new ReflectDatumReader(schm);
     ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
     Object decoded = reader.read(null, new BinaryDecoder(in));
     assertEquals(a, decoded);
@@ -98,15 +96,6 @@
     assertEquals(b, decoded);
   }
 
-  private String getPrefix(Class<?> c) {
-    String prefix =  
-      ((c.getEnclosingClass() == null 
-        || "null".equals(c.getEnclosingClass())) ? 
-       c.getPackage().getName() + "." 
-       : (c.getEnclosingClass().getName() + "$"));
-    return prefix;
-  }
-
   public static class SampleRecord {
     public int x = 1;
     private int y = 2;

Added: hadoop/avro/trunk/src/test/schemata/namespace.avpr
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/schemata/namespace.avpr?rev=819010&view=auto
==============================================================================
--- hadoop/avro/trunk/src/test/schemata/namespace.avpr (added)
+++ hadoop/avro/trunk/src/test/schemata/namespace.avpr Fri Sep 25 21:10:53 2009
@@ -0,0 +1,28 @@
+{"namespace": "org.apache.avro.test.namespace",
+ "protocol": "TestNamespace",
+
+ "types": [
+     {"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
+     {"name": "TestRecord", "type": "record",
+      "fields": [ {"name": "hash", "type": "org.apache.avro.test.util.MD5"} ]
+     },
+     {"name": "TestError", "namespace": "org.apache.avro.test.errors",
+      "type": "error", "fields": [ {"name": "message", "type": "string"} ]
+     }
+ ],
+
+ "messages": {
+     "echo": {	
+         "request": [{"name": "record", "type": "TestRecord"}],
+         "response": "TestRecord"
+     },
+
+     "error": {
+         "request": [],
+         "response": "null",
+         "errors": ["org.apache.avro.test.errors.TestError"]
+     }
+
+ }
+
+}



Mime
View raw message