hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r513122 [2/3] - in /lucene/hadoop/trunk: ./ src/c++/librecordio/ src/java/org/apache/hadoop/io/ src/java/org/apache/hadoop/record/ src/java/org/apache/hadoop/record/compiler/ src/java/org/apache/hadoop/record/compiler/ant/ src/java/org/apac...
Date Thu, 01 Mar 2007 03:47:29 GMT
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java Wed Feb 28 19:47:27 2007
@@ -18,49 +18,58 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JType.CType;
+import org.apache.hadoop.record.compiler.JType.CppType;
+
 /**
  *
  * @author Milind Bhandarkar
  */
 public class JFloat extends JType {
+  
+  class JavaFloat extends JavaType {
     
-    /** Creates a new instance of JFloat */
-    public JFloat() {
-        super("float", "float", "Float", "Float", "toFloat");
+    JavaFloat() {
+      super("float", "Float", "Float");
     }
     
-    public String getSignature() {
-        return "f";
+    void genHashCode(CodeBuffer cb, String fname) {
+      cb.append("ret = Float.floatToIntBits("+fname+");\n");
     }
     
-    public String genJavaHashCode(String fname) {
-        return "    ret = Float.floatToIntBits("+fname+");\n";
+    void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
+      cb.append("{\n");
+      cb.append("if ("+l+"<4) {\n");
+      cb.append("throw new java.io.IOException(\"Float is exactly 4 bytes."+
+          " Provided buffer is smaller.\");\n");
+      cb.append("}\n");
+      cb.append(s+"+=4; "+l+"-=4;\n");
+      cb.append("}\n");
     }
     
-    public String genJavaSlurpBytes(String b, String s, String l) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           if ("+l+"<4) {\n");
-      sb.append("             throw new IOException(\"Float is exactly 4 bytes. Provided buffer is smaller.\");\n");
-      sb.append("           }\n");
-      sb.append("           "+s+"+=4; "+l+"-=4;\n");
-      sb.append("        }\n");
-      return sb.toString();
-    }
-    
-    public String genJavaCompareBytes() {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           if (l1<4 || l2<4) {\n");
-      sb.append("             throw new IOException(\"Float is exactly 4 bytes. Provided buffer is smaller.\");\n");
-      sb.append("           }\n");
-      sb.append("           float f1 = WritableComparator.readFloat(b1, s1);\n");
-      sb.append("           float f2 = WritableComparator.readFloat(b2, s2);\n");
-      sb.append("           if (f1 != f2) {\n");
-      sb.append("             return ((f1-f2) < 0) ? -1 : 0;\n");
-      sb.append("           }\n");
-      sb.append("           s1+=4; s2+=4; l1-=4; l2-=4;\n");
-      sb.append("        }\n");
-      return sb.toString();
+    void genCompareBytes(CodeBuffer cb) {
+      cb.append("{\n");
+      cb.append("if (l1<4 || l2<4) {\n");
+      cb.append("throw new java.io.IOException(\"Float is exactly 4 bytes."+
+          " Provided buffer is smaller.\");\n");
+      cb.append("}\n");
+      cb.append("float f1 = org.apache.hadoop.record.Utils.readFloat(b1, s1);\n");
+      cb.append("float f2 = org.apache.hadoop.record.Utils.readFloat(b2, s2);\n");
+      cb.append("if (f1 != f2) {\n");
+      cb.append("return ((f1-f2) < 0) ? -1 : 0;\n");
+      cb.append("}\n");
+      cb.append("s1+=4; s2+=4; l1-=4; l2-=4;\n");
+      cb.append("}\n");
     }
+  }
+  /** Creates a new instance of JFloat */
+  public JFloat() {
+    setJavaType(new JavaFloat());
+    setCppType(new CppType("float"));
+    setCType(new CType());
+  }
+  
+  String getSignature() {
+    return "f";
+  }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java Wed Feb 28 19:47:27 2007
@@ -18,43 +18,50 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JType.CType;
+import org.apache.hadoop.record.compiler.JType.CppType;
+
 /**
- *
+ * Code generator for "int" type
  * @author Milind Bhandarkar
  */
 public class JInt extends JType {
+  
+  class JavaInt extends JavaType {
     
-    /** Creates a new instance of JInt */
-    public JInt() {
-        super("int32_t", "int", "Int", "Integer", "toInt");
+    JavaInt() {
+      super("int", "Int", "Integer");
     }
     
-    public String getSignature() {
-        return "i";
+    void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
+      cb.append("{\n");
+      cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
+      cb.append("int z = org.apache.hadoop.record.Utils.getVIntSize(i);\n");
+      cb.append(s+"+=z; "+l+"-=z;\n");
+      cb.append("}\n");
     }
     
-    public String genJavaSlurpBytes(String b, String s, String l) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           int i = WritableComparator.readVInt("+b+", "+s+");\n");
-      sb.append("           int z = WritableUtils.getVIntSize(i);\n");
-      sb.append("           "+s+"+=z; "+l+"-=z;\n");
-      sb.append("        }\n");
-      return sb.toString();
-    }
-    
-    public String genJavaCompareBytes() {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           int i1 = WritableComparator.readVInt(b1, s1);\n");
-      sb.append("           int i2 = WritableComparator.readVInt(b2, s2);\n");
-      sb.append("           if (i1 != i2) {\n");
-      sb.append("             return ((i1-i2) < 0) ? -1 : 0;\n");
-      sb.append("           }\n");
-      sb.append("           int z1 = WritableUtils.getVIntSize(i1);\n");
-      sb.append("           int z2 = WritableUtils.getVIntSize(i2);\n");
-      sb.append("           s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
-      sb.append("        }\n");
-      return sb.toString();
+    void genCompareBytes(CodeBuffer cb) {
+      cb.append("{\n");
+      cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
+      cb.append("int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
+      cb.append("if (i1 != i2) {\n");
+      cb.append("return ((i1-i2) < 0) ? -1 : 0;\n");
+      cb.append("}\n");
+      cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
+      cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
+      cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
+      cb.append("}\n");
     }
+  }
+  /** Creates a new instance of JInt */
+  public JInt() {
+    setJavaType(new JavaInt());
+    setCppType(new CppType("int32_t"));
+    setCType(new CType());
+  }
+  
+  String getSignature() {
+    return "i";
+  }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java Wed Feb 28 19:47:27 2007
@@ -18,47 +18,54 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JType.CType;
+import org.apache.hadoop.record.compiler.JType.CppType;
+
 /**
- *
+ * Code generator for "long" type
  * @author Milind Bhandarkar
  */
 public class JLong extends JType {
+  
+  class JavaLong extends JavaType {
     
-    /** Creates a new instance of JLong */
-    public JLong() {
-        super("int64_t", "long", "Long", "Long", "toLong");
+    JavaLong() {
+      super("long", "Long", "Long");
     }
     
-    public String getSignature() {
-        return "l";
+    void genHashCode(CodeBuffer cb, String fname) {
+      cb.append("ret = (int) ("+fname+"^("+fname+">>>32));\n");
     }
     
-    public String genJavaHashCode(String fname) {
-        return "    ret = (int) ("+fname+"^("+fname+">>>32));\n";
+    void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
+      cb.append("{\n");
+      cb.append("long i = org.apache.hadoop.record.Utils.readVLong("+b+", "+s+");\n");
+      cb.append("int z = org.apache.hadoop.record.Utils.getVIntSize(i);\n");
+      cb.append(s+"+=z; "+l+"-=z;\n");
+      cb.append("}\n");
     }
     
-    public String genJavaSlurpBytes(String b, String s, String l) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           long i = WritableComparator.readVLong("+b+", "+s+");\n");
-      sb.append("           int z = WritableUtils.getVIntSize(i);\n");
-      sb.append("           "+s+"+=z; "+l+"-=z;\n");
-      sb.append("        }\n");
-      return sb.toString();
-    }
-    
-    public String genJavaCompareBytes() {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           long i1 = WritableComparator.readVLong(b1, s1);\n");
-      sb.append("           long i2 = WritableComparator.readVLong(b2, s2);\n");
-      sb.append("           if (i1 != i2) {\n");
-      sb.append("             return ((i1-i2) < 0) ? -1 : 0;\n");
-      sb.append("           }\n");
-      sb.append("           int z1 = WritableUtils.getVIntSize(i1);\n");
-      sb.append("           int z2 = WritableUtils.getVIntSize(i2);\n");
-      sb.append("           s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
-      sb.append("        }\n");
-      return sb.toString();
+    void genCompareBytes(CodeBuffer cb) {
+      cb.append("{\n");
+      cb.append("long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);\n");
+      cb.append("long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);\n");
+      cb.append("if (i1 != i2) {\n");
+      cb.append("return ((i1-i2) < 0) ? -1 : 0;\n");
+      cb.append("}\n");
+      cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
+      cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
+      cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
+      cb.append("}\n");
     }
+  }
+  /** Creates a new instance of JLong */
+  public JLong() {
+    setJavaType(new JavaLong());
+    setCppType(new CppType("int64_t"));
+    setCType(new CType());
+  }
+  
+  String getSignature() {
+    return "l";
+  }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java Wed Feb 28 19:47:27 2007
@@ -23,149 +23,152 @@
  * @author Milind Bhandarkar
  */
 public class JMap extends JCompType {
-   
-    static private int level = 0;
-    
-    static private String getLevel() { return Integer.toString(level); }
-    
-    static private void incrLevel() { level++; }
-    
-    static private void decrLevel() { level--; }
-    
-    static private String getId(String id) { return id+getLevel(); }
-    
-    private JType mKey;
-    private JType mValue;
-    
-    /** Creates a new instance of JMap */
-    public JMap(JType t1, JType t2) {
-        super(" ::std::map<"+t1.getCppType()+","+t2.getCppType()+">",
-                "java.util.TreeMap", "Map", "java.util.TreeMap");
-        mKey = t1;
-        mValue = t2;
+  
+  static private int level = 0;
+  
+  static private String getLevel() { return Integer.toString(level); }
+  
+  static private void incrLevel() { level++; }
+  
+  static private void decrLevel() { level--; }
+  
+  static private String getId(String id) { return id+getLevel(); }
+  
+  private JType keyType;
+  private JType valueType;
+  
+  class JavaMap extends JavaCompType {
+    
+    JType.JavaType key;
+    JType.JavaType value;
+    
+    JavaMap(JType.JavaType key, JType.JavaType value) {
+      super("java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">",
+          "Map",
+          "java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">");
+      this.key = key;
+      this.value = value;
+    }
+    
+    void genCompareTo(CodeBuffer cb, String fname, String other) {
+      String setType = "java.util.Set<"+key.getWrapperType()+"> ";
+      String iterType = "java.util.Iterator<"+key.getWrapperType()+"> ";
+      cb.append("{\n");
+      cb.append(setType+getId("set1")+" = "+fname+".keySet();\n");
+      cb.append(setType+getId("set2")+" = "+other+".keySet();\n");
+      cb.append(iterType+getId("miter1")+" = "+
+          getId("set1")+".iterator();\n");
+      cb.append(iterType+getId("miter2")+" = "+
+          getId("set2")+".iterator();\n");
+      cb.append("for(; "+getId("miter1")+".hasNext() && "+
+          getId("miter2")+".hasNext(); ) {\n");
+      cb.append(key.getType()+" "+getId("k1")+
+          " = "+getId("miter1")+".next();\n");
+      cb.append(key.getType()+" "+getId("k2")+
+          " = "+getId("miter2")+".next();\n");
+      key.genCompareTo(cb, getId("k1"), getId("k2"));
+      cb.append("if (ret != 0) { return ret; }\n");
+      cb.append("}\n");
+      cb.append("ret = ("+getId("set1")+".size() - "+getId("set2")+".size());\n");
+      cb.append("}\n");
+    }
+    
+    void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
+      if (decl) {
+        cb.append(getType()+" "+fname+";\n");
+      }
+      cb.append("{\n");
+      incrLevel();
+      cb.append("org.apache.hadoop.record.Index "+getId("midx")+" = a_.startMap(\""+tag+"\");\n");
+      cb.append(fname+"=new "+getType()+"();\n");
+      cb.append("for (; !"+getId("midx")+".done(); "+getId("midx")+".incr()) {\n");
+      key.genReadMethod(cb, getId("k"),getId("k"),true);
+      value.genReadMethod(cb, getId("v"),getId("v"),true);
+      cb.append(fname+".put("+getId("k")+","+getId("v")+");\n");
+      cb.append("}\n");
+      cb.append("a_.endMap(\""+tag+"\");\n");
+      decrLevel();
+      cb.append("}\n");
     }
     
-    public String getSignature() {
-        return "{" + mKey.getSignature() + mValue.getSignature() +"}";
+    void genWriteMethod(CodeBuffer cb, String fname, String tag) {
+      String setType = "java.util.Set<java.util.Map.Entry<"+
+          key.getWrapperType()+","+value.getWrapperType()+">> ";
+      String entryType = "java.util.Map.Entry<"+
+          key.getWrapperType()+","+value.getWrapperType()+"> ";
+      String iterType = "java.util.Iterator<java.util.Map.Entry<"+
+          key.getWrapperType()+","+value.getWrapperType()+">> ";
+      cb.append("{\n");
+      incrLevel();
+      cb.append("a_.startMap("+fname+",\""+tag+"\");\n");
+      cb.append(setType+getId("es")+" = "+fname+".entrySet();\n");
+      cb.append("for("+iterType+getId("midx")+" = "+getId("es")+".iterator(); "+getId("midx")+".hasNext(); ) {\n");
+      cb.append(entryType+getId("me")+" = "+getId("midx")+".next();\n");
+      cb.append(key.getType()+" "+getId("k")+" = "+getId("me")+".getKey();\n");
+      cb.append(value.getType()+" "+getId("v")+" = "+getId("me")+".getValue();\n");
+      key.genWriteMethod(cb, getId("k"),getId("k"));
+      value.genWriteMethod(cb, getId("v"),getId("v"));
+      cb.append("}\n");
+      cb.append("a_.endMap("+fname+",\""+tag+"\");\n");
+      cb.append("}\n");
+      decrLevel();
     }
     
-    public String genJavaCompareTo(String fname, String other) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("    {\n");
-      sb.append("      java.util.Set "+getId("set1")+" = "+fname+".keySet();\n");
-      sb.append("      java.util.Set "+getId("set2")+" = "+other+".keySet();\n");
-      sb.append("      java.util.Iterator "+getId("miter1")+" = "+
-          getId("set1")+".iterator();\n");
-      sb.append("      java.util.Iterator "+getId("miter2")+" = "+
-          getId("set2")+".iterator();\n");
-      sb.append("      for(; "+getId("miter1")+".hasNext() && "+
-          getId("miter2")+".hasNext(); ) {\n");
-      sb.append("        "+mKey.getJavaWrapperType()+" "+getId("k1")+
-          " = ("+mKey.getJavaWrapperType()+") "+getId("miter1")+".next();\n");
-      sb.append("        "+mKey.getJavaWrapperType()+" "+getId("k2")+
-          " = ("+mKey.getJavaWrapperType()+") "+getId("miter2")+".next();\n");
-      sb.append(mKey.genJavaCompareToWrapper(getId("k1"), getId("k2")));
-      sb.append("         if (ret != 0) { return ret; }\n");
-      sb.append("      }\n");
-      sb.append("      ret = ("+getId("set1")+".size() - "+getId("set2")+".size());\n");
-      sb.append("    }\n");
-      return sb.toString();
-    }
-    
-    public String genJavaCompareToWrapper(String fname, String other) {
-      return genJavaCompareTo(fname, other);
-    }
-    
-    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
-        StringBuffer ret = new StringBuffer("");
-        if (decl) {
-            ret.append("    java.util.TreeMap "+fname+";\n");
-        }
-        ret.append("    {\n");
-        incrLevel();
-        ret.append("      org.apache.hadoop.record.Index "+getId("midx")+" = a_.startMap(\""+tag+"\");\n");
-        ret.append("      "+fname+"=new java.util.TreeMap();\n");
-        ret.append("      for (; !"+getId("midx")+".done(); "+getId("midx")+".incr()) {\n");
-        ret.append(mKey.genJavaReadWrapper(getId("k"),getId("k"),true));
-        ret.append(mValue.genJavaReadWrapper(getId("v"),getId("v"),true));
-        ret.append("        "+fname+".put("+getId("k")+","+getId("v")+");\n");
-        ret.append("      }\n");
-        ret.append("    a_.endMap(\""+tag+"\");\n");
-        decrLevel();
-        ret.append("    }\n");
-        return ret.toString();
-    }
-    
-    public String genJavaReadMethod(String fname, String tag) {
-        return genJavaReadWrapper(fname, tag, false);
-    }
-    
-    public String genJavaWriteWrapper(String fname, String tag) {
-        StringBuffer ret = new StringBuffer("    {\n");
-        incrLevel();
-        ret.append("      a_.startMap("+fname+",\""+tag+"\");\n");
-        ret.append("      java.util.Set "+getId("es")+" = "+fname+".entrySet();\n");
-        ret.append("      for(java.util.Iterator "+getId("midx")+" = "+getId("es")+".iterator(); "+getId("midx")+".hasNext(); ) {\n");
-        ret.append("        java.util.Map.Entry "+getId("me")+" = (java.util.Map.Entry) "+getId("midx")+".next();\n");
-        ret.append("        "+mKey.getJavaWrapperType()+" "+getId("k")+" = ("+mKey.getJavaWrapperType()+") "+getId("me")+".getKey();\n");
-        ret.append("        "+mValue.getJavaWrapperType()+" "+getId("v")+" = ("+mValue.getJavaWrapperType()+") "+getId("me")+".getValue();\n");
-        ret.append(mKey.genJavaWriteWrapper(getId("k"),getId("k")));
-        ret.append(mValue.genJavaWriteWrapper(getId("v"),getId("v")));
-        ret.append("      }\n");
-        ret.append("      a_.endMap("+fname+",\""+tag+"\");\n");
-        ret.append("    }\n");
-        decrLevel();
-        return ret.toString();
-    }
-    
-    public String genJavaWriteMethod(String fname, String tag) {
-        return genJavaWriteWrapper(fname, tag);
-    }
-    
-    public String genJavaSlurpBytes(String b, String s, String l) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
+    void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
+      cb.append("{\n");
       incrLevel();
-      sb.append("           int "+getId("mi")+
-          " = WritableComparator.readVInt("+b+", "+s+");\n");
-      sb.append("           int "+getId("mz")+
-          " = WritableUtils.getVIntSize("+getId("mi")+");\n");
-      sb.append("           "+s+"+="+getId("mz")+"; "+l+"-="+getId("mz")+";\n");
-      sb.append("           for (int "+getId("midx")+" = 0; "+getId("midx")+
+      cb.append("int "+getId("mi")+
+          " = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
+      cb.append("int "+getId("mz")+
+          " = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi")+");\n");
+      cb.append(s+"+="+getId("mz")+"; "+l+"-="+getId("mz")+";\n");
+      cb.append("for (int "+getId("midx")+" = 0; "+getId("midx")+
           " < "+getId("mi")+"; "+getId("midx")+"++) {");
-      sb.append(mKey.genJavaSlurpBytes(b,s,l));
-      sb.append(mValue.genJavaSlurpBytes(b,s,l));
-      sb.append("           }\n");
+      key.genSlurpBytes(cb, b,s,l);
+      value.genSlurpBytes(cb, b,s,l);
+      cb.append("}\n");
       decrLevel();
-      sb.append("        }\n");
-      return sb.toString();
+      cb.append("}\n");
     }
     
-    public String genJavaCompareBytes() {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
+    void genCompareBytes(CodeBuffer cb) {
+      cb.append("{\n");
       incrLevel();
-      sb.append("           int "+getId("mi1")+
-          " = WritableComparator.readVInt(b1, s1);\n");
-      sb.append("           int "+getId("mi2")+
-          " = WritableComparator.readVInt(b2, s2);\n");
-      sb.append("           int "+getId("mz1")+
-          " = WritableUtils.getVIntSize("+getId("mi1")+");\n");
-      sb.append("           int "+getId("mz2")+
-          " = WritableUtils.getVIntSize("+getId("mi2")+");\n");
-      sb.append("           s1+="+getId("mz1")+"; s2+="+getId("mz2")+
+      cb.append("int "+getId("mi1")+
+          " = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
+      cb.append("int "+getId("mi2")+
+          " = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
+      cb.append("int "+getId("mz1")+
+          " = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi1")+");\n");
+      cb.append("int "+getId("mz2")+
+          " = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi2")+");\n");
+      cb.append("s1+="+getId("mz1")+"; s2+="+getId("mz2")+
           "; l1-="+getId("mz1")+"; l2-="+getId("mz2")+";\n");
-      sb.append("           for (int "+getId("midx")+" = 0; "+getId("midx")+
+      cb.append("for (int "+getId("midx")+" = 0; "+getId("midx")+
           " < "+getId("mi1")+" && "+getId("midx")+" < "+getId("mi2")+
           "; "+getId("midx")+"++) {");
-      sb.append(mKey.genJavaCompareBytes());
-      sb.append(mValue.genJavaSlurpBytes("b1", "s1", "l1"));
-      sb.append(mValue.genJavaSlurpBytes("b2", "s2", "l2"));
-      sb.append("           }\n");
-      sb.append("           if ("+getId("mi1")+" != "+getId("mi2")+
+      key.genCompareBytes(cb);
+      value.genSlurpBytes(cb, "b1", "s1", "l1");
+      value.genSlurpBytes(cb, "b2", "s2", "l2");
+      cb.append("}\n");
+      cb.append("if ("+getId("mi1")+" != "+getId("mi2")+
           ") { return ("+getId("mi1")+"<"+getId("mi2")+")?-1:0; }\n");
       decrLevel();
-      sb.append("        }\n");
-      return sb.toString();
+      cb.append("}\n");
     }
+  }
+  
+  /** Creates a new instance of JMap */
+  public JMap(JType t1, JType t2) {
+    setJavaType(new JavaMap(t1.getJavaType(), t2.getJavaType()));
+    setCppType(new CppType(" ::std::map<"+t1.getCppType().getType()+","+
+        t2.getCppType().getType()+">"));
+    setCType(new CType());
+    keyType = t1;
+    valueType = t2;
+  }
+  
+  String getSignature() {
+    return "{" + keyType.getSignature() + valueType.getSignature() +"}";
+  }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java Wed Feb 28 19:47:27 2007
@@ -29,391 +29,451 @@
  * @author Milind Bhandarkar
  */
 public class JRecord extends JCompType {
-
-    private String mFQName;
-    private String mName;
-    private String mModule;
-    private ArrayList mFields;
-    
-    /**
-     * Creates a new instance of JRecord
-     */
-    public JRecord(String name, ArrayList flist) {
-        super(name.replaceAll("\\.","::"), name, "Record", name);
-        mFQName = name;
-        int idx = name.lastIndexOf('.');
-        mName = name.substring(idx+1);
-        mModule = name.substring(0, idx);
-        mFields = flist;
-    }
-    
-    public String getName() {
-        return mName;
-    }
-    
-    public String getJavaFQName() {
-        return mFQName;
-    }
-    
-    public String getCppFQName() {
-        return mFQName.replaceAll("\\.", "::");
-    }
-    
-    public String getJavaPackage() {
-        return mModule;
-    }
-    
-    public String getCppNameSpace() {
-        return mModule.replaceAll("\\.", "::");
-    }
-    
-    public ArrayList getFields() {
-        return mFields;
-    }
-    
-    public String getSignature() {
-        StringBuffer sb = new StringBuffer();
-        sb.append("L").append(mName).append("(");
-        for (Iterator i = mFields.iterator(); i.hasNext();) {
-            String s = ((JField) i.next()).getSignature();
-            sb.append(s);
-        }
-        sb.append(")");
-        return sb.toString();
-    }
-    
-    public String genCppDecl(String fname) {
-        return "  "+mName+" "+fname+";\n";
-    }
-    
-    public String genJavaReadMethod(String fname, String tag) {
-        return genJavaReadWrapper(fname, tag, false);
-    }
-    
-    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
-        StringBuffer ret = new StringBuffer("");
-        if (decl) {
-            ret.append("    "+getJavaFQName()+" "+fname+";\n");
-        }
-        ret.append("    "+fname+"= new "+getJavaFQName()+"();\n");
-        ret.append("    a_.readRecord("+fname+",\""+tag+"\");\n");
-        return ret.toString();
-    }
-    
-    public String genJavaWriteWrapper(String fname, String tag) {
-        return "    a_.writeRecord("+fname+",\""+tag+"\");\n";
-    }
+  
+  class JavaRecord extends JavaCompType {
     
-    public String genJavaSlurpBytes(String b, String s, String l) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           int r = "+getJavaFQName()+
+    private String fullName;
+    private String name;
+    private String module;
+    private ArrayList<JField<JavaType>> fields =
+        new ArrayList<JField<JavaType>>();
+    
+    JavaRecord(String name, ArrayList<JField<JType>> flist) {
+      super(name, "Record", name);
+      this.fullName = name;
+      int idx = name.lastIndexOf('.');
+      this.name = name.substring(idx+1);
+      this.module = name.substring(0, idx);
+      for (Iterator<JField<JType>> iter = flist.iterator(); iter.hasNext();) {
+        JField<JType> f = iter.next();
+        fields.add(new JField<JavaType>(f.getName(), f.getType().getJavaType()));
+      }
+    }
+    
+    void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
+      if (decl) {
+        cb.append(fullName+" "+fname+";\n");
+      }
+      cb.append(fname+"= new "+fullName+"();\n");
+      cb.append("a_.readRecord("+fname+",\""+tag+"\");\n");
+    }
+    
+    void genWriteMethod(CodeBuffer cb, String fname, String tag) {
+      cb.append("a_.writeRecord("+fname+",\""+tag+"\");\n");
+    }
+    
+    void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
+      cb.append("{\n");
+      cb.append("int r = "+fullName+
           ".Comparator.slurpRaw("+b+","+s+","+l+");\n");
-      sb.append("           "+s+"+=r; "+l+"-=r;\n");
-      sb.append("        }\n");
-      return sb.toString();
+      cb.append(s+"+=r; "+l+"-=r;\n");
+      cb.append("}\n");
     }
     
-    public String genJavaCompareBytes() {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           int r1 = "+getJavaFQName()+
+    void genCompareBytes(CodeBuffer cb) {
+      cb.append("{\n");
+      cb.append("int r1 = "+fullName+
           ".Comparator.compareRaw(b1,s1,l1,b2,s2,l2);\n");
-      sb.append("           if (r1 <= 0) { return r1; }\n");
-      sb.append("           s1+=r1; s2+=r1; l1-=r1; l2-=r1;\n");
-      sb.append("        }\n");
-      return sb.toString();
-    }
-    
-    public void genCppCode(FileWriter hh, FileWriter cc)
-        throws IOException {
-        String[] ns = getCppNameSpace().split("::");
-        for (int i = 0; i < ns.length; i++) {
-            hh.write("namespace "+ns[i]+" {\n");
-        }
-        
-        hh.write("class "+getName()+" : public ::hadoop::Record {\n");
-        hh.write("private:\n");
-        
-        for (Iterator i = mFields.iterator(); i.hasNext();) {
-            JField jf = (JField) i.next();
-            hh.write(jf.genCppDecl());
-        }
-        hh.write("public:\n");
-        hh.write("  virtual void serialize(::hadoop::OArchive& a_, const char* tag) const;\n");
-        hh.write("  virtual void deserialize(::hadoop::IArchive& a_, const char* tag);\n");
-        hh.write("  virtual const ::std::string& type() const;\n");
-        hh.write("  virtual const ::std::string& signature() const;\n");
-        hh.write("  virtual bool operator<(const "+getName()+"& peer_) const;\n");
-        hh.write("  virtual bool operator==(const "+getName()+"& peer_) const;\n");
-        hh.write("  virtual ~"+getName()+"() {};\n");
-        int fIdx = 0;
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            hh.write(jf.genCppGetSet(fIdx));
-        }
-        hh.write("}; // end record "+getName()+"\n");
-        for (int i=ns.length-1; i>=0; i--) {
-            hh.write("} // end namespace "+ns[i]+"\n");
-        }
-        cc.write("void "+getCppFQName()+"::serialize(::hadoop::OArchive& a_, const char* tag) const {\n");
-        cc.write("  a_.startRecord(*this,tag);\n");
-        fIdx = 0;
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            String name = jf.getName();
-            if (jf.getType() instanceof JBuffer) {
-                cc.write("  a_.serialize("+name+","+name+".length(),\""+jf.getTag()+"\");\n");
-            } else {
-                cc.write("  a_.serialize("+name+",\""+jf.getTag()+"\");\n");
-            }
-        }
-        cc.write("  a_.endRecord(*this,tag);\n");
-        cc.write("  return;\n");
-        cc.write("}\n");
-        
-        cc.write("void "+getCppFQName()+"::deserialize(::hadoop::IArchive& a_, const char* tag) {\n");
-        cc.write("  a_.startRecord(*this,tag);\n");
-        fIdx = 0;
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            String name = jf.getName();
-            if (jf.getType() instanceof JBuffer) {
-                cc.write("  { size_t len=0; a_.deserialize("+name+",len,\""+jf.getTag()+"\");}\n");
-            } else {
-                cc.write("  a_.deserialize("+name+",\""+jf.getTag()+"\");\n");
-            }
-        }
-        cc.write("  a_.endRecord(*this,tag);\n");
-        cc.write("  return;\n");
-        cc.write("}\n");
-        
-        
-        cc.write("bool "+getCppFQName()+"::operator< (const "+getCppFQName()+"& peer_) const {\n");
-        cc.write("  return (1\n");
-        for (Iterator i = mFields.iterator(); i.hasNext();) {
-            JField jf = (JField) i.next();
-            String name = jf.getName();
-            cc.write("    && ("+name+" < peer_."+name+")\n");
-        }
-        cc.write("  );\n");
-        cc.write("}\n");
-        
-        cc.write("bool "+getCppFQName()+"::operator== (const "+getCppFQName()+"& peer_) const {\n");
-        cc.write("  return (1\n");
-        for (Iterator i = mFields.iterator(); i.hasNext();) {
-            JField jf = (JField) i.next();
-            String name = jf.getName();
-            cc.write("    && ("+name+" == peer_."+name+")\n");
-        }
-        cc.write("  );\n");
-        cc.write("}\n");
-        
-        cc.write("const ::std::string&"+getCppFQName()+"::type() const {\n");
-        cc.write("  static const ::std::string type_(\""+mName+"\");\n");
-        cc.write("  return type_;\n");
-        cc.write("}\n");
-        
-        cc.write("const ::std::string&"+getCppFQName()+"::signature() const {\n");
-        cc.write("  static const ::std::string sig_(\""+getSignature()+"\");\n");
-        cc.write("  return sig_;\n");
-        cc.write("}\n");
-        
-    }
-    
-    public void genJavaCode(String destDir) throws IOException {
-        String pkg = getJavaPackage();
-        String pkgpath = pkg.replaceAll("\\.", "/");
-        File pkgdir = new File(destDir, pkgpath);
-        if (!pkgdir.exists()) {
-            // create the pkg directory
-            boolean ret = pkgdir.mkdirs();
-            if (!ret) {
-                throw new IOException("Cannnot create directory: "+pkgpath);
-            }
-        } else if (!pkgdir.isDirectory()) {
-            // not a directory
-            throw new IOException(pkgpath+" is not a directory.");
-        }
-        File jfile = new File(pkgdir, getName()+".java");
-        FileWriter jj = new FileWriter(jfile);
-        jj.write("// File generated by hadoop record compiler. Do not edit.\n");
-        jj.write("package "+getJavaPackage()+";\n\n");
-        jj.write("import java.io.IOException;\n");
-        jj.write("import org.apache.commons.logging.Log;\n");
-        jj.write("import org.apache.commons.logging.LogFactory;\n");
-        jj.write("import org.apache.hadoop.io.WritableComparator;\n");
-        jj.write("import org.apache.hadoop.io.WritableComparable;\n");
-        jj.write("import org.apache.hadoop.io.WritableUtils;\n");
-        jj.write("import org.apache.hadoop.io.BytesWritable;\n");
-        jj.write("import org.apache.hadoop.io.Text;\n\n");
-        jj.write("public class "+getName()+" implements org.apache.hadoop.record.Record, WritableComparable {\n");
-        jj.write("  private static final Log LOG= LogFactory.getLog(\""+
-            this.getJavaFQName()+"\");\n");
-        for (Iterator i = mFields.iterator(); i.hasNext();) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaDecl());
-        }
-        jj.write("  public "+getName()+"() { }\n");
-        
-        
-        jj.write("  public "+getName()+"(\n");
-        int fIdx = 0;
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaConstructorParam(fIdx));
-            jj.write((!i.hasNext())?"":",\n");
-        }
-        jj.write(") {\n");
-        fIdx = 0;
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaConstructorSet(fIdx));
-        }
-        jj.write("  }\n");
-        fIdx = 0;
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaGetSet(fIdx));
-        }
-        jj.write("  public void serialize(org.apache.hadoop.record.OutputArchive a_, String tag) throws java.io.IOException {\n");
-        jj.write("    a_.startRecord(this,tag);\n");
-        for (Iterator i = mFields.iterator(); i.hasNext();) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaWriteMethodName());
-        }
-        jj.write("    a_.endRecord(this,tag);\n");
-        jj.write("  }\n");
-        
-        jj.write("  public void deserialize(org.apache.hadoop.record.InputArchive a_, String tag) throws java.io.IOException {\n");
-        jj.write("    a_.startRecord(tag);\n");
-        for (Iterator i = mFields.iterator(); i.hasNext();) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaReadMethodName());
-        }
-        jj.write("    a_.endRecord(tag);\n");
-        jj.write("}\n");
-        
-        jj.write("  public String toString() {\n");
-        jj.write("    try {\n");
-        jj.write("      java.io.ByteArrayOutputStream s =\n");
-        jj.write("        new java.io.ByteArrayOutputStream();\n");
-        jj.write("      org.apache.hadoop.record.CsvOutputArchive a_ = \n");
-        jj.write("        new org.apache.hadoop.record.CsvOutputArchive(s);\n");
-        jj.write("      a_.startRecord(this,\"\");\n");
-        fIdx = 0;
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaWriteMethodName());
-        }
-        jj.write("      a_.endRecord(this,\"\");\n");
-        jj.write("      return new String(s.toByteArray(), \"UTF-8\");\n");
-        jj.write("    } catch (Throwable ex) {\n");
-        jj.write("      throw new RuntimeException(ex);\n");
-        jj.write("    }\n");
-        jj.write("  }\n");
-        
-        jj.write("  public void write(java.io.DataOutput out) throws java.io.IOException {\n");
-        jj.write("    org.apache.hadoop.record.BinaryOutputArchive archive = new org.apache.hadoop.record.BinaryOutputArchive(out);\n");
-        jj.write("    serialize(archive, \"\");\n");
-        jj.write("  }\n");
-        
-        jj.write("  public void readFields(java.io.DataInput in) throws java.io.IOException {\n");
-        jj.write("    org.apache.hadoop.record.BinaryInputArchive archive = new org.apache.hadoop.record.BinaryInputArchive(in);\n");
-        jj.write("    deserialize(archive, \"\");\n");
-        jj.write("  }\n");
-        
-        jj.write("  public int compareTo (Object peer_) throws ClassCastException {\n");
-        jj.write("    if (!(peer_ instanceof "+getName()+")) {\n");
-        jj.write("      throw new ClassCastException(\"Comparing different types of records.\");\n");
-        jj.write("    }\n");
-        jj.write("    "+getName()+" peer = ("+getName()+") peer_;\n");
-        jj.write("    int ret = 0;\n");
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaCompareTo());
-            jj.write("    if (ret != 0) return ret;\n");
-        }
-        jj.write("     return ret;\n");
-        jj.write("  }\n");
-        
-        jj.write("  public boolean equals(Object peer_) {\n");
-        jj.write("    if (!(peer_ instanceof "+getName()+")) {\n");
-        jj.write("      return false;\n");
-        jj.write("    }\n");
-        jj.write("    if (peer_ == this) {\n");
-        jj.write("      return true;\n");
-        jj.write("    }\n");
-        jj.write("    "+getName()+" peer = ("+getName()+") peer_;\n");
-        jj.write("    boolean ret = false;\n");
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaEquals());
-            jj.write("    if (!ret) return ret;\n");
-        }
-        jj.write("     return ret;\n");
-        jj.write("  }\n");
-        
-        jj.write("  public int hashCode() {\n");
-        jj.write("    int result = 17;\n");
-        jj.write("    int ret;\n");
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaHashCode());
-            jj.write("    result = 37*result + ret;\n");
-        }
-        jj.write("    return result;\n");
-        jj.write("  }\n");
-        jj.write("  public static String signature() {\n");
-        jj.write("    return \""+getSignature()+"\";\n");
-        jj.write("  }\n");
-        
-        jj.write("  public static class Comparator extends WritableComparator {\n");
-        jj.write("    public Comparator() {\n");
-        jj.write("      super("+getName()+".class);\n");
-        jj.write("    }\n");
-
-        jj.write("    static public int slurpRaw(byte[] b, int s, int l) {\n");
-        jj.write("      try {\n");
-        jj.write("        int os = s;\n");
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaSlurpBytes("b","s","l"));
-        }
-        jj.write("        return (os - s);\n");
-        jj.write("      } catch(IOException e) {\n");
-        jj.write("        LOG.warn(e);\n");
-        jj.write("        throw new RuntimeException(e);\n");
-        jj.write("      }\n");
-        jj.write("    }\n");
-        
-        jj.write("    static public int compareRaw(byte[] b1, int s1, int l1,\n");
-        jj.write("                       byte[] b2, int s2, int l2) {\n");
-        jj.write("      try {\n");
-        jj.write("        int os1 = s1;\n");
-        for (Iterator i = mFields.iterator(); i.hasNext(); fIdx++) {
-            JField jf = (JField) i.next();
-            jj.write(jf.genJavaCompareBytes());
-        }
-        jj.write("        return (os1 - s1);\n");
-        jj.write("      } catch(IOException e) {\n");
-        jj.write("        LOG.warn(e);\n");
-        jj.write("        throw new RuntimeException(e);\n");
-        jj.write("      }\n");
-        jj.write("    }\n");
-        jj.write("    public int compare(byte[] b1, int s1, int l1,\n");
-        jj.write("                       byte[] b2, int s2, int l2) {\n");
-        jj.write("      int ret = compareRaw(b1,s1,l1,b2,s2,l2);\n");
-        jj.write("      return (ret == -1)? -1 : ((ret==0)? 1 : 0);");
-        jj.write("    }\n");
-        jj.write("  }\n\n");
-        jj.write("  static {\n");
-        jj.write("    WritableComparator.define("+getName()+".class, new Comparator());\n");
-        jj.write("  }\n");
+      cb.append("if (r1 <= 0) { return r1; }\n");
+      cb.append("s1+=r1; s2+=r1; l1-=r1; l2-=r1;\n");
+      cb.append("}\n");
+    }
+    
+    void genCode(String destDir, ArrayList<String> options) throws IOException {
+      String pkg = module;
+      String pkgpath = pkg.replaceAll("\\.", "/");
+      File pkgdir = new File(destDir, pkgpath);
+      if (!pkgdir.exists()) {
+        // create the pkg directory
+        boolean ret = pkgdir.mkdirs();
+        if (!ret) {
+          throw new IOException("Cannot create directory: "+pkgpath);
+        }
+      } else if (!pkgdir.isDirectory()) {
+        // not a directory
+        throw new IOException(pkgpath+" is not a directory.");
+      }
+      File jfile = new File(pkgdir, name+".java");
+      FileWriter jj = new FileWriter(jfile);
+      
+      CodeBuffer cb = new CodeBuffer();
+      cb.append("// File generated by hadoop record compiler. Do not edit.\n");
+      cb.append("package "+module+";\n\n");
+      cb.append("public class "+name+
+          " implements org.apache.hadoop.record.Record");
+      cb.append(", org.apache.hadoop.io.WritableComparable");
+      cb.append(" {\n");
+      
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genDecl(cb, name);
+      }
+      cb.append("public "+name+"() { }\n");
+      
+      
+      cb.append("public "+name+"(\n");
+      int fIdx = 0;
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext(); fIdx++) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genConstructorParam(cb, name);
+        cb.append((!i.hasNext())?"":",\n");
+      }
+      cb.append(") {\n");
+      fIdx = 0;
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext(); fIdx++) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genConstructorSet(cb, name);
+      }
+      cb.append("}\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genGetSet(cb, name);
+      }
+      cb.append("public void serialize("+
+          "final org.apache.hadoop.record.OutputArchive a_, final String tag)\n"+
+          "throws java.io.IOException {\n");
+      cb.append("a_.startRecord(this,tag);\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genWriteMethod(cb, name, name);
+      }
+      cb.append("a_.endRecord(this,tag);\n");
+      cb.append("}\n");
+      
+      cb.append("public void deserialize("+
+          "final org.apache.hadoop.record.InputArchive a_, final String tag)\n"+
+          "throws java.io.IOException {\n");
+      cb.append("a_.startRecord(tag);\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genReadMethod(cb, name, name, false);
+      }
+      cb.append("a_.endRecord(tag);\n");
+      cb.append("}\n");
+      
+      cb.append("public String toString() {\n");
+      cb.append("try {\n");
+      cb.append("java.io.ByteArrayOutputStream s =\n");
+      cb.append("  new java.io.ByteArrayOutputStream();\n");
+      cb.append("org.apache.hadoop.record.CsvOutputArchive a_ = \n");
+      cb.append("  new org.apache.hadoop.record.CsvOutputArchive(s);\n");
+      cb.append("this.serialize(a_,\"\");\n");
+      cb.append("return new String(s.toByteArray(), \"UTF-8\");\n");
+      cb.append("} catch (Throwable ex) {\n");
+      cb.append("throw new RuntimeException(ex);\n");
+      cb.append("}\n");
+      cb.append("}\n");
+      
+      cb.append("public void write(final java.io.DataOutput out)\n"+
+          "throws java.io.IOException {\n");
+      cb.append("org.apache.hadoop.record.BinaryOutputArchive archive =\n"+
+          "new org.apache.hadoop.record.BinaryOutputArchive(out);\n");
+      cb.append("this.serialize(archive, \"\");\n");
+      cb.append("}\n");
+      
+      cb.append("public void readFields(final java.io.DataInput in)\n"+
+          "throws java.io.IOException {\n");
+      cb.append("org.apache.hadoop.record.BinaryInputArchive archive =\n"+
+          "new org.apache.hadoop.record.BinaryInputArchive(in);\n");
+      cb.append("this.deserialize(archive, \"\");\n");
+      cb.append("}\n");
+      cb.append("public int compareTo (final Object peer_) throws ClassCastException {\n");
+      cb.append("if (!(peer_ instanceof "+name+")) {\n");
+      cb.append("throw new ClassCastException(\"Comparing different types of records.\");\n");
+      cb.append("}\n");
+      cb.append(name+" peer = ("+name+") peer_;\n");
+      cb.append("int ret = 0;\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genCompareTo(cb, name, "peer."+name);
+        cb.append("if (ret != 0) return ret;\n");
+      }
+      cb.append("return ret;\n");
+      cb.append("}\n");
+      
+      cb.append("public boolean equals(final Object peer_) {\n");
+      cb.append("if (!(peer_ instanceof "+name+")) {\n");
+      cb.append("return false;\n");
+      cb.append("}\n");
+      cb.append("if (peer_ == this) {\n");
+      cb.append("return true;\n");
+      cb.append("}\n");
+      cb.append(name+" peer = ("+name+") peer_;\n");
+      cb.append("boolean ret = false;\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genEquals(cb, name, "peer."+name);
+        cb.append("if (!ret) return ret;\n");
+      }
+      cb.append("return ret;\n");
+      cb.append("}\n");
+      
+      cb.append("public Object clone() throws CloneNotSupportedException {\n");
+      cb.append(name+" other = new "+name+"();\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genClone(cb, name);
+      }
+      cb.append("return other;\n");
+      cb.append("}\n");
+      
+      cb.append("public int hashCode() {\n");
+      cb.append("int result = 17;\n");
+      cb.append("int ret;\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genHashCode(cb, name);
+        cb.append("result = 37*result + ret;\n");
+      }
+      cb.append("return result;\n");
+      cb.append("}\n");
+      
+      cb.append("public static String signature() {\n");
+      cb.append("return \""+getSignature()+"\";\n");
+      cb.append("}\n");
+      
+      cb.append("public static class Comparator extends"+
+          " org.apache.hadoop.io.WritableComparator {\n");
+      cb.append("public Comparator() {\n");
+      cb.append("super("+name+".class);\n");
+      cb.append("}\n");
+      
+      cb.append("static public int slurpRaw(byte[] b, int s, int l) {\n");
+      cb.append("try {\n");
+      cb.append("int os = s;\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genSlurpBytes(cb, "b","s","l");
+      }
+      cb.append("return (os - s);\n");
+      cb.append("} catch(java.io.IOException e) {\n");
+      cb.append("throw new RuntimeException(e);\n");
+      cb.append("}\n");
+      cb.append("}\n");
+      
+      cb.append("static public int compareRaw(byte[] b1, int s1, int l1,\n");
+      cb.append("                             byte[] b2, int s2, int l2) {\n");
+      cb.append("try {\n");
+      cb.append("int os1 = s1;\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genCompareBytes(cb);
+      }
+      cb.append("return (os1 - s1);\n");
+      cb.append("} catch(java.io.IOException e) {\n");
+      cb.append("throw new RuntimeException(e);\n");
+      cb.append("}\n");
+      cb.append("}\n");
+      cb.append("public int compare(byte[] b1, int s1, int l1,\n");
+      cb.append("                   byte[] b2, int s2, int l2) {\n");
+      cb.append("int ret = compareRaw(b1,s1,l1,b2,s2,l2);\n");
+      cb.append("return (ret == -1)? -1 : ((ret==0)? 1 : 0);\n");
+      cb.append("}\n");
+      cb.append("}\n\n");
+      cb.append("static {\n");
+      cb.append("org.apache.hadoop.io.WritableComparator.define("
+          +name+".class, new Comparator());\n");
+      cb.append("}\n");
+      cb.append("}\n");
 
-        
-        jj.write("}\n");
-        
-        jj.close();
+      jj.write(cb.toString());
+      jj.close();
     }
+  }
+  
+  class CppRecord extends CppCompType {
+    
+    private String fullName;
+    private String name;
+    private String module;
+    private ArrayList<JField<CppType>> fields = 
+        new ArrayList<JField<CppType>>();
+    
+    CppRecord(String name, ArrayList<JField<JType>> flist) {
+      super(name.replaceAll("\\.","::"));
+      this.fullName = name.replaceAll("\\.", "::");
+      int idx = name.lastIndexOf('.');
+      this.name = name.substring(idx+1);
+      this.module = name.substring(0, idx).replaceAll("\\.", "::");
+      for (Iterator<JField<JType>> iter = flist.iterator(); iter.hasNext();) {
+        JField<JType> f = iter.next();
+        fields.add(new JField<CppType>(f.getName(), f.getType().getCppType()));
+      }
+    }
+    
+    String genDecl(String fname) {
+      return "  "+name+" "+fname+";\n";
+    }
+    
+    void genCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
+    throws IOException {
+      CodeBuffer hb = new CodeBuffer();
+      
+      String[] ns = module.split("::");
+      for (int i = 0; i < ns.length; i++) {
+        hb.append("namespace "+ns[i]+" {\n");
+      }
+      
+      hb.append("class "+name+" : public ::hadoop::Record {\n");
+      hb.append("private:\n");
+      
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+        JField<CppType> jf = i.next();
+        String name = jf.getName();
+        CppType type = jf.getType();
+        type.genDecl(hb, name);
+      }
+      hb.append("public:\n");
+      hb.append("virtual void serialize(::hadoop::OArchive& a_, const char* tag) const;\n");
+      hb.append("virtual void deserialize(::hadoop::IArchive& a_, const char* tag);\n");
+      hb.append("virtual const ::std::string& type() const;\n");
+      hb.append("virtual const ::std::string& signature() const;\n");
+      hb.append("virtual bool operator<(const "+name+"& peer_) const;\n");
+      hb.append("virtual bool operator==(const "+name+"& peer_) const;\n");
+      hb.append("virtual ~"+name+"() {};\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+        JField<CppType> jf = i.next();
+        String name = jf.getName();
+        CppType type = jf.getType();
+        type.genGetSet(hb, name);
+      }
+      hb.append("}; // end record "+name+"\n");
+      for (int i=ns.length-1; i>=0; i--) {
+        hb.append("} // end namespace "+ns[i]+"\n");
+      }
+      
+      hh.write(hb.toString());
+      
+      CodeBuffer cb = new CodeBuffer();
+      
+      cb.append("void "+fullName+"::serialize(::hadoop::OArchive& a_, const char* tag) const {\n");
+      cb.append("a_.startRecord(*this,tag);\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+        JField<CppType> jf = i.next();
+        String name = jf.getName();
+        CppType type = jf.getType();
+        if (type instanceof JBuffer.CppBuffer) {
+          cb.append("a_.serialize("+name+","+name+".length(),\""+name+"\");\n");
+        } else {
+          cb.append("a_.serialize("+name+",\""+name+"\");\n");
+        }
+      }
+      cb.append("a_.endRecord(*this,tag);\n");
+      cb.append("return;\n");
+      cb.append("}\n");
+      
+      cb.append("void "+fullName+"::deserialize(::hadoop::IArchive& a_, const char* tag) {\n");
+      cb.append("a_.startRecord(*this,tag);\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+        JField<CppType> jf = i.next();
+        String name = jf.getName();
+        CppType type = jf.getType();
+        if (type instanceof JBuffer.CppBuffer) {
+          cb.append("{\nsize_t len=0; a_.deserialize("+name+",len,\""+name+"\");\n}\n");
+        } else {
+          cb.append("a_.deserialize("+name+",\""+name+"\");\n");
+        }
+      }
+      cb.append("a_.endRecord(*this,tag);\n");
+      cb.append("return;\n");
+      cb.append("}\n");
+      
+      
+      cb.append("bool "+fullName+"::operator< (const "+fullName+"& peer_) const {\n");
+      cb.append("return (1\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+        JField<CppType> jf = i.next();
+        String name = jf.getName();
+        cb.append("&& ("+name+" < peer_."+name+")\n");
+      }
+      cb.append(");\n");
+      cb.append("}\n");
+      
+      cb.append("bool "+fullName+"::operator== (const "+fullName+"& peer_) const {\n");
+      cb.append("return (1\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+        JField<CppType> jf = i.next();
+        String name = jf.getName();
+        cb.append("&& ("+name+" == peer_."+name+")\n");
+      }
+      cb.append(");\n");
+      cb.append("}\n");
+      
+      cb.append("const ::std::string&"+fullName+"::type() const {\n");
+      cb.append("static const ::std::string type_(\""+name+"\");\n");
+      cb.append("return type_;\n");
+      cb.append("}\n");
+      
+      cb.append("const ::std::string&"+fullName+"::signature() const {\n");
+      cb.append("static const ::std::string sig_(\""+getSignature()+"\");\n");
+      cb.append("return sig_;\n");
+      cb.append("}\n");
+      
+      cc.write(cb.toString());
+    }
+  }
+  
+  class CRecord extends CCompType {
+    
+  }
+  
+  private String signature;
+  
+  /**
+   * Creates a new instance of JRecord
+   */
+  public JRecord(String name, ArrayList<JField<JType>> flist) {
+    setJavaType(new JavaRecord(name, flist));
+    setCppType(new CppRecord(name, flist));
+    setCType(new CRecord());
+    // precompute signature
+    int idx = name.lastIndexOf('.');
+    String recName = name.substring(idx+1);
+    StringBuffer sb = new StringBuffer();
+    sb.append("L").append(recName).append("(");
+    for (Iterator<JField<JType>> i = flist.iterator(); i.hasNext();) {
+      String s = i.next().getType().getSignature();
+      sb.append(s);
+    }
+    sb.append(")");
+    signature = sb.toString();
+  }
+  
+  String getSignature() {
+    return signature;
+  }
+  
+  void genCppCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
+    throws IOException {
+    ((CppRecord)getCppType()).genCode(hh,cc, options);
+  }
+  
+  void genJavaCode(String destDir, ArrayList<String> options)
+  throws IOException {
+    ((JavaRecord)getJavaType()).genCode(destDir, options);
+  }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java Wed Feb 28 19:47:27 2007
@@ -18,55 +18,54 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JCompType.CCompType;
+import org.apache.hadoop.record.compiler.JCompType.CppCompType;
+
 /**
  *
  * @author Milind Bhandarkar
  */
 public class JString extends JCompType {
     
-    /** Creates a new instance of JString */
-    public JString() {
-        super(" ::std::string", "Text", "String", "Text");
-    }
+  class JavaString extends JavaCompType {
     
-    public String getSignature() {
-        return "s";
+    JavaString() {
+      super("String", "String", "String");
     }
     
-    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
-        String ret = "";
-        if (decl) {
-            ret = "    Text "+fname+";\n";
-        }
-        return ret + "        "+fname+"=a_.readString(\""+tag+"\");\n";
+    void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
+      cb.append("{\n");
+      cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
+      cb.append("int z = org.apache.hadoop.record.Utils.getVIntSize(i);\n");
+      cb.append(s+"+=(z+i); "+l+"-= (z+i);\n");
+      cb.append("}\n");
     }
     
-    public String genJavaWriteWrapper(String fname, String tag) {
-        return "        a_.writeString("+fname+",\""+tag+"\");\n";
+    void genCompareBytes(CodeBuffer cb) {
+      cb.append("{\n");
+      cb.append("int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
+      cb.append("int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
+      cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
+      cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
+      cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
+      cb.append("int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,l1,b2,s2,l2);\n");
+      cb.append("if (r1 != 0) { return (r1<0)?-1:0; }\n");
+      cb.append("s1+=i1; s2+=i2; l1-=i1; l2-=i2;\n");
+      cb.append("}\n");
     }
     
-    public String genJavaSlurpBytes(String b, String s, String l) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           int i = WritableComparator.readVInt("+b+", "+s+");\n");
-      sb.append("           int z = WritableUtils.getVIntSize(i);\n");
-      sb.append("           "+s+"+=(z+i); "+l+"-= (z+i);\n");
-      sb.append("        }\n");
-      return sb.toString();
+    void genClone(CodeBuffer cb, String fname) {
+      cb.append("other."+fname+" = this."+fname+";\n");
+    }
+  }
+    /** Creates a new instance of JString */
+    public JString() {
+      setJavaType(new JavaString());
+      setCppType(new CppCompType(" ::std::string"));
+      setCType(new CCompType());
     }
     
-    public String genJavaCompareBytes() {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("           int i1 = WritableComparator.readVInt(b1, s1);\n");
-      sb.append("           int i2 = WritableComparator.readVInt(b2, s2);\n");
-      sb.append("           int z1 = WritableUtils.getVIntSize(i1);\n");
-      sb.append("           int z2 = WritableUtils.getVIntSize(i2);\n");
-      sb.append("           s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
-      sb.append("           int r1 = WritableComparator.compareBytes(b1,s1,l1,b2,s2,l2);\n");
-      sb.append("           if (r1 != 0) { return (r1<0)?-1:0; }\n");
-      sb.append("           s1+=i1; s2+=i2; l1-=i1; l1-=i2;\n");
-      sb.append("        }\n");
-      return sb.toString();
+    String getSignature() {
+        return "s";
     }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java Wed Feb 28 19:47:27 2007
@@ -20,129 +20,154 @@
 
 /**
  * Abstract Base class for all types supported by Hadoop Record I/O.
- * 
+ *
  * @author Milind Bhandarkar
  */
 abstract public class JType {
-    
-    private String mCppName;
-    private String mJavaName;
-    private String mMethodSuffix;
-    private String mWrapper;
-    private String mUnwrapMethod;
-    
-    /**
-     * Creates a new instance of JType
-     */
-    JType(String cppname, String javaname, String suffix, String wrapper, String unwrap) {
-        mCppName = cppname;
-        mJavaName = javaname;
-        mMethodSuffix = suffix;
-        mWrapper = wrapper;
-        mUnwrapMethod = unwrap;
-    }
-    
-    abstract String getSignature();
-    
-    String genCppDecl(String fname) {
-        return "  "+mCppName+" m"+fname+";\n"; 
+  
+  static String toCamelCase(String name) {
+    char firstChar = name.charAt(0);
+    if (Character.isLowerCase(firstChar)) {
+      return ""+Character.toUpperCase(firstChar) + name.substring(1);
+    }
+    return name;
+  }
+  
+  JavaType javaType;
+  CppType cppType;
+  CType cType;
+  
+  abstract class JavaType {
+    private String name;
+    private String methodSuffix;
+    private String wrapper;
+    
+    JavaType(String javaname,
+        String suffix,
+        String wrapper) {
+      this.name = javaname;
+      this.methodSuffix = suffix;
+      this.wrapper = wrapper;
+    }
+    
+    void genDecl(CodeBuffer cb, String fname) {
+      cb.append("private "+name+" "+fname+";\n");
+    }
+    
+    void genConstructorParam(CodeBuffer cb, String fname) {
+      cb.append("final "+name+" "+fname);
+    }
+    
+    void genGetSet(CodeBuffer cb, String fname) {
+      cb.append("public "+name+" get"+toCamelCase(fname)+"() {\n");
+      cb.append("return "+fname+";\n");
+      cb.append("}\n");
+      cb.append("public void set"+toCamelCase(fname)+"(final "+name+" "+fname+") {\n");
+      cb.append("this."+fname+"="+fname+";\n");
+      cb.append("}\n");
     }
     
-    String genJavaDecl (String fname) {
-        return "  private "+mJavaName+" m"+fname+";\n";
+    String getType() {
+      return name;
     }
     
-    String genJavaConstructorParam (int fIdx) {
-        return "        "+mJavaName+" m"+fIdx;
+    String getWrapperType() {
+      return wrapper;
     }
     
-    String genCppGetSet(String fname, int fIdx) {
-        String getFunc = "  virtual "+mCppName+" get"+fname+"() const {\n";
-        getFunc += "    return m"+fname+";\n";
-        getFunc += "  }\n";
-        String setFunc = "  virtual void set"+fname+"("+mCppName+" m_) {\n";
-        setFunc += "    m"+fname+"=m_;\n";
-        setFunc += "  }\n";
-        return getFunc+setFunc;
+    String getMethodSuffix() {
+      return methodSuffix;
     }
     
-    String genJavaGetSet(String fname, int fIdx) {
-        String getFunc = "  public "+mJavaName+" get"+fname+"() {\n";
-        getFunc += "    return m"+fname+";\n";
-        getFunc += "  }\n";
-        String setFunc = "  public void set"+fname+"("+mJavaName+" m_) {\n";
-        setFunc += "    m"+fname+"=m_;\n";
-        setFunc += "  }\n";
-        return getFunc+setFunc;
+    void genWriteMethod(CodeBuffer cb, String fname, String tag) {
+      cb.append("a_.write"+methodSuffix+"("+fname+",\""+tag+"\");\n");
     }
     
-    String getCppType() {
-        return mCppName;
+    void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
+      if (decl) {
+        cb.append(name+" "+fname+";\n");
+      }
+      cb.append(fname+"=a_.read"+methodSuffix+"(\""+tag+"\");\n");
     }
     
-    String getJavaType() {
-        return mJavaName;
-    }
-   
-    String getJavaWrapperType() {
-        return mWrapper;
+    void genCompareTo(CodeBuffer cb, String fname, String other) {
+      cb.append("ret = ("+fname+" == "+other+")? 0 :(("+fname+"<"+other+
+          ")?-1:1);\n");
     }
     
-    String getMethodSuffix() {
-        return mMethodSuffix;
-    }
+    abstract void genCompareBytes(CodeBuffer cb);
     
-    String genJavaWriteMethod(String fname, String tag) {
-        return "    a_.write"+mMethodSuffix+"("+fname+",\""+tag+"\");\n";
-    }
+    abstract void genSlurpBytes(CodeBuffer cb, String b, String s, String l);
     
-    String genJavaReadMethod(String fname, String tag) {
-        return "    "+fname+"=a_.read"+mMethodSuffix+"(\""+tag+"\");\n";
+    void genEquals(CodeBuffer cb, String fname, String peer) {
+      cb.append("ret = ("+fname+"=="+peer+");\n");
     }
     
-    String genJavaReadWrapper(String fname, String tag, boolean decl) {
-        String ret = "";
-        if (decl) {
-            ret = "    "+mWrapper+" "+fname+";\n";
-        }
-        return ret + "    "+fname+"=new "+mWrapper+"(a_.read"+mMethodSuffix+"(\""+tag+"\"));\n";
+    void genHashCode(CodeBuffer cb, String fname) {
+      cb.append("ret = (int)"+fname+";\n");
     }
     
-    String genJavaWriteWrapper(String fname, String tag) {
-        return "        a_.write"+mMethodSuffix+"("+fname+"."+mUnwrapMethod+"(),\""+tag+"\");\n";
+    void genConstructorSet(CodeBuffer cb, String fname) {
+      cb.append("this."+fname+" = "+fname+";\n");
     }
     
-    String genJavaCompareToWrapper(String fname, String other) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
-      sb.append("          "+mJavaName+" ee1 = ("+fname+"."+mUnwrapMethod+"();\n");
-      sb.append("          "+mJavaName+" ee2 = ("+other+"."+mUnwrapMethod+"();\n");
-      sb.append("          ret = (ee1 == ee2)? 0 :((ee1<ee2)?-1:1);\n");
-      sb.append("        }\n");
-      return sb.toString();
+    void genClone(CodeBuffer cb, String fname) {
+      cb.append("other."+fname+" = this."+fname+";\n");
     }
+  }
+  
+  class CppType {
+    private String name;
     
-    String genJavaCompareTo(String fname, String other) {
-        return "    ret = ("+fname+" == "+other+")? 0 :(("+fname+"<"+other+")?-1:1);\n";
+    CppType(String cppname) {
+      name = cppname;
     }
     
-    String genJavaCompareBytes() {
-      return "        // throw new IOException(\"Not Implemented yet!\");\n";
+    void genDecl(CodeBuffer cb, String fname) {
+      cb.append(name+" "+fname+";\n");
     }
     
-    String genJavaSlurpBytes(String b, String s, String l) {
-      return "        // throw new IOException(\"Not Implemented yet!\");\n";
+    void genGetSet(CodeBuffer cb, String fname) {
+      cb.append("virtual "+name+" get"+toCamelCase(fname)+"() const {\n");
+      cb.append("return "+fname+";\n");
+      cb.append("}\n");
+      cb.append("virtual void set"+toCamelCase(fname)+"("+name+" m_) {\n");
+      cb.append(fname+"=m_;\n");
+      cb.append("}\n");
     }
     
-    String genJavaEquals(String fname, String peer) {
-        return "    ret = ("+fname+"=="+peer+");\n";
+    String getType() {
+      return name;
     }
+  }
+  
+  class CType {
     
-    String genJavaHashCode(String fname) {
-        return "    ret = (int)"+fname+";\n";
-    }
-
-    String genJavaConstructorSet(String fname, int fIdx) {
-        return "    m"+fname+"=m"+fIdx+";\n";
-    }
+  }
+  
+  abstract String getSignature();
+  
+  void setJavaType(JavaType jType) {
+    this.javaType = jType;
+  }
+  
+  JavaType getJavaType() {
+    return javaType;
+  }
+  
+  void setCppType(CppType cppType) {
+    this.cppType = cppType;
+  }
+  
+  CppType getCppType() {
+    return cppType;
+  }
+  
+  void setCType(CType cType) {
+    this.cType = cType;
+  }
+  
+  CType getCType() {
+    return cType;
+  }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java Wed Feb 28 19:47:27 2007
@@ -18,141 +18,136 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JCompType.CCompType;
+import org.apache.hadoop.record.compiler.JCompType.CppCompType;
+
 /**
  *
  * @author Milind Bhandarkar
  */
 public class JVector extends JCompType {
+  
+  static private int level = 0;
+  
+  static private String getId(String id) { return id+getLevel(); }
+  
+  static private String getLevel() { return Integer.toString(level); }
+  
+  static private void incrLevel() { level++; }
+  
+  static private void decrLevel() { level--; }
+  
+  private JType type;
+  
+  class JavaVector extends JavaCompType {
+    
+    private JType.JavaType element;
+    
+    JavaVector(JType.JavaType t) {
+      super("java.util.ArrayList<"+t.getWrapperType()+">",
+          "Vector", "java.util.ArrayList<"+t.getWrapperType()+">");
+      element = t;
+    }
+    
+    void genCompareTo(CodeBuffer cb, String fname, String other) {
+      cb.append("{\n");
+      cb.append("int "+getId("len1")+" = "+fname+".size();\n");
+      cb.append("int "+getId("len2")+" = "+other+".size();\n");
+      cb.append("for(int "+getId("vidx")+" = 0; "+getId("vidx")+"<"+
+          getId("len1")+" && "+getId("vidx")+"<"+getId("len2")+"; "+
+          getId("vidx")+"++) {\n");
+      cb.append(element.getType()+" "+getId("e1")+
+          " = "+fname+
+          ".get("+getId("vidx")+");\n");
+      cb.append(element.getType()+" "+getId("e2")+
+          " = "+other+
+          ".get("+getId("vidx")+");\n");
+      element.genCompareTo(cb, getId("e1"), getId("e2"));
+      cb.append("if (ret != 0) { return ret; }\n");
+      cb.append("}\n");
+      cb.append("ret = ("+getId("len1")+" - "+getId("len2")+");\n");
+      cb.append("}\n");
+    }
     
-    static private int level = 0;
-    
-    static private String getId(String id) { return id+getLevel(); }
-    
-    static private String getLevel() { return Integer.toString(level); }
-    
-    static private void incrLevel() { level++; }
-    
-    static private void decrLevel() { level--; }
-    
-    private JType mElement;
-    
-    /** Creates a new instance of JVector */
-    public JVector(JType t) {
-        super(" ::std::vector<"+t.getCppType()+">", "java.util.ArrayList", "Vector", "java.util.ArrayList");
-        mElement = t;
+    void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
+      if (decl) {
+        cb.append(getType()+" "+fname+";\n");
+      }
+      cb.append("{\n");
+      incrLevel();
+      cb.append("org.apache.hadoop.record.Index "+getId("vidx")+" = a_.startVector(\""+tag+"\");\n");
+      cb.append(fname+"=new "+getType()+"();\n");
+      cb.append("for (; !"+getId("vidx")+".done(); "+getId("vidx")+".incr()) {\n");
+      element.genReadMethod(cb, getId("e"), getId("e"), true);
+      cb.append(fname+".add("+getId("e")+");\n");
+      cb.append("}\n");
+      cb.append("a_.endVector(\""+tag+"\");\n");
+      decrLevel();
+      cb.append("}\n");
     }
     
-    public String getSignature() {
-        return "[" + mElement.getSignature() + "]";
+    void genWriteMethod(CodeBuffer cb, String fname, String tag) {
+      cb.append("{\n");
+      incrLevel();
+      cb.append("a_.startVector("+fname+",\""+tag+"\");\n");
+      cb.append("int "+getId("len")+" = "+fname+".size();\n");
+      cb.append("for(int "+getId("vidx")+" = 0; "+getId("vidx")+"<"+getId("len")+"; "+getId("vidx")+"++) {\n");
+      cb.append(element.getType()+" "+getId("e")+" = "+fname+".get("+getId("vidx")+");\n");
+      element.genWriteMethod(cb, getId("e"), getId("e"));
+      cb.append("}\n");
+      cb.append("a_.endVector("+fname+",\""+tag+"\");\n");
+      cb.append("}\n");
+      decrLevel();
     }
     
-    public String genJavaCompareTo(String fname, String other) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("    {\n");
-      sb.append("      int "+getId("len1")+" = "+fname+".size();\n");
-      sb.append("      int "+getId("len2")+" = "+other+".size();\n");
-      sb.append("      for(int "+getId("vidx")+" = 0; "+getId("vidx")+"<"+
-          getId("len1")+" && "+getId("vidx")+"<"+getId("len2")+"; "+
-          getId("vidx")+"++) {\n");
-      sb.append("        "+mElement.getJavaWrapperType()+" "+getId("e1")+
-          " = ("+mElement.getJavaWrapperType()+") "+fname+
-          ".get("+getId("vidx")+");\n");
-      sb.append("        "+mElement.getJavaWrapperType()+" "+getId("e2")+
-          " = ("+mElement.getJavaWrapperType()+") "+other+
-          ".get("+getId("vidx")+");\n");
-      sb.append(mElement.genJavaCompareToWrapper(getId("e1"), getId("e2")));
-      sb.append("         if (ret != 0) { return ret; }\n");
-      sb.append("      }\n");
-      sb.append("      ret = ("+getId("len1")+" - "+getId("len2")+");\n");
-      sb.append("    }\n");
-      return sb.toString();
-    }
-    
-    public String genJavaCompareToWrapper(String fname, String other) {
-      return genJavaCompareTo(fname, other);
-    }
-    
-    public String genJavaReadWrapper(String fname, String tag, boolean decl) {
-        StringBuffer ret = new StringBuffer("");
-        if (decl) {
-            ret.append("      java.util.ArrayList "+fname+";\n");
-        }
-        ret.append("    {\n");
-        incrLevel();
-        ret.append("      org.apache.hadoop.record.Index "+getId("vidx")+" = a_.startVector(\""+tag+"\");\n");
-        ret.append("      "+fname+"=new java.util.ArrayList();\n");
-        ret.append("      for (; !"+getId("vidx")+".done(); "+getId("vidx")+".incr()) {\n");
-        ret.append(mElement.genJavaReadWrapper(getId("e"), getId("e"), true));
-        ret.append("        "+fname+".add("+getId("e")+");\n");
-        ret.append("      }\n");
-        ret.append("    a_.endVector(\""+tag+"\");\n");
-        decrLevel();
-        ret.append("    }\n");
-        return ret.toString();
-    }
-    
-    public String genJavaReadMethod(String fname, String tag) {
-        return genJavaReadWrapper(fname, tag, false);
-    }
-    
-    public String genJavaWriteWrapper(String fname, String tag) {
-        StringBuffer ret = new StringBuffer("    {\n");
-        incrLevel();
-        ret.append("      a_.startVector("+fname+",\""+tag+"\");\n");
-        ret.append("      int "+getId("len")+" = "+fname+".size();\n");
-        ret.append("      for(int "+getId("vidx")+" = 0; "+getId("vidx")+"<"+getId("len")+"; "+getId("vidx")+"++) {\n");
-        ret.append("        "+mElement.getJavaWrapperType()+" "+getId("e")+" = ("+mElement.getJavaWrapperType()+") "+fname+".get("+getId("vidx")+");\n");
-        ret.append(mElement.genJavaWriteWrapper(getId("e"), getId("e")));
-        ret.append("      }\n");
-        ret.append("      a_.endVector("+fname+",\""+tag+"\");\n");
-        ret.append("    }\n");
-        decrLevel();
-        return ret.toString();
-    }
-    
-    public String genJavaWriteMethod(String fname, String tag) {
-        return genJavaWriteWrapper(fname, tag);
-    }
-    
-    public String genJavaSlurpBytes(String b, String s, String l) {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
+    void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
+      cb.append("{\n");
       incrLevel();
-      sb.append("           int "+getId("vi")+
-          " = WritableComparator.readVInt("+b+", "+s+");\n");
-      sb.append("           int "+getId("vz")+
-          " = WritableUtils.getVIntSize("+getId("vi")+");\n");
-      sb.append("           "+s+"+="+getId("vz")+"; "+l+"-="+getId("vz")+";\n");
-      sb.append("           for (int "+getId("vidx")+" = 0; "+getId("vidx")+
+      cb.append("int "+getId("vi")+
+          " = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
+      cb.append("int "+getId("vz")+
+          " = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi")+");\n");
+      cb.append(s+"+="+getId("vz")+"; "+l+"-="+getId("vz")+";\n");
+      cb.append("for (int "+getId("vidx")+" = 0; "+getId("vidx")+
           " < "+getId("vi")+"; "+getId("vidx")+"++)");
-      sb.append(mElement.genJavaSlurpBytes(b,s,l));
+      element.genSlurpBytes(cb, b,s,l);
       decrLevel();
-      sb.append("        }\n");
-      return sb.toString();
+      cb.append("}\n");
     }
     
-    public String genJavaCompareBytes() {
-      StringBuffer sb = new StringBuffer();
-      sb.append("        {\n");
+    void genCompareBytes(CodeBuffer cb) {
+      cb.append("{\n");
       incrLevel();
-      sb.append("           int "+getId("vi1")+
-          " = WritableComparator.readVInt(b1, s1);\n");
-      sb.append("           int "+getId("vi2")+
-          " = WritableComparator.readVInt(b2, s2);\n");
-      sb.append("           int "+getId("vz1")+
-          " = WritableUtils.getVIntSize("+getId("vi1")+");\n");
-      sb.append("           int "+getId("vz2")+
-          " = WritableUtils.getVIntSize("+getId("vi2")+");\n");
-      sb.append("           s1+="+getId("vz1")+"; s2+="+getId("vz2")+
+      cb.append("int "+getId("vi1")+
+          " = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n");
+      cb.append("int "+getId("vi2")+
+          " = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n");
+      cb.append("int "+getId("vz1")+
+          " = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi1")+");\n");
+      cb.append("int "+getId("vz2")+
+          " = org.apache.hadoop.record.Utils.getVIntSize("+getId("vi2")+");\n");
+      cb.append("s1+="+getId("vz1")+"; s2+="+getId("vz2")+
           "; l1-="+getId("vz1")+"; l2-="+getId("vz2")+";\n");
-      sb.append("           for (int "+getId("vidx")+" = 0; "+getId("vidx")+
+      cb.append("for (int "+getId("vidx")+" = 0; "+getId("vidx")+
           " < "+getId("vi1")+" && "+getId("vidx")+" < "+getId("vi2")+
           "; "+getId("vidx")+"++)");
-      sb.append(mElement.genJavaCompareBytes());
-      sb.append("           if ("+getId("vi1")+" != "+getId("vi2")+
+      element.genCompareBytes(cb);
+      cb.append("if ("+getId("vi1")+" != "+getId("vi2")+
           ") { return ("+getId("vi1")+"<"+getId("vi2")+")?-1:0; }\n");
       decrLevel();
-      sb.append("        }\n");
-      return sb.toString();
+      cb.append("}\n");
     }
+  }
+  
+  /** Creates a new instance of JVector */
+  public JVector(JType t) {
+    type = t;
+    setJavaType(new JavaVector(t.getJavaType()));
+    setCppType(new CppCompType(" ::std::vector<"+t.getCppType().getType()+">"));
+    setCType(new CCompType());
+  }
+  
+  String getSignature() {
+    return "[" + type.getSignature() + "]";
+  }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JavaGenerator.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JavaGenerator.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JavaGenerator.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JavaGenerator.java Wed Feb 28 19:47:27 2007
@@ -19,8 +19,6 @@
 package org.apache.hadoop.record.compiler;
 
 import java.util.ArrayList;
-import java.io.File;
-import java.io.FileWriter;
 import java.io.IOException;
 import java.util.Iterator;
 
@@ -29,34 +27,26 @@
  *
  * @author Milind Bhandarkar
  */
-class JavaGenerator {
-    private String mName;
-    private String destDir;
-    private ArrayList mInclFiles;
-    private ArrayList mRecList;
-    
-    /** Creates a new instance of JavaGenerator
-     *
-     * @param name possibly full pathname to the file
-     * @param incl included files (as JFile)
-     * @param records List of records defined within this file
-     * @param destDir output directory
-     */
-    JavaGenerator(String name, ArrayList incl, ArrayList records, String destDir) {
-        mName = name;
-        mInclFiles = incl;
-        mRecList = records;
-        this.destDir = destDir;
-    }
-    
-    /**
-     * Generate Java code for records. This method is only a front-end to
-     * JRecord, since one file is generated for each record.
-     */
-    void genCode() throws IOException {
-        for (Iterator i = mRecList.iterator(); i.hasNext(); ) {
-            JRecord rec = (JRecord) i.next();
-            rec.genJavaCode(destDir);
-        }
+class JavaGenerator extends CodeGenerator {
+  
+  JavaGenerator() {
+  }
+  
+  /**
+   * Generate Java code for records. This method is only a front-end to
+   * JRecord, since one file is generated for each record.
+   *
+   * @param name possibly full pathname to the file
+   * @param ilist included files (as JFile)
+   * @param rlist List of records defined within this file
+   * @param destDir output directory
+   */
+  void genCode(String name, ArrayList<JFile> ilist,
+      ArrayList<JRecord> rlist, String destDir, ArrayList<String> options)
+      throws IOException {
+    for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext(); ) {
+      JRecord rec = iter.next();
+      rec.genJavaCode(destDir, options);
     }
+  }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/ant/RccTask.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/ant/RccTask.java?view=diff&rev=513122&r1=513121&r2=513122
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/ant/RccTask.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/ant/RccTask.java Wed Feb 28 19:47:27 2007
@@ -16,7 +16,7 @@
 package org.apache.hadoop.record.compiler.ant;
 
 import java.io.File;
-import java.util.Vector;
+import java.util.ArrayList;
 import org.apache.hadoop.record.compiler.generated.Rcc;
 import org.apache.tools.ant.BuildException;
 import org.apache.tools.ant.DirectoryScanner;
@@ -52,7 +52,7 @@
   private String language = "java";
   private File src;
   private File dest = new File(".");
-  private final Vector<FileSet> filesets = new Vector<FileSet>();
+  private final ArrayList<FileSet> filesets = new ArrayList<FileSet>();
   private boolean failOnError = true;
   
   /** Creates a new instance of RccTask */
@@ -111,7 +111,7 @@
     }
     Project myProject = getProject();
     for (int i = 0; i < filesets.size(); i++) {
-      FileSet fs = filesets.elementAt(i);
+      FileSet fs = filesets.get(i);
       DirectoryScanner ds = fs.getDirectoryScanner(myProject);
       File dir = fs.getDir(myProject);
       String[] srcs = ds.getIncludedFiles();



Mime
View raw message