hadoop-common-commits mailing list archives

From: d...@apache.org
Subject: svn commit: r612739 [2/3] - in /lucene/hadoop/trunk: ./ src/c++/librecordio/ src/c++/librecordio/test/ src/java/org/apache/hadoop/record/compiler/ src/java/org/apache/hadoop/record/meta/ src/test/ddl/ src/test/org/apache/hadoop/record/
Date: Thu, 17 Jan 2008 07:02:10 GMT
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JByte.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JByte.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JByte.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JByte.java Wed Jan 16 23:02:07 2008
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.record.compiler;
 
-import org.apache.hadoop.record.compiler.JType.CType;
-import org.apache.hadoop.record.compiler.JType.CppType;
-
 /**
  * Code generator for "byte" type.
  */
@@ -29,9 +26,13 @@
   class JavaByte extends JavaType {
     
     JavaByte() {
-      super("byte", "Byte", "Byte");
+      super("byte", "Byte", "Byte", "TypeID.RIOType.BYTE");
     }
     
+    String getTypeIDObjectString() {
+      return "org.apache.hadoop.record.meta.TypeID.ByteTypeID";
+    }
+
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("if ("+l+"<1) {\n");
@@ -56,9 +57,20 @@
     }
   }
   
+  class CppByte extends CppType {
+    
+    CppByte() {
+      super("int8_t");
+    }
+    
+    String getTypeIDObjectString() {
+      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BYTE)";
+    }
+  }
+
   public JByte() {
     setJavaType(new JavaByte());
-    setCppType(new CppType("int8_t"));
+    setCppType(new CppByte());
     setCType(new CType());
   }
   

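The same pattern repeats for each scalar type below (JDouble, JFloat, JInt, JLong): the JavaType constructor gains a fourth argument naming its TypeID.RIOType constant, getTypeIDObjectString() returns the matching singleton, and a small Cpp* subclass replaces the bare CppType. For orientation, a minimal sketch of the registration call the generated Java code now makes for a byte field; the record and field names are made up, while addField and the ByteTypeID singleton come straight from the diff:

    import org.apache.hadoop.record.meta.RecordTypeInfo;
    import org.apache.hadoop.record.meta.TypeID;

    // Sketch: the static type-info registration the modified compiler emits
    // for each field of a generated record ("DummyRecord"/"flag" are
    // illustrative names only).
    public class ByteTypeInfoSketch {
      public static void main(String[] args) {
        RecordTypeInfo rti = new RecordTypeInfo("DummyRecord");
        // JavaByte.getTypeIDObjectString() resolves to the shared singleton:
        rti.addField("flag", TypeID.ByteTypeID);
        System.out.println(rti.getFieldTypeInfos().size()); // 1
      }
    }
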
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JCompType.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JCompType.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JCompType.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JCompType.java Wed Jan 16 23:02:07 2008
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.record.compiler;
 
+
 /**
  * Abstract base class for all the "compound" types such as ustring,
  * buffer, vector, map, and record.
@@ -26,28 +27,30 @@
   
   abstract class JavaCompType extends JavaType {
     
-    JavaCompType(String type, String suffix, String wrapper) {
-      super(type, suffix, wrapper);
+    JavaCompType(String type, String suffix, String wrapper, 
+        String typeIDByteString) { 
+      super(type, suffix, wrapper, typeIDByteString);
     }
     
     void genCompareTo(CodeBuffer cb, String fname, String other) {
-      cb.append("ret = "+fname+".compareTo("+other+");\n");
+      cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n");
     }
     
     void genEquals(CodeBuffer cb, String fname, String peer) {
-      cb.append("ret = "+fname+".equals("+peer+");\n");
+      cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n");
     }
     
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append("ret = "+fname+".hashCode();\n");
+      cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n");
     }
     
     void genClone(CodeBuffer cb, String fname) {
-      cb.append("other."+fname+" = ("+getType()+") this."+fname+".clone();\n");
+      cb.append(Consts.RIO_PREFIX + "other."+fname+" = ("+getType()+") this."+
+          fname+".clone();\n");
     }
   }
   
-  class CppCompType extends CppType {
+  abstract class CppCompType extends CppType {
     
     CppCompType(String type) {
       super(type);

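Throughout this change, every local the compiler generates ("ret", "peer", "other", the map/vector temporaries) is now prefixed with Consts.RIO_PREFIX, keeping generated temporaries out of the namespace of user-chosen DDL field names. A self-contained sketch of the collision this avoids, assuming the prefix is something like "_rio_":

    // Sketch: why generated locals get a prefix. A DDL field may legally be
    // named "ret"; a prefixed temporary cannot collide with it.
    public class PrefixSketch implements Comparable<PrefixSketch> {
      private int ret; // user field, unluckily named "ret"

      PrefixSketch(int ret) { this.ret = ret; }

      public int compareTo(PrefixSketch _rio_peer) {
        int _rio_ret = 0; // generated temporary, mirroring JType.genCompareTo
        _rio_ret = (this.ret == _rio_peer.ret) ? 0
                 : ((this.ret < _rio_peer.ret) ? -1 : 1);
        return _rio_ret;
      }

      public static void main(String[] args) {
        System.out.println(new PrefixSketch(1).compareTo(new PrefixSketch(2))); // -1
      }
    }
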
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JDouble.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JDouble.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JDouble.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JDouble.java Wed Jan 16 23:02:07 2008
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.record.compiler;
 
-import org.apache.hadoop.record.compiler.JType.CType;
-import org.apache.hadoop.record.compiler.JType.CppType;
 
 /**
  */
@@ -28,12 +26,16 @@
   class JavaDouble extends JavaType {
     
     JavaDouble() {
-      super("double", "Double", "Double");
+      super("double", "Double", "Double", "TypeID.RIOType.DOUBLE");
     }
     
+    String getTypeIDObjectString() {
+      return "org.apache.hadoop.record.meta.TypeID.DoubleTypeID";
+    }
+
     void genHashCode(CodeBuffer cb, String fname) {
       String tmp = "Double.doubleToLongBits("+fname+")";
-      cb.append("ret = (int)("+tmp+"^("+tmp+">>>32));\n");
+      cb.append(Consts.RIO_PREFIX + "ret = (int)("+tmp+"^("+tmp+">>>32));\n");
     }
     
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
@@ -61,10 +63,23 @@
       cb.append("}\n");
     }
   }
+
+  class CppDouble extends CppType {
+    
+    CppDouble() {
+      super("double");
+    }
+    
+    String getTypeIDObjectString() {
+      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_DOUBLE)";
+    }
+  }
+
+  
   /** Creates a new instance of JDouble */
   public JDouble() {
     setJavaType(new JavaDouble());
-    setCppType(new CppType("double"));
+    setCppType(new CppDouble());
     setCType(new CType());
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java Wed Jan 16 23:02:07 2008
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.record.compiler;
 
-import org.apache.hadoop.record.compiler.JType.CType;
-import org.apache.hadoop.record.compiler.JType.CppType;
-
 /**
  */
 public class JFloat extends JType {
@@ -28,11 +25,15 @@
   class JavaFloat extends JavaType {
     
     JavaFloat() {
-      super("float", "Float", "Float");
+      super("float", "Float", "Float", "TypeID.RIOType.FLOAT");
     }
     
+    String getTypeIDObjectString() {
+      return "org.apache.hadoop.record.meta.TypeID.FloatTypeID";
+    }
+
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append("ret = Float.floatToIntBits("+fname+");\n");
+      cb.append(Consts.RIO_PREFIX + "ret = Float.floatToIntBits("+fname+");\n");
     }
     
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
@@ -60,10 +61,22 @@
       cb.append("}\n");
     }
   }
+
+  class CppFloat extends CppType {
+    
+    CppFloat() {
+      super("float");
+    }
+    
+    String getTypeIDObjectString() {
+      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_FLOAT)";
+    }
+  }
+
   /** Creates a new instance of JFloat */
   public JFloat() {
     setJavaType(new JavaFloat());
-    setCppType(new CppType("float"));
+    setCppType(new CppFloat());
     setCType(new CType());
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java Wed Jan 16 23:02:07 2008
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.record.compiler;
 
-import org.apache.hadoop.record.compiler.JType.CType;
-import org.apache.hadoop.record.compiler.JType.CppType;
 
 /**
  * Code generator for "int" type
@@ -29,9 +27,13 @@
   class JavaInt extends JavaType {
     
     JavaInt() {
-      super("int", "Int", "Integer");
+      super("int", "Int", "Integer", "TypeID.RIOType.INT");
     }
     
+    String getTypeIDObjectString() {
+      return "org.apache.hadoop.record.meta.TypeID.IntTypeID";
+    }
+
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
@@ -53,10 +55,22 @@
       cb.append("}\n");
     }
   }
+
+  class CppInt extends CppType {
+    
+    CppInt() {
+      super("int32_t");
+    }
+    
+    String getTypeIDObjectString() {
+      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_INT)";
+    }
+  }
+
   /** Creates a new instance of JInt */
   public JInt() {
     setJavaType(new JavaInt());
-    setCppType(new CppType("int32_t"));
+    setCppType(new CppInt());
     setCType(new CType());
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java Wed Jan 16 23:02:07 2008
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.record.compiler;
 
-import org.apache.hadoop.record.compiler.JType.CType;
-import org.apache.hadoop.record.compiler.JType.CppType;
-
 /**
  * Code generator for "long" type
  */
@@ -29,11 +26,16 @@
   class JavaLong extends JavaType {
     
     JavaLong() {
-      super("long", "Long", "Long");
+      super("long", "Long", "Long", "TypeID.RIOType.LONG");
     }
     
+    String getTypeIDObjectString() {
+      return "org.apache.hadoop.record.meta.TypeID.LongTypeID";
+    }
+
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append("ret = (int) ("+fname+"^("+fname+">>>32));\n");
+      cb.append(Consts.RIO_PREFIX + "ret = (int) ("+fname+"^("+
+          fname+">>>32));\n");
     }
     
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
@@ -57,10 +59,22 @@
       cb.append("}\n");
     }
   }
+
+  class CppLong extends CppType {
+    
+    CppLong() {
+      super("int64_t");
+    }
+    
+    String getTypeIDObjectString() {
+      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_LONG)";
+    }
+  }
+
   /** Creates a new instance of JLong */
   public JLong() {
     setJavaType(new JavaLong());
-    setCppType(new CppType("int64_t"));
+    setCppType(new CppLong());
     setCType(new CType());
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java Wed Jan 16 23:02:07 2008
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.record.compiler;
 
+import java.util.Map;
+
+
 /**
  */
 public class JMap extends JCompType {
@@ -43,31 +46,48 @@
     JavaMap(JType.JavaType key, JType.JavaType value) {
       super("java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">",
             "Map",
-            "java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">");
+            "java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">",
+            "TypeID.RIOType.MAP");
       this.key = key;
       this.value = value;
     }
     
+    String getTypeIDObjectString() {
+      return "new org.apache.hadoop.record.meta.MapTypeID(" + 
+        key.getTypeIDObjectString() + ", " + 
+        value.getTypeIDObjectString() + ")";
+    }
+
+    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
+      key.genSetRTIFilter(cb, nestedStructMap);
+      value.genSetRTIFilter(cb, nestedStructMap);
+    }
+
     void genCompareTo(CodeBuffer cb, String fname, String other) {
       String setType = "java.util.Set<"+key.getWrapperType()+"> ";
       String iterType = "java.util.Iterator<"+key.getWrapperType()+"> ";
       cb.append("{\n");
-      cb.append(setType+getId("set1")+" = "+fname+".keySet();\n");
-      cb.append(setType+getId("set2")+" = "+other+".keySet();\n");
-      cb.append(iterType+getId("miter1")+" = "+
-                getId("set1")+".iterator();\n");
-      cb.append(iterType+getId("miter2")+" = "+
-                getId("set2")+".iterator();\n");
-      cb.append("for(; "+getId("miter1")+".hasNext() && "+
-                getId("miter2")+".hasNext();) {\n");
-      cb.append(key.getType()+" "+getId("k1")+
-                " = "+getId("miter1")+".next();\n");
-      cb.append(key.getType()+" "+getId("k2")+
-                " = "+getId("miter2")+".next();\n");
-      key.genCompareTo(cb, getId("k1"), getId("k2"));
-      cb.append("if (ret != 0) { return ret; }\n");
+      cb.append(setType+getId(Consts.RIO_PREFIX + "set1")+" = "+
+          fname+".keySet();\n");
+      cb.append(setType+getId(Consts.RIO_PREFIX + "set2")+" = "+
+          other+".keySet();\n");
+      cb.append(iterType+getId(Consts.RIO_PREFIX + "miter1")+" = "+
+                getId(Consts.RIO_PREFIX + "set1")+".iterator();\n");
+      cb.append(iterType+getId(Consts.RIO_PREFIX + "miter2")+" = "+
+                getId(Consts.RIO_PREFIX + "set2")+".iterator();\n");
+      cb.append("for(; "+getId(Consts.RIO_PREFIX + "miter1")+".hasNext() && "+
+                getId(Consts.RIO_PREFIX + "miter2")+".hasNext();) {\n");
+      cb.append(key.getType()+" "+getId(Consts.RIO_PREFIX + "k1")+
+                " = "+getId(Consts.RIO_PREFIX + "miter1")+".next();\n");
+      cb.append(key.getType()+" "+getId(Consts.RIO_PREFIX + "k2")+
+                " = "+getId(Consts.RIO_PREFIX + "miter2")+".next();\n");
+      key.genCompareTo(cb, getId(Consts.RIO_PREFIX + "k1"), 
+          getId(Consts.RIO_PREFIX + "k2"));
+      cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) { return " + 
+          Consts.RIO_PREFIX + "ret; }\n");
       cb.append("}\n");
-      cb.append("ret = ("+getId("set1")+".size() - "+getId("set2")+".size());\n");
+      cb.append(Consts.RIO_PREFIX + "ret = ("+getId(Consts.RIO_PREFIX + "set1")+
+          ".size() - "+getId(Consts.RIO_PREFIX + "set2")+".size());\n");
       cb.append("}\n");
     }
     
@@ -77,14 +97,20 @@
       }
       cb.append("{\n");
       incrLevel();
-      cb.append("org.apache.hadoop.record.Index "+getId("midx")+" = a.startMap(\""+tag+"\");\n");
+      cb.append("org.apache.hadoop.record.Index " + 
+          getId(Consts.RIO_PREFIX + "midx")+" = " + 
+          Consts.RECORD_INPUT + ".startMap(\""+tag+"\");\n");
       cb.append(fname+"=new "+getType()+"();\n");
-      cb.append("for (; !"+getId("midx")+".done(); "+getId("midx")+".incr()) {\n");
-      key.genReadMethod(cb, getId("k"),getId("k"), true);
-      value.genReadMethod(cb, getId("v"), getId("v"), true);
-      cb.append(fname+".put("+getId("k")+","+getId("v")+");\n");
+      cb.append("for (; !"+getId(Consts.RIO_PREFIX + "midx")+".done(); "+
+          getId(Consts.RIO_PREFIX + "midx")+".incr()) {\n");
+      key.genReadMethod(cb, getId(Consts.RIO_PREFIX + "k"),
+          getId(Consts.RIO_PREFIX + "k"), true);
+      value.genReadMethod(cb, getId(Consts.RIO_PREFIX + "v"), 
+          getId(Consts.RIO_PREFIX + "v"), true);
+      cb.append(fname+".put("+getId(Consts.RIO_PREFIX + "k")+","+
+          getId(Consts.RIO_PREFIX + "v")+");\n");
       cb.append("}\n");
-      cb.append("a.endMap(\""+tag+"\");\n");
+      cb.append(Consts.RECORD_INPUT + ".endMap(\""+tag+"\");\n");
       decrLevel();
       cb.append("}\n");
     }
@@ -98,16 +124,24 @@
         key.getWrapperType()+","+value.getWrapperType()+">> ";
       cb.append("{\n");
       incrLevel();
-      cb.append("a.startMap("+fname+",\""+tag+"\");\n");
-      cb.append(setType+getId("es")+" = "+fname+".entrySet();\n");
-      cb.append("for("+iterType+getId("midx")+" = "+getId("es")+".iterator(); "+getId("midx")+".hasNext();) {\n");
-      cb.append(entryType+getId("me")+" = "+getId("midx")+".next();\n");
-      cb.append(key.getType()+" "+getId("k")+" = "+getId("me")+".getKey();\n");
-      cb.append(value.getType()+" "+getId("v")+" = "+getId("me")+".getValue();\n");
-      key.genWriteMethod(cb, getId("k"), getId("k"));
-      value.genWriteMethod(cb, getId("v"), getId("v"));
+      cb.append(Consts.RECORD_OUTPUT + ".startMap("+fname+",\""+tag+"\");\n");
+      cb.append(setType+getId(Consts.RIO_PREFIX + "es")+" = "+
+          fname+".entrySet();\n");
+      cb.append("for("+iterType+getId(Consts.RIO_PREFIX + "midx")+" = "+
+          getId(Consts.RIO_PREFIX + "es")+".iterator(); "+
+          getId(Consts.RIO_PREFIX + "midx")+".hasNext();) {\n");
+      cb.append(entryType+getId(Consts.RIO_PREFIX + "me")+" = "+
+          getId(Consts.RIO_PREFIX + "midx")+".next();\n");
+      cb.append(key.getType()+" "+getId(Consts.RIO_PREFIX + "k")+" = "+
+          getId(Consts.RIO_PREFIX + "me")+".getKey();\n");
+      cb.append(value.getType()+" "+getId(Consts.RIO_PREFIX + "v")+" = "+
+          getId(Consts.RIO_PREFIX + "me")+".getValue();\n");
+      key.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "k"), 
+          getId(Consts.RIO_PREFIX + "k"));
+      value.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "v"), 
+          getId(Consts.RIO_PREFIX + "v"));
       cb.append("}\n");
-      cb.append("a.endMap("+fname+",\""+tag+"\");\n");
+      cb.append(Consts.RECORD_OUTPUT + ".endMap("+fname+",\""+tag+"\");\n");
       cb.append("}\n");
       decrLevel();
     }
@@ -156,11 +190,34 @@
     }
   }
   
+  class CppMap extends CppCompType {
+    
+    JType.CppType key;
+    JType.CppType value;
+    
+    CppMap(JType.CppType key, JType.CppType value) {
+      super("::std::map< "+key.getType()+", "+ value.getType()+" >");
+      this.key = key;
+      this.value = value;
+    }
+    
+    String getTypeIDObjectString() {
+      return "new ::hadoop::MapTypeID(" + 
+        key.getTypeIDObjectString() + ", " + 
+        value.getTypeIDObjectString() + ")";
+    }
+
+    void genSetRTIFilter(CodeBuffer cb) {
+      key.genSetRTIFilter(cb);
+      value.genSetRTIFilter(cb);
+    }
+
+  }
+  
   /** Creates a new instance of JMap */
   public JMap(JType t1, JType t2) {
     setJavaType(new JavaMap(t1.getJavaType(), t2.getJavaType()));
-    setCppType(new CppCompType(" ::std::map<"+t1.getCppType().getType()+","+
-                               t2.getCppType().getType()+">"));
+    setCppType(new CppMap(t1.getCppType(), t2.getCppType()));
     setCType(new CType());
     keyType = t1;
     valueType = t2;

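The map generator composes type information recursively: a map field's TypeID is a MapTypeID built from the key and value TypeIDs, and genSetRTIFilter recurses into both so nested records anywhere inside a map still get their filters set. A small sketch of the composed object; the class and constructor are taken from the generated string above, and getTypeVal() returning the RIOType constant is an assumption:

    import org.apache.hadoop.record.meta.MapTypeID;
    import org.apache.hadoop.record.meta.TypeID;

    // Sketch: the TypeID a map<string, int> field contributes to its
    // record's RecordTypeInfo, per JavaMap.getTypeIDObjectString().
    public class MapTypeIdSketch {
      public static void main(String[] args) {
        MapTypeID mapTid =
            new MapTypeID(TypeID.StringTypeID, TypeID.IntTypeID);
        System.out.println(mapTid.getTypeVal() == TypeID.RIOType.MAP); // true
      }
    }
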
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java Wed Jan 16 23:02:07 2008
@@ -21,8 +21,7 @@
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
+import java.util.*;
 
 /**
  */
@@ -37,7 +36,7 @@
       new ArrayList<JField<JavaType>>();
     
     JavaRecord(String name, ArrayList<JField<JType>> flist) {
-      super(name, "Record", name);
+      super(name, "Record", name, "TypeID.RIOType.STRUCT");
       this.fullName = name;
       int idx = name.lastIndexOf('.');
       this.name = name.substring(idx+1);
@@ -48,16 +47,91 @@
       }
     }
     
+    String getTypeIDObjectString() {
+      return "new org.apache.hadoop.record.meta.StructTypeID(" + 
+      fullName + ".getTypeInfo())";
+    }
+
+    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
+      // ignore if we've already set the type filter for this record
+      if (!nestedStructMap.containsKey(fullName)) {
+        // we set the RTI filter here
+        cb.append(fullName + ".setTypeFilter(rti.getNestedStructTypeInfo(\""+
+            name + "\"));\n");
+        nestedStructMap.put(fullName, null);
+      }
+    }
+
+    // For each typeInfo in the filter, we see if there's a similar one in the record.
+    // Since we store typeInfos in ArrayLists, this search is O(n^2). We could do it faster
+    // if we also stored a map (of TypeInfo to index), but since setupRtiFields() is called
+    // only once when deserializing, we're sticking with the former, as the code is easier.
+    void genSetupRtiFields(CodeBuffer cb) {
+      cb.append("private static void setupRtiFields()\n{\n");
+      cb.append("if (null == " + Consts.RTI_FILTER + ") return;\n");
+      cb.append("// we may already have done this\n");
+      cb.append("if (null != " + Consts.RTI_FILTER_FIELDS + ") return;\n");
+      cb.append("int " + Consts.RIO_PREFIX + "i, " + Consts.RIO_PREFIX + "j;\n");
+      cb.append(Consts.RTI_FILTER_FIELDS + " = new int [" + 
+          Consts.RIO_PREFIX + "rtiFilter.getFieldTypeInfos().size()];\n");
+      cb.append("for (" + Consts.RIO_PREFIX + "i=0; " + Consts.RIO_PREFIX + "i<"+
+          Consts.RTI_FILTER_FIELDS + ".length; " + Consts.RIO_PREFIX + "i++) {\n");
+      cb.append(Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + "i] = 0;\n");
+      cb.append("}\n");
+      cb.append("java.util.Iterator<org.apache.hadoop.record.meta." +
+          "FieldTypeInfo> " + Consts.RIO_PREFIX + "itFilter = " + 
+          Consts.RIO_PREFIX + "rtiFilter.getFieldTypeInfos().iterator();\n");
+      cb.append(Consts.RIO_PREFIX + "i=0;\n");
+      cb.append("while (" + Consts.RIO_PREFIX + "itFilter.hasNext()) {\n");
+      cb.append("org.apache.hadoop.record.meta.FieldTypeInfo " + 
+          Consts.RIO_PREFIX + "tInfoFilter = " + 
+          Consts.RIO_PREFIX + "itFilter.next();\n");
+      cb.append("java.util.Iterator<org.apache.hadoop.record.meta." + 
+          "FieldTypeInfo> " + Consts.RIO_PREFIX + "it = " + Consts.RTI_VAR + 
+          ".getFieldTypeInfos().iterator();\n");
+      cb.append(Consts.RIO_PREFIX + "j=1;\n");
+      cb.append("while (" + Consts.RIO_PREFIX + "it.hasNext()) {\n");
+      cb.append("org.apache.hadoop.record.meta.FieldTypeInfo " + 
+          Consts.RIO_PREFIX + "tInfo = " + Consts.RIO_PREFIX + "it.next();\n");
+      cb.append("if (" + Consts.RIO_PREFIX + "tInfo.equals(" +  
+          Consts.RIO_PREFIX + "tInfoFilter)) {\n");
+      cb.append(Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + "i] = " +
+          Consts.RIO_PREFIX + "j;\n");
+      cb.append("break;\n");
+      cb.append("}\n");
+      cb.append(Consts.RIO_PREFIX + "j++;\n");
+      cb.append("}\n");
+      /*int ct = 0;
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        ct++;
+        JField<JavaType> jf = i.next();
+        JavaType type = jf.getType();
+        String name = jf.getName();
+        if (ct != 1) {
+          cb.append("else ");
+        }
+        type.genRtiFieldCondition(cb, name, ct);
+      }
+      if (ct != 0) {
+        cb.append("else {\n");
+        cb.append("rtiFilterFields[i] = 0;\n");
+        cb.append("}\n");
+      }*/
+      cb.append(Consts.RIO_PREFIX + "i++;\n");
+      cb.append("}\n");
+      cb.append("}\n");
+    }
+
     void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
       if (decl) {
         cb.append(fullName+" "+fname+";\n");
       }
       cb.append(fname+"= new "+fullName+"();\n");
-      cb.append(fname+".deserialize(a,\""+tag+"\");\n");
+      cb.append(fname+".deserialize(" + Consts.RECORD_INPUT + ",\""+tag+"\");\n");
     }
     
     void genWriteMethod(CodeBuffer cb, String fname, String tag) {
-      cb.append(fname+".serialize(a,\""+tag+"\");\n");
+      cb.append(fname+".serialize(" + Consts.RECORD_OUTPUT + ",\""+tag+"\");\n");
     }
     
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
@@ -100,15 +174,40 @@
       cb.append("public class "+name+
                 " extends org.apache.hadoop.record.Record {\n");
       
+      // type information declarations
+      cb.append("private static final " + 
+          "org.apache.hadoop.record.meta.RecordTypeInfo " + 
+          Consts.RTI_VAR + ";\n");
+      cb.append("private static " + 
+          "org.apache.hadoop.record.meta.RecordTypeInfo " + 
+          Consts.RTI_FILTER + ";\n");
+      cb.append("private static int[] " + Consts.RTI_FILTER_FIELDS + ";\n");
+      
+      // static init for type information
+      cb.append("static {\n");
+      cb.append(Consts.RTI_VAR + " = " +
+          "new org.apache.hadoop.record.meta.RecordTypeInfo(\"" +
+          name + "\");\n");
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        type.genStaticTypeInfo(cb, name);
+      }
+      cb.append("}\n\n");
+
+      // field definitions
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genDecl(cb, name);
       }
+
+      // default constructor
       cb.append("public "+name+"() { }\n");
       
-      
+      // constructor
       cb.append("public "+name+"(\n");
       int fIdx = 0;
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext(); fIdx++) {
@@ -127,95 +226,182 @@
         type.genConstructorSet(cb, name);
       }
       cb.append("}\n");
+
+      // getter/setter for type info
+      cb.append("public static org.apache.hadoop.record.meta.RecordTypeInfo"
+              + " getTypeInfo() {\n");
+      cb.append("return " + Consts.RTI_VAR + ";\n");
+      cb.append("}\n");
+      cb.append("public static void setTypeFilter("
+          + "org.apache.hadoop.record.meta.RecordTypeInfo rti) {\n");
+      cb.append("if (null == rti) return;\n");
+      cb.append(Consts.RTI_FILTER + " = rti;\n");
+      cb.append(Consts.RTI_FILTER_FIELDS + " = null;\n");
+      // set RTIFilter for nested structs.
+      // To prevent setting up the type filter for the same struct more than once, 
+      // we use a hash map to keep track of what we've set. 
+      Map<String, Integer> nestedStructMap = new HashMap<String, Integer>();
+      for (JField<JavaType> jf : fields) {
+        JavaType type = jf.getType();
+        type.genSetRTIFilter(cb, nestedStructMap);
+      }
+      cb.append("}\n");
+
+      // setupRtiFields()
+      genSetupRtiFields(cb);
+
+      // getters/setters for member variables
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genGetSet(cb, name);
       }
-      cb.append("public void serialize("+
-                "final org.apache.hadoop.record.RecordOutput a, final String tag)\n"+
+      
+      // serialize()
+      cb.append("public void serialize("+ 
+          "final org.apache.hadoop.record.RecordOutput " + 
+          Consts.RECORD_OUTPUT + ", final String " + Consts.TAG + ")\n"+
                 "throws java.io.IOException {\n");
-      cb.append("a.startRecord(this,tag);\n");
+      cb.append(Consts.RECORD_OUTPUT + ".startRecord(this," + Consts.TAG + ");\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genWriteMethod(cb, name, name);
       }
-      cb.append("a.endRecord(this,tag);\n");
+      cb.append(Consts.RECORD_OUTPUT + ".endRecord(this," + Consts.TAG+");\n");
       cb.append("}\n");
-      
-      cb.append("public void deserialize("+
-                "final org.apache.hadoop.record.RecordInput a, final String tag)\n"+
+
+      // deserializeWithoutFilter()
+      cb.append("private void deserializeWithoutFilter("+
+                "final org.apache.hadoop.record.RecordInput " + 
+                Consts.RECORD_INPUT + ", final String " + Consts.TAG + ")\n"+
                 "throws java.io.IOException {\n");
-      cb.append("a.startRecord(tag);\n");
+      cb.append(Consts.RECORD_INPUT + ".startRecord(" + Consts.TAG + ");\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genReadMethod(cb, name, name, false);
       }
-      cb.append("a.endRecord(tag);\n");
+      cb.append(Consts.RECORD_INPUT + ".endRecord(" + Consts.TAG+");\n");
       cb.append("}\n");
       
-      cb.append("public int compareTo (final Object peer_) throws ClassCastException {\n");
-      cb.append("if (!(peer_ instanceof "+name+")) {\n");
+      // deserialize()
+      cb.append("public void deserialize(final " +
+          "org.apache.hadoop.record.RecordInput " + 
+          Consts.RECORD_INPUT + ", final String " + Consts.TAG + ")\n"+
+          "throws java.io.IOException {\n");
+      cb.append("if (null == " + Consts.RTI_FILTER + ") {\n");
+      cb.append("deserializeWithoutFilter(" + Consts.RECORD_INPUT + ", " + 
+          Consts.TAG + ");\n");
+      cb.append("return;\n");
+      cb.append("}\n");
+      cb.append("// if we're here, we need to read based on version info\n");
+      cb.append(Consts.RECORD_INPUT + ".startRecord(" + Consts.TAG + ");\n");
+      cb.append("setupRtiFields();\n");
+      cb.append("for (int " + Consts.RIO_PREFIX + "i=0; " + Consts.RIO_PREFIX + 
+          "i<" + Consts.RTI_FILTER + ".getFieldTypeInfos().size(); " + 
+          Consts.RIO_PREFIX + "i++) {\n");
+      int ct = 0;
+      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
+        JField<JavaType> jf = i.next();
+        String name = jf.getName();
+        JavaType type = jf.getType();
+        ct++;
+        if (1 != ct) {
+          cb.append("else ");
+        }
+        cb.append("if (" + ct + " == " + Consts.RTI_FILTER_FIELDS + "[" +
+            Consts.RIO_PREFIX + "i]) {\n");
+        type.genReadMethod(cb, name, name, false);
+        cb.append("}\n");
+      }
+      if (0 != ct) {
+        cb.append("else {\n");
+        cb.append("java.util.ArrayList<"
+                + "org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = "
+                + "(java.util.ArrayList<"
+                + "org.apache.hadoop.record.meta.FieldTypeInfo>)"
+                + "(" + Consts.RTI_FILTER + ".getFieldTypeInfos());\n");
+        cb.append("org.apache.hadoop.record.meta.Utils.skip(" + 
+            Consts.RECORD_INPUT + ", " + "typeInfos.get(" + Consts.RIO_PREFIX + 
+            "i).getFieldID(), typeInfos.get(" + 
+            Consts.RIO_PREFIX + "i).getTypeID());\n");
+        cb.append("}\n");
+      }
+      cb.append("}\n");
+      cb.append(Consts.RECORD_INPUT + ".endRecord(" + Consts.TAG+");\n");
+      cb.append("}\n");
+
+      // compareTo()
+      cb.append("public int compareTo (final Object " + Consts.RIO_PREFIX + 
+          "peer_) throws ClassCastException {\n");
+      cb.append("if (!(" + Consts.RIO_PREFIX + "peer_ instanceof "+name+")) {\n");
       cb.append("throw new ClassCastException(\"Comparing different types of records.\");\n");
       cb.append("}\n");
-      cb.append(name+" peer = ("+name+") peer_;\n");
-      cb.append("int ret = 0;\n");
+      cb.append(name+" " + Consts.RIO_PREFIX + "peer = ("+name+") " + 
+          Consts.RIO_PREFIX + "peer_;\n");
+      cb.append("int " + Consts.RIO_PREFIX + "ret = 0;\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
-        type.genCompareTo(cb, name, "peer."+name);
-        cb.append("if (ret != 0) return ret;\n");
+        type.genCompareTo(cb, name, Consts.RIO_PREFIX + "peer."+name);
+        cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) return " + 
+            Consts.RIO_PREFIX + "ret;\n");
       }
-      cb.append("return ret;\n");
+      cb.append("return " + Consts.RIO_PREFIX + "ret;\n");
       cb.append("}\n");
       
-      cb.append("public boolean equals(final Object peer_) {\n");
-      cb.append("if (!(peer_ instanceof "+name+")) {\n");
+      // equals()
+      cb.append("public boolean equals(final Object " + Consts.RIO_PREFIX + 
+          "peer_) {\n");
+      cb.append("if (!(" + Consts.RIO_PREFIX + "peer_ instanceof "+name+")) {\n");
       cb.append("return false;\n");
       cb.append("}\n");
-      cb.append("if (peer_ == this) {\n");
+      cb.append("if (" + Consts.RIO_PREFIX + "peer_ == this) {\n");
       cb.append("return true;\n");
       cb.append("}\n");
-      cb.append(name+" peer = ("+name+") peer_;\n");
-      cb.append("boolean ret = false;\n");
+      cb.append(name+" " + Consts.RIO_PREFIX + "peer = ("+name+") " + 
+          Consts.RIO_PREFIX + "peer_;\n");
+      cb.append("boolean " + Consts.RIO_PREFIX + "ret = false;\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
-        type.genEquals(cb, name, "peer."+name);
-        cb.append("if (!ret) return ret;\n");
+        type.genEquals(cb, name, Consts.RIO_PREFIX + "peer."+name);
+        cb.append("if (!" + Consts.RIO_PREFIX + "ret) return " + 
+            Consts.RIO_PREFIX + "ret;\n");
       }
-      cb.append("return ret;\n");
+      cb.append("return " + Consts.RIO_PREFIX + "ret;\n");
       cb.append("}\n");
-      
+
+      // clone()
       cb.append("public Object clone() throws CloneNotSupportedException {\n");
-      cb.append(name+" other = new "+name+"();\n");
+      cb.append(name+" " + Consts.RIO_PREFIX + "other = new "+name+"();\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genClone(cb, name);
       }
-      cb.append("return other;\n");
+      cb.append("return " + Consts.RIO_PREFIX + "other;\n");
       cb.append("}\n");
       
       cb.append("public int hashCode() {\n");
-      cb.append("int result = 17;\n");
-      cb.append("int ret;\n");
+      cb.append("int " + Consts.RIO_PREFIX + "result = 17;\n");
+      cb.append("int " + Consts.RIO_PREFIX + "ret;\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genHashCode(cb, name);
-        cb.append("result = 37*result + ret;\n");
+        cb.append(Consts.RIO_PREFIX + "result = 37*" + Consts.RIO_PREFIX + 
+            "result + " + Consts.RIO_PREFIX + "ret;\n");
       }
-      cb.append("return result;\n");
+      cb.append("return " + Consts.RIO_PREFIX + "result;\n");
       cb.append("}\n");
       
       cb.append("public static String signature() {\n");
@@ -295,10 +481,51 @@
       }
     }
     
+    String getTypeIDObjectString() {
+      return "new ::hadoop::StructTypeID(" + 
+      fullName + "::getTypeInfo().getFieldTypeInfos())";
+    }
+
     String genDecl(String fname) {
       return "  "+name+" "+fname+";\n";
     }
     
+    void genSetRTIFilter(CodeBuffer cb) {
+      // we set the RTI filter here
+      cb.append(fullName + "::setTypeFilter(rti.getNestedStructTypeInfo(\""+
+          name + "\"));\n");
+    }
+
+    void genSetupRTIFields(CodeBuffer cb) {
+      cb.append("void " + fullName + "::setupRtiFields() {\n");
+      cb.append("if (NULL == p" + Consts.RTI_FILTER + ") return;\n");
+      cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") return;\n");
+      cb.append("p" + Consts.RTI_FILTER_FIELDS + " = new int[p" + 
+          Consts.RTI_FILTER + "->getFieldTypeInfos().size()];\n");
+      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " + 
+          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER + 
+          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
+      cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + 
+          "i] = 0;\n");
+      cb.append("}\n");
+      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " + 
+          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER + 
+          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
+      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "j=0; " + 
+          Consts.RIO_PREFIX + "j<p" + Consts.RTI_VAR + 
+          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "j++) {\n");
+      cb.append("if (*(p" + Consts.RTI_FILTER + "->getFieldTypeInfos()[" + 
+          Consts.RIO_PREFIX + "i]) == *(p" + Consts.RTI_VAR + 
+          "->getFieldTypeInfos()[" + Consts.RIO_PREFIX + "j])) {\n");
+      cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + 
+          "i] = " + Consts.RIO_PREFIX + "j+1;\n");
+      cb.append("break;\n");
+      cb.append("}\n");
+      cb.append("}\n");
+      cb.append("}\n");
+      cb.append("}\n");
+    }
+    
     void genCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
       throws IOException {
       CodeBuffer hb = new CodeBuffer();
@@ -317,9 +544,24 @@
         CppType type = jf.getType();
         type.genDecl(hb, name);
       }
+      
+      // type info vars
+      hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_VAR + ";\n");
+      hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_FILTER + ";\n");
+      hb.append("static int* p" + Consts.RTI_FILTER_FIELDS + ";\n");
+      hb.append("static ::hadoop::RecordTypeInfo* setupTypeInfo();\n");
+      hb.append("static void setupRtiFields();\n");
+      hb.append("virtual void deserializeWithoutFilter(::hadoop::IArchive& " + 
+          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
       hb.append("public:\n");
-      hb.append("virtual void serialize(::hadoop::OArchive& a_, const char* tag) const;\n");
-      hb.append("virtual void deserialize(::hadoop::IArchive& a_, const char* tag);\n");
+      hb.append("static const ::hadoop::RecordTypeInfo& getTypeInfo() " +
+          "{return *p" + Consts.RTI_VAR + ";}\n");
+      hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo& rti);\n");
+      hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo* prti);\n");
+      hb.append("virtual void serialize(::hadoop::OArchive& " + 
+          Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const;\n");
+      hb.append("virtual void deserialize(::hadoop::IArchive& " + 
+          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
       hb.append("virtual const ::std::string& type() const;\n");
       hb.append("virtual const ::std::string& signature() const;\n");
       hb.append("virtual bool operator<(const "+name+"& peer_) const;\n");
@@ -339,40 +581,151 @@
       hh.write(hb.toString());
       
       CodeBuffer cb = new CodeBuffer();
+
+      // initialize type info vars
+      cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" + 
+          Consts.RTI_VAR + " = " + fullName + "::setupTypeInfo();\n");
+      cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" + 
+          Consts.RTI_FILTER + " = NULL;\n");
+      cb.append("int* " + fullName + "::p" + 
+          Consts.RTI_FILTER_FIELDS + " = NULL;\n\n");
+
+      // setupTypeInfo()
+      cb.append("::hadoop::RecordTypeInfo* "+fullName+"::setupTypeInfo() {\n");
+      cb.append("::hadoop::RecordTypeInfo* p = new ::hadoop::RecordTypeInfo(\"" + 
+          name + "\");\n");
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+        JField<CppType> jf = i.next();
+        String name = jf.getName();
+        CppType type = jf.getType();
+        type.genStaticTypeInfo(cb, name);
+      }
+      cb.append("return p;\n");
+      cb.append("}\n");
+
+      // setTypeFilter()
+      cb.append("void "+fullName+"::setTypeFilter(const " +
+          "::hadoop::RecordTypeInfo& rti) {\n");
+      cb.append("if (NULL != p" + Consts.RTI_FILTER + ") {\n");
+      cb.append("delete p" + Consts.RTI_FILTER + ";\n");
+      cb.append("}\n");
+      cb.append("p" + Consts.RTI_FILTER + " = new ::hadoop::RecordTypeInfo(rti);\n");
+      cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") {\n");
+      cb.append("delete p" + Consts.RTI_FILTER_FIELDS + ";\n");
+      cb.append("}\n");
+      cb.append("p" + Consts.RTI_FILTER_FIELDS + " = NULL;\n");
+      // set RTIFilter for nested structs. We may end up with multiple lines that 
+      // do the same thing, if the same struct is nested in more than one field, 
+      // but that's OK. 
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+        JField<CppType> jf = i.next();
+        CppType type = jf.getType();
+        type.genSetRTIFilter(cb);
+      }
+      cb.append("}\n");
       
-      cb.append("void "+fullName+"::serialize(::hadoop::OArchive& a_, const char* tag) const {\n");
-      cb.append("a_.startRecord(*this,tag);\n");
+      // setTypeFilter()
+      cb.append("void "+fullName+"::setTypeFilter(const " +
+          "::hadoop::RecordTypeInfo* prti) {\n");
+      cb.append("if (NULL != prti) {\n");
+      cb.append("setTypeFilter(*prti);\n");
+      cb.append("}\n");
+      cb.append("}\n");
+
+      // setupRtiFields()
+      genSetupRTIFields(cb);
+
+      // serialize()
+      cb.append("void "+fullName+"::serialize(::hadoop::OArchive& " + 
+          Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const {\n");
+      cb.append(Consts.RECORD_OUTPUT + ".startRecord(*this," + 
+          Consts.TAG + ");\n");
       for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
         JField<CppType> jf = i.next();
         String name = jf.getName();
         CppType type = jf.getType();
         if (type instanceof JBuffer.CppBuffer) {
-          cb.append("a_.serialize("+name+","+name+".length(),\""+name+"\");\n");
+          cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+","+name+
+              ".length(),\""+name+"\");\n");
         } else {
-          cb.append("a_.serialize("+name+",\""+name+"\");\n");
+          cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+",\""+
+              name+"\");\n");
         }
       }
-      cb.append("a_.endRecord(*this,tag);\n");
+      cb.append(Consts.RECORD_OUTPUT + ".endRecord(*this," + Consts.TAG + ");\n");
       cb.append("return;\n");
       cb.append("}\n");
       
-      cb.append("void "+fullName+"::deserialize(::hadoop::IArchive& a_, const char* tag) {\n");
-      cb.append("a_.startRecord(*this,tag);\n");
+      // deserializeWithoutFilter()
+      cb.append("void "+fullName+"::deserializeWithoutFilter(::hadoop::IArchive& " +
+          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
+      cb.append(Consts.RECORD_INPUT + ".startRecord(*this," + 
+          Consts.TAG + ");\n");
       for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
         JField<CppType> jf = i.next();
         String name = jf.getName();
         CppType type = jf.getType();
         if (type instanceof JBuffer.CppBuffer) {
-          cb.append("{\nsize_t len=0; a_.deserialize("+name+",len,\""+name+"\");\n}\n");
+          cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
+              name+",len,\""+name+"\");\n}\n");
         } else {
-          cb.append("a_.deserialize("+name+",\""+name+"\");\n");
+          cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
+              name+"\");\n");
         }
       }
-      cb.append("a_.endRecord(*this,tag);\n");
+      cb.append(Consts.RECORD_INPUT + ".endRecord(*this," + Consts.TAG + ");\n");
       cb.append("return;\n");
       cb.append("}\n");
       
-      
+      // deserialize()
+      cb.append("void "+fullName+"::deserialize(::hadoop::IArchive& " +
+          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
+      cb.append("if (NULL == p" + Consts.RTI_FILTER + ") {\n");
+      cb.append("deserializeWithoutFilter(" + Consts.RECORD_INPUT + ", " + 
+          Consts.TAG + ");\n");
+      cb.append("return;\n");
+      cb.append("}\n");
+      cb.append("// if we're here, we need to read based on version info\n");
+      cb.append(Consts.RECORD_INPUT + ".startRecord(*this," + 
+          Consts.TAG + ");\n");
+      cb.append("setupRtiFields();\n");
+      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " + 
+          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER + 
+          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
+      int ct = 0;
+      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
+        JField<CppType> jf = i.next();
+        String name = jf.getName();
+        CppType type = jf.getType();
+        ct++;
+        if (1 != ct) {
+          cb.append("else ");
+        }
+        cb.append("if (" + ct + " == p" + Consts.RTI_FILTER_FIELDS + "[" +
+            Consts.RIO_PREFIX + "i]) {\n");
+        if (type instanceof JBuffer.CppBuffer) {
+          cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
+              name+",len,\""+name+"\");\n}\n");
+        } else {
+          cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
+              name+"\");\n");
+        }
+        cb.append("}\n");
+      }
+      if (0 != ct) {
+        cb.append("else {\n");
+        cb.append("const std::vector< ::hadoop::FieldTypeInfo* >& typeInfos = p" + 
+            Consts.RTI_FILTER + "->getFieldTypeInfos();\n");
+        cb.append("::hadoop::Utils::skip(" + Consts.RECORD_INPUT + 
+            ", typeInfos[" + Consts.RIO_PREFIX + "i]->getFieldID()->c_str()" + 
+            ", *(typeInfos[" + Consts.RIO_PREFIX + "i]->getTypeID()));\n");
+        cb.append("}\n");
+      }
+      cb.append("}\n");
+      cb.append(Consts.RECORD_INPUT + ".endRecord(*this, " + Consts.TAG+");\n");
+      cb.append("}\n");
+
+      // operator <
       cb.append("bool "+fullName+"::operator< (const "+fullName+"& peer_) const {\n");
       cb.append("return (1\n");
       for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {

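JRecord ties the feature together: each generated class now carries a static RecordTypeInfo, a static setTypeFilter(), and a deserialize() that, when a filter is installed, reads fields in the writer's layout and skips unrecognized ones via org.apache.hadoop.record.meta.Utils.skip. A hedged usage sketch of the intended read path; the "Employee" record and the convention of writing the RecordTypeInfo ahead of the data are assumptions for illustration, not part of this commit:

    import java.io.FileInputStream;
    import org.apache.hadoop.record.BinaryRecordInput;
    import org.apache.hadoop.record.meta.RecordTypeInfo;

    public class VersionedReadSketch {
      public static void main(String[] args) throws Exception {
        BinaryRecordInput in =
            new BinaryRecordInput(new FileInputStream(args[0]));

        // Read the type information the writer serialized first ...
        RecordTypeInfo writerInfo = new RecordTypeInfo();
        writerInfo.deserialize(in, "typeInfo");

        // ... and install it as the filter. deserialize() then reads fields
        // in the writer's layout, skipping anything this Employee lacks.
        Employee.setTypeFilter(writerInfo);

        Employee e = new Employee();
        e.deserialize(in, "employee");
      }
    }
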
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java Wed Jan 16 23:02:07 2008
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.record.compiler;
 
-import org.apache.hadoop.record.compiler.JCompType.CCompType;
-import org.apache.hadoop.record.compiler.JCompType.CppCompType;
 
 /**
  */
@@ -28,9 +26,13 @@
   class JavaString extends JavaCompType {
     
     JavaString() {
-      super("String", "String", "String");
+      super("String", "String", "String", "TypeID.RIOType.STRING");
     }
     
+    String getTypeIDObjectString() {
+      return "org.apache.hadoop.record.meta.TypeID.StringTypeID";
+    }
+
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
@@ -53,13 +55,25 @@
     }
     
     void genClone(CodeBuffer cb, String fname) {
-      cb.append("other."+fname+" = this."+fname+";\n");
+      cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n");
+    }
+  }
+
+  class CppString extends CppCompType {
+    
+    CppString() {
+      super("::std::string");
+    }
+    
+    String getTypeIDObjectString() {
+      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_STRING)";
     }
   }
+  
   /** Creates a new instance of JString */
   public JString() {
     setJavaType(new JavaString());
-    setCppType(new CppCompType(" ::std::string"));
+    setCppType(new CppString());
     setCType(new CCompType());
   }
     

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java Wed Jan 16 23:02:07 2008
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.record.compiler;
 
+import java.util.Map;
+
+
 /**
  * Abstract Base class for all types supported by Hadoop Record I/O.
  */
@@ -39,19 +42,49 @@
     private String name;
     private String methodSuffix;
     private String wrapper;
+    private String typeIDByteString; // points to TypeID.RIOType 
     
     JavaType(String javaname,
-             String suffix,
-             String wrapper) {
+        String suffix,
+        String wrapper, 
+        String typeIDByteString) { 
       this.name = javaname;
       this.methodSuffix = suffix;
       this.wrapper = wrapper;
+      this.typeIDByteString = typeIDByteString;
     }
-    
+
     void genDecl(CodeBuffer cb, String fname) {
       cb.append("private "+name+" "+fname+";\n");
     }
     
+    void genStaticTypeInfo(CodeBuffer cb, String fname) {
+      cb.append(Consts.RTI_VAR + ".addField(\"" + fname + "\", " +
+          getTypeIDObjectString() + ");\n");
+    }
+    
+    abstract String getTypeIDObjectString();
+    
+    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
+      // do nothing by default
+      return;
+    }
+
+    /*void genRtiFieldCondition(CodeBuffer cb, String fname, int ct) {
+      cb.append("if ((tInfo.fieldID.equals(\"" + fname + "\")) && (typeVal ==" +
+          " org.apache.hadoop.record.meta." + getTypeIDByteString() + ")) {\n");
+      cb.append("rtiFilterFields[i] = " + ct + ";\n");
+      cb.append("}\n");
+    }
+
+    void genRtiNestedFieldCondition(CodeBuffer cb, String varName, int ct) {
+      cb.append("if (" + varName + ".getElementTypeID().getTypeVal() == " +
+          "org.apache.hadoop.record.meta." + getTypeIDByteString() + 
+          ") {\n");
+      cb.append("rtiFilterFields[i] = " + ct + ";\n");
+      cb.append("}\n");  
+    }*/
+
     void genConstructorParam(CodeBuffer cb, String fname) {
       cb.append("final "+name+" "+fname);
     }
@@ -77,20 +110,26 @@
       return methodSuffix;
     }
     
+    String getTypeIDByteString() {
+      return typeIDByteString;
+    }
+    
     void genWriteMethod(CodeBuffer cb, String fname, String tag) {
-      cb.append("a.write"+methodSuffix+"("+fname+",\""+tag+"\");\n");
+      cb.append(Consts.RECORD_OUTPUT + ".write"+methodSuffix + 
+          "("+fname+",\""+tag+"\");\n");
     }
     
     void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
       if (decl) {
         cb.append(name+" "+fname+";\n");
       }
-      cb.append(fname+"=a.read"+methodSuffix+"(\""+tag+"\");\n");
+      cb.append(fname+"=" + Consts.RECORD_INPUT + ".read" + 
+          methodSuffix+"(\""+tag+"\");\n");
     }
     
     void genCompareTo(CodeBuffer cb, String fname, String other) {
-      cb.append("ret = ("+fname+" == "+other+")? 0 :(("+fname+"<"+other+
-                ")?-1:1);\n");
+      cb.append(Consts.RIO_PREFIX + "ret = ("+fname+" == "+other+")? 0 :(("+
+          fname+"<"+other+")?-1:1);\n");
     }
     
     abstract void genCompareBytes(CodeBuffer cb);
@@ -98,11 +137,11 @@
     abstract void genSlurpBytes(CodeBuffer cb, String b, String s, String l);
     
     void genEquals(CodeBuffer cb, String fname, String peer) {
-      cb.append("ret = ("+fname+"=="+peer+");\n");
+      cb.append(Consts.RIO_PREFIX + "ret = ("+fname+"=="+peer+");\n");
     }
     
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append("ret = (int)"+fname+";\n");
+      cb.append(Consts.RIO_PREFIX + "ret = (int)"+fname+";\n");
     }
     
     void genConstructorSet(CodeBuffer cb, String fname) {
@@ -110,11 +149,11 @@
     }
     
     void genClone(CodeBuffer cb, String fname) {
-      cb.append("other."+fname+" = this."+fname+";\n");
+      cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n");
     }
   }
   
-  class CppType {
+  abstract class CppType {
     private String name;
     
     CppType(String cppname) {
@@ -125,6 +164,11 @@
       cb.append(name+" "+fname+";\n");
     }
     
+    void genStaticTypeInfo(CodeBuffer cb, String fname) {
+      cb.append("p->addField(new ::std::string(\"" + 
+          fname + "\"), " + getTypeIDObjectString() + ");\n");
+    }
+    
     void genGetSet(CodeBuffer cb, String fname) {
       cb.append("virtual "+name+" get"+toCamelCase(fname)+"() const {\n");
       cb.append("return "+fname+";\n");
@@ -134,6 +178,13 @@
       cb.append("}\n");
     }
     
+    abstract String getTypeIDObjectString();
+
+    void genSetRTIFilter(CodeBuffer cb) {
+      // do nothing by default
+      return;
+    }
+
     String getType() {
       return name;
     }

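With CppType made abstract and getTypeIDObjectString() abstract on both JavaType and CppType, every concrete type must now declare its meta-type, which is what forces the new CppByte/CppInt/CppMap/CppString classes throughout this commit. On the Java side the singletons can be checked directly; a tiny sketch, assuming getTypeVal() exposes the RIOType byte:

    import org.apache.hadoop.record.meta.TypeID;

    // Sketch: the singleton TypeIDs handed out by getTypeIDObjectString()
    // carry the same RIOType constants the JavaType constructors now name.
    public class TypeIdConstantsSketch {
      public static void main(String[] args) {
        System.out.println(TypeID.ByteTypeID.getTypeVal() == TypeID.RIOType.BYTE);
        System.out.println(TypeID.IntTypeID.getTypeVal() == TypeID.RIOType.INT);
        System.out.println(TypeID.StringTypeID.getTypeVal() == TypeID.RIOType.STRING);
      }
    }
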
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java?rev=612739&r1=612738&r2=612739&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java Wed Jan 16 23:02:07 2008
@@ -18,8 +18,7 @@
 
 package org.apache.hadoop.record.compiler;
 
-import org.apache.hadoop.record.compiler.JCompType.CCompType;
-import org.apache.hadoop.record.compiler.JCompType.CppCompType;
+import java.util.Map;
 
 /**
  */
@@ -43,27 +42,46 @@
     
     JavaVector(JType.JavaType t) {
       super("java.util.ArrayList<"+t.getWrapperType()+">",
-            "Vector", "java.util.ArrayList<"+t.getWrapperType()+">");
+            "Vector", "java.util.ArrayList<"+t.getWrapperType()+">",
+            "TypeID.RIOType.VECTOR");
       element = t;
     }
     
+    String getTypeIDObjectString() {
+      return "new org.apache.hadoop.record.meta.VectorTypeID(" + 
+      element.getTypeIDObjectString() + ")";
+    }
+
+    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
+      element.genSetRTIFilter(cb, nestedStructMap);
+    }
+
     void genCompareTo(CodeBuffer cb, String fname, String other) {
       cb.append("{\n");
-      cb.append("int "+getId("len1")+" = "+fname+".size();\n");
-      cb.append("int "+getId("len2")+" = "+other+".size();\n");
-      cb.append("for(int "+getId("vidx")+" = 0; "+getId("vidx")+"<"+
-                getId("len1")+" && "+getId("vidx")+"<"+getId("len2")+"; "+
-                getId("vidx")+"++) {\n");
-      cb.append(element.getType()+" "+getId("e1")+
+      incrLevel();
+      cb.append("int "+getId(Consts.RIO_PREFIX + "len1")+" = "+fname+
+          ".size();\n");
+      cb.append("int "+getId(Consts.RIO_PREFIX + "len2")+" = "+other+
+          ".size();\n");
+      cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; "+
+          getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len1")+
+          " && "+getId(Consts.RIO_PREFIX + "vidx")+"<"+
+          getId(Consts.RIO_PREFIX + "len2")+"; "+
+          getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
+      cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e1")+
                 " = "+fname+
-                ".get("+getId("vidx")+");\n");
-      cb.append(element.getType()+" "+getId("e2")+
+                ".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
+      cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e2")+
                 " = "+other+
-                ".get("+getId("vidx")+");\n");
-      element.genCompareTo(cb, getId("e1"), getId("e2"));
-      cb.append("if (ret != 0) { return ret; }\n");
+                ".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
+      element.genCompareTo(cb, getId(Consts.RIO_PREFIX + "e1"), 
+          getId(Consts.RIO_PREFIX + "e2"));
+      cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) { return " +
+          Consts.RIO_PREFIX + "ret; }\n");
       cb.append("}\n");
-      cb.append("ret = ("+getId("len1")+" - "+getId("len2")+");\n");
+      cb.append(Consts.RIO_PREFIX + "ret = ("+getId(Consts.RIO_PREFIX + "len1")+
+          " - "+getId(Consts.RIO_PREFIX + "len2")+");\n");
+      decrLevel();
       cb.append("}\n");
     }
     
@@ -73,13 +91,17 @@
       }
       cb.append("{\n");
       incrLevel();
-      cb.append("org.apache.hadoop.record.Index "+getId("vidx")+" = a.startVector(\""+tag+"\");\n");
+      cb.append("org.apache.hadoop.record.Index "+
+          getId(Consts.RIO_PREFIX + "vidx")+" = " + 
+          Consts.RECORD_INPUT + ".startVector(\""+tag+"\");\n");
       cb.append(fname+"=new "+getType()+"();\n");
-      cb.append("for (; !"+getId("vidx")+".done(); "+getId("vidx")+".incr()) {\n");
-      element.genReadMethod(cb, getId("e"), getId("e"), true);
-      cb.append(fname+".add("+getId("e")+");\n");
+      cb.append("for (; !"+getId(Consts.RIO_PREFIX + "vidx")+".done(); " + 
+          getId(Consts.RIO_PREFIX + "vidx")+".incr()) {\n");
+      element.genReadMethod(cb, getId(Consts.RIO_PREFIX + "e"), 
+          getId(Consts.RIO_PREFIX + "e"), true);
+      cb.append(fname+".add("+getId(Consts.RIO_PREFIX + "e")+");\n");
       cb.append("}\n");
-      cb.append("a.endVector(\""+tag+"\");\n");
+      cb.append(Consts.RECORD_INPUT + ".endVector(\""+tag+"\");\n");
       decrLevel();
       cb.append("}\n");
     }
@@ -87,13 +109,17 @@
     void genWriteMethod(CodeBuffer cb, String fname, String tag) {
       cb.append("{\n");
       incrLevel();
-      cb.append("a.startVector("+fname+",\""+tag+"\");\n");
-      cb.append("int "+getId("len")+" = "+fname+".size();\n");
-      cb.append("for(int "+getId("vidx")+" = 0; "+getId("vidx")+"<"+getId("len")+"; "+getId("vidx")+"++) {\n");
-      cb.append(element.getType()+" "+getId("e")+" = "+fname+".get("+getId("vidx")+");\n");
-      element.genWriteMethod(cb, getId("e"), getId("e"));
+      cb.append(Consts.RECORD_OUTPUT + ".startVector("+fname+",\""+tag+"\");\n");
+      cb.append("int "+getId(Consts.RIO_PREFIX + "len")+" = "+fname+".size();\n");
+      cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; " + 
+          getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len")+
+          "; "+getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
+      cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e")+" = "+
+          fname+".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
+      element.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "e"), 
+          getId(Consts.RIO_PREFIX + "e"));
       cb.append("}\n");
-      cb.append("a.endVector("+fname+",\""+tag+"\");\n");
+      cb.append(Consts.RECORD_OUTPUT + ".endVector("+fname+",\""+tag+"\");\n");
       cb.append("}\n");
       decrLevel();
     }
@@ -137,11 +163,31 @@
     }
   }
   
+  class CppVector extends CppCompType {
+    
+    private JType.CppType element;
+    
+    CppVector(JType.CppType t) {
+      super("::std::vector< "+t.getType()+" >");
+      element = t;
+    }
+    
+    String getTypeIDObjectString() {
+      return "new ::hadoop::VectorTypeID(" +    
+      element.getTypeIDObjectString() + ")";
+    }
+
+    void genSetRTIFilter(CodeBuffer cb) {
+      element.genSetRTIFilter(cb);
+    }
+
+  }
+  
   /** Creates a new instance of JVector */
   public JVector(JType t) {
     type = t;
     setJavaType(new JavaVector(t.getJavaType()));
-    setCppType(new CppCompType(" ::std::vector<"+t.getCppType().getType()+">"));
+    setCppType(new CppVector(t.getCppType()));
     setCType(new CCompType());
   }
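
For a DDL field such as vector<float> scores, the rewritten genWriteMethod above would emit code along these lines. This is a hand-written sketch, assuming Consts.RECORD_OUTPUT expands to "_rio_a" and that getId() appends the current nesting level to each prefixed identifier:

import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.record.RecordOutput;

class ScoresWriteSketch {
  private ArrayList<Float> scores = new ArrayList<Float>();

  void writeScores(RecordOutput _rio_a) throws IOException {
    _rio_a.startVector(scores, "scores");
    int _rio_len1 = scores.size();
    for (int _rio_vidx1 = 0; _rio_vidx1 < _rio_len1; _rio_vidx1++) {
      Float _rio_e1 = scores.get(_rio_vidx1);
      _rio_a.writeFloat(_rio_e1, "_rio_e1");  // element write; tag string assumed
    }
    _rio_a.endVector(scores, "scores");
  }
}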
   

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/FieldTypeInfo.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/FieldTypeInfo.java?rev=612739&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/FieldTypeInfo.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/FieldTypeInfo.java Wed Jan 16 23:02:07 2008
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+
+import org.apache.hadoop.record.RecordOutput;
+
+/** 
+ * Represents the type information for a field, which is made up of its 
+ * ID (name) and its type (a TypeID object).
+ */
+public class FieldTypeInfo
+{
+
+  private String fieldID;
+  private TypeID typeID;
+
+  /**
+   * Construct a FieldTypeInfo with the given field name and type
+   */
+  FieldTypeInfo(String fieldID, TypeID typeID) {
+    this.fieldID = fieldID;
+    this.typeID = typeID;
+  }
+
+  /**
+   * get the field's TypeID object
+   */
+  public TypeID getTypeID() {
+    return typeID;
+  }
+  
+  /**
+   * get the field's id (name)
+   */
+  public String getFieldID() {
+    return fieldID;
+  }
+  
+  void write(RecordOutput rout, String tag) throws IOException {
+    rout.writeString(fieldID, tag);
+    typeID.write(rout, tag);
+  }
+  
+  /**
+   * Two FieldTypeInfos are equal if each of their fields matches
+   */
+  public boolean equals(Object o) {
+    if (this == o) 
+      return true;
+    if (!(o instanceof FieldTypeInfo))
+      return false;
+    FieldTypeInfo fti = (FieldTypeInfo) o;
+    // first check if fieldID matches
+    if (!this.fieldID.equals(fti.fieldID)) {
+      return false;
+    }
+    // now see if typeID matches
+    return (this.typeID.equals(fti.typeID));
+  }
+  
+  /**
+   * We use a basic hashcode implementation, since this class will likely not
+   * be used as a hashmap key 
+   */
+  public int hashCode() {
+    return 37*17+typeID.hashCode() + 37*17+fieldID.hashCode();
+  }
+  
+
+  public boolean equals(FieldTypeInfo ti) {
+    // first check if fieldID matches
+    if (!this.fieldID.equals(ti.fieldID)) {
+      return false;
+    }
+    // now see if typeID matches
+    return (this.typeID.equals(ti.typeID));
+  }
+
+}
+
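
A minimal usage sketch, not part of this commit: the FieldTypeInfo constructor is package-private, so the demo below is assumed to sit in the same package, and TypeID.IntTypeID is assumed to be one of the static constants defined in TypeID.java.

package org.apache.hadoop.record.meta;

public class FieldTypeInfoDemo {
  public static void main(String[] args) {
    // Two FieldTypeInfos are equal iff both the field name and TypeID match.
    FieldTypeInfo a = new FieldTypeInfo("id", TypeID.IntTypeID);
    FieldTypeInfo b = new FieldTypeInfo("id", TypeID.IntTypeID);
    System.out.println(a.equals(b));                   // true
    System.out.println(a.hashCode() == b.hashCode());  // true: equal objects hash alike
  }
}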

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/MapTypeID.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/MapTypeID.java?rev=612739&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/MapTypeID.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/MapTypeID.java Wed Jan 16 23:02:07 2008
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+import java.util.*;
+
+import org.apache.hadoop.record.RecordOutput;
+
+/** 
+ * Represents the typeID for a Map 
+ */
+public class MapTypeID extends TypeID {
+  
+  private TypeID typeIDKey; 
+  private TypeID typeIDValue; 
+  
+  public MapTypeID(TypeID typeIDKey, TypeID typeIDValue) {
+    super(RIOType.MAP);
+    this.typeIDKey = typeIDKey;
+    this.typeIDValue = typeIDValue;
+  }
+  
+  /**
+   * get the TypeID of the map's key element
+   */
+  public TypeID getKeyTypeID() {
+    return this.typeIDKey;
+  }
+  
+  /**
+   * get the TypeID of the map's value element
+   */
+  public TypeID getValueTypeID() {
+    return this.typeIDValue;
+  }
+  
+  void write(RecordOutput rout, String tag) throws IOException {
+    rout.writeByte(typeVal, tag);
+    typeIDKey.write(rout, tag);
+    typeIDValue.write(rout, tag);
+  }
+  
+  /**
+   * Two map typeIDs are equal if their constituent elements have the 
+   * same type
+   */
+  public boolean equals(Object o) {
+    if (this == o) 
+      return true;
+    if (!(o instanceof MapTypeID))
+      return false;
+    MapTypeID mti = (MapTypeID) o;
+    if (!this.typeIDKey.equals(mti.typeIDKey))
+      return false;
+    return this.typeIDValue.equals(mti.typeIDValue);
+  }
+  
+  /**
+   * We use a basic hashcode implementation, since this class will likely not
+   * be used as a hashmap key 
+   */
+  public int hashCode() {
+    return 37*17+typeIDKey.hashCode() + 37*17+typeIDValue.hashCode();
+  }
+  
+}
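
A minimal sketch, not part of this commit, of composing nested type IDs. The static primitive constants (TypeID.StringTypeID, TypeID.LongTypeID) and the VectorTypeID(TypeID) constructor are assumed from the rest of this change:

import org.apache.hadoop.record.meta.MapTypeID;
import org.apache.hadoop.record.meta.TypeID;
import org.apache.hadoop.record.meta.VectorTypeID;

public class MapTypeIDDemo {
  public static void main(String[] args) {
    // Type ID describing a map<string, vector<long>> field.
    MapTypeID tid = new MapTypeID(TypeID.StringTypeID,
                                  new VectorTypeID(TypeID.LongTypeID));
    MapTypeID same = new MapTypeID(TypeID.StringTypeID,
                                   new VectorTypeID(TypeID.LongTypeID));
    System.out.println(tid.equals(same));  // true: key and value TypeIDs both match
  }
}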

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/RecordTypeInfo.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/RecordTypeInfo.java?rev=612739&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/RecordTypeInfo.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/meta/RecordTypeInfo.java Wed Jan 16 23:02:07 2008
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record.meta;
+
+import java.io.IOException;
+import java.util.*;
+
+import org.apache.hadoop.record.RecordInput;
+import org.apache.hadoop.record.RecordOutput;
+
+
+/** 
+ * A record's Type Information object which can read/write itself. 
+ * 
+ * Type information for a record comprises metadata about the record, 
+ * as well as a collection of type information for each field in the record. 
+ */
+public class RecordTypeInfo extends org.apache.hadoop.record.Record 
+{
+
+  private String name;
+  // A RecordTypeInfo is really just a wrapper around StructTypeID
+  StructTypeID sTid;
+   // A RecordTypeInfo object is just a collection of TypeInfo objects for each of its fields.  
+  //private ArrayList<FieldTypeInfo> typeInfos = new ArrayList<FieldTypeInfo>();
+  // we keep a hashmap of struct/record names and their type information, as we need it to 
+  // set filters when reading nested structs. This map is used during deserialization.
+  //private Map<String, RecordTypeInfo> structRTIs = new HashMap<String, RecordTypeInfo>();
+
+  /**
+   * Create an empty RecordTypeInfo object.
+   */
+  public RecordTypeInfo() {
+    sTid = new StructTypeID();
+  }
+
+  /**
+   * Create a RecordTypeInfo object representing a record with the given name
+   * @param name Name of the record
+   */
+  public RecordTypeInfo(String name) {
+    this.name = name;
+    sTid = new StructTypeID();
+  }
+
+  /*
+   * private constructor
+   */
+  private RecordTypeInfo(String name, StructTypeID stid) {
+    this.sTid = stid;
+    this.name = name;
+  }
+  
+  /**
+   * return the name of the record
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * set the name of the record
+   */
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  /**
+   * Add a field. 
+   * @param fieldName Name of the field
+   * @param tid Type ID of the field
+   */
+  public void addField(String fieldName, TypeID tid) {
+    sTid.getFieldTypeInfos().add(new FieldTypeInfo(fieldName, tid));
+  }
+  
+  private void addAll(Collection<FieldTypeInfo> tis) {
+    sTid.getFieldTypeInfos().addAll(tis);
+  }
+
+  /**
+   * Return a collection of field type infos
+   */
+  public Collection<FieldTypeInfo> getFieldTypeInfos() {
+    return sTid.getFieldTypeInfos();
+  }
+  
+  /**
+   * Return the type info of a nested record. We only consider nesting 
+   * to one level. 
+   * @param name Name of the nested record
+   */
+  public RecordTypeInfo getNestedStructTypeInfo(String name) {
+    StructTypeID stid = sTid.findStruct(name);
+    if (null == stid) return null;
+    return new RecordTypeInfo(name, stid);
+  }
+
+  /**
+   * Serialize the type information for a record
+   */
+  public void serialize(RecordOutput rout, String tag) throws IOException {
+    // write out any header, version info, here
+    rout.startRecord(this, tag);
+    rout.writeString(name, tag);
+    sTid.writeRest(rout, tag);
+    rout.endRecord(this, tag);
+  }
+
+  /**
+   * Deserialize the type information for a record
+   */
+  public void deserialize(RecordInput rin, String tag) throws IOException {
+    // read in any header, version info 
+    rin.startRecord(tag);
+    // name
+    this.name = rin.readString(tag);
+    sTid.read(rin, tag);
+    rin.endRecord(tag);
+  }
+  
+  /**
+   * This class doesn't implement Comparable as it's not meant to be used 
+   * for anything besides de/serializing, so compareTo() always throws an 
+   * exception: a ClassCastException if the peer is not a RecordTypeInfo, 
+   * and an UnsupportedOperationException otherwise.
+   */
+  public int compareTo (final Object peer_) throws ClassCastException {
+    if (!(peer_ instanceof RecordTypeInfo)) {
+      throw new ClassCastException("Comparing different types of records.");
+    }
+    throw new UnsupportedOperationException("compareTo() is not supported");
+  }
+}
+
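
Finally, a minimal end-to-end sketch, not part of this commit, of building a record's type information and serializing it. BinaryRecordOutput and the static TypeID constants are assumed from elsewhere in org.apache.hadoop.record:

import java.io.ByteArrayOutputStream;

import org.apache.hadoop.record.BinaryRecordOutput;
import org.apache.hadoop.record.meta.RecordTypeInfo;
import org.apache.hadoop.record.meta.TypeID;
import org.apache.hadoop.record.meta.VectorTypeID;

public class RecordTypeInfoDemo {
  public static void main(String[] args) throws Exception {
    RecordTypeInfo rti = new RecordTypeInfo("Employee");
    rti.addField("name", TypeID.StringTypeID);                     // assumed constant
    rti.addField("scores", new VectorTypeID(TypeID.FloatTypeID));  // assumed constant
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    rti.serialize(new BinaryRecordOutput(out), "Employee");
    System.out.println(out.size() + " bytes of serialized type info");
  }
}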


