hadoop-common-commits mailing list archives

From: d...@apache.org
Subject: svn commit: r612765 - in /lucene/hadoop/trunk: ./ src/c++/librecordio/ src/c++/librecordio/test/ src/java/org/apache/hadoop/record/compiler/ src/test/ddl/
Date: Thu, 17 Jan 2008 08:55:50 GMT
Author: ddas
Date: Thu Jan 17 00:55:47 2008
New Revision: 612765

URL: http://svn.apache.org/viewvc?rev=612765&view=rev
Log:
HADOOP-1883. Reverting due to a bad commit.
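As the diffs below show, this revert removes the Record I/O versioning support that HADOOP-1883 had introduced: RecordTypeInfo and type-filter generation disappear from the C++ librecordio library and the Java record compiler, the versioning tests and their DDL records (RecRecordOld, RecRecordNew) are dropped, the remaining test records are renamed back from RecRecord* to TestRecord*, and RecordWriter::write reverts to taking a non-const Record&.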

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/c++/librecordio/Makefile
    lucene/hadoop/trunk/src/c++/librecordio/recordio.cc
    lucene/hadoop/trunk/src/c++/librecordio/recordio.hh
    lucene/hadoop/trunk/src/c++/librecordio/test/test.cc
    lucene/hadoop/trunk/src/c++/librecordio/test/test.jr
    lucene/hadoop/trunk/src/c++/librecordio/test/testFromJava.cc
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CppGenerator.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBoolean.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JByte.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JCompType.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JDouble.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java
    lucene/hadoop/trunk/src/test/ddl/test.jr

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Thu Jan 17 00:55:47 2008
@@ -91,8 +91,6 @@
     HADOOP-2532.  Add to MapFile a getClosest method that returns the key
     that comes just before if the key is not present.  (stack via tomwhite)
 
-    HADOOP-1883. Add versioning for Record I/O. (Vivek Ratan via ddas) 
-
   IMPROVEMENTS
 
     HADOOP-2045.  Change committer list on website to a table, so that

Modified: lucene/hadoop/trunk/src/c++/librecordio/Makefile
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/librecordio/Makefile?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/c++/librecordio/Makefile (original)
+++ lucene/hadoop/trunk/src/c++/librecordio/Makefile Thu Jan 17 00:55:47 2008
@@ -18,17 +18,15 @@
 
 all: librecordio.a test
 
-librecordio.a: recordio.o filestream.o binarchive.o csvarchive.o xmlarchive.o \
-	exception.o typeIDs.o fieldTypeInfo.o recordTypeInfo.o utils.o
-	ar cru librecordio.a recordio.o filestream.o binarchive.o csvarchive.o xmlarchive.o \
-	exception.o typeIDs.o fieldTypeInfo.o recordTypeInfo.o utils.o
+librecordio.a: recordio.o filestream.o binarchive.o csvarchive.o xmlarchive.o exception.o
+	ar cru librecordio.a recordio.o filestream.o binarchive.o csvarchive.o xmlarchive.o exception.o
 
 recordio.o: recordio.cc recordio.hh archive.hh
 	g++ ${COPTS} -c -I${XERCESCROOT}/include -o recordio.o recordio.cc
-
+	
 filestream.o: filestream.cc recordio.hh filestream.hh
 	g++ ${COPTS} -c -o filestream.o filestream.cc
-
+	
 binarchive.o: binarchive.cc recordio.hh binarchive.hh archive.hh
 	g++ ${COPTS} -c -o binarchive.o binarchive.cc
 
@@ -37,33 +35,20 @@
 
 xmlarchive.o: xmlarchive.cc recordio.hh xmlarchive.hh archive.hh
 	g++ ${COPTS} -c -I${XERCESCROOT}/include -o xmlarchive.o xmlarchive.cc
-
+		
 exception.o: exception.cc exception.hh
 	g++ ${COPTS} -c -o exception.o exception.cc
-
-typeIDs.o: typeIDs.cc typeIDs.hh
-	g++ ${COPTS} -c -o typeIDs.o typeIDs.cc
-fieldTypeInfo.o: fieldTypeInfo.cc fieldTypeInfo.hh
-	g++ ${COPTS} -c -o fieldTypeInfo.o fieldTypeInfo.cc
-recordTypeInfo.o: recordTypeInfo.cc recordTypeInfo.hh
-	g++ ${COPTS} -c -o recordTypeInfo.o recordTypeInfo.cc
-utils.o: utils.cc utils.hh
-	g++ ${COPTS} -c -o utils.o utils.cc
-
+	
 recordio.cc: recordio.hh archive.hh exception.hh
 filestream.cc: recordio.hh filestream.hh 
 binarchive.cc: recordio.hh binarchive.hh 
 csvarchive.cc: recordio.hh csvarchive.hh 
 xmlarchive.cc: recordio.hh xmlarchive.hh 
 exception.cc: exception.hh 
-typeIDs.cc: typeIDs.hh
-fieldTypeInfo.cc: fieldTypeInfo.hh
-recordTypeInfo.cc: recordTypeInfo.hh
-utils.cc: utils.hh
 
 test: librecordio.a
 	make -C test all
-
+	
 clean:
 	rm -f *~ *.o *.a
 	make -C test clean

Modified: lucene/hadoop/trunk/src/c++/librecordio/recordio.cc
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/librecordio/recordio.cc?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/c++/librecordio/recordio.cc (original)
+++ lucene/hadoop/trunk/src/c++/librecordio/recordio.cc Thu Jan 17 00:55:47 2008
@@ -68,7 +68,7 @@
   delete mpArchive;
 }
 
-void hadoop::RecordWriter::write(const Record& record)
+void hadoop::RecordWriter::write(Record& record)
 {
   record.serialize(*mpArchive, (const char*) NULL);
 }

Modified: lucene/hadoop/trunk/src/c++/librecordio/recordio.hh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/librecordio/recordio.hh?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/c++/librecordio/recordio.hh (original)
+++ lucene/hadoop/trunk/src/c++/librecordio/recordio.hh Thu Jan 17 00:55:47 2008
@@ -70,7 +70,7 @@
   OArchive* mpArchive;
 public:
   RecordWriter(OutStream& stream, RecFormat f);
-  virtual void write(const hadoop::Record& record);
+  virtual void write(hadoop::Record& record);
   virtual ~RecordWriter();
 };
 }; // end namspace hadoop

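For context, the signature reverted in the two files above is visible at call sites: with write(hadoop::Record&) restored, a RecordWriter can no longer accept a const-qualified record. A minimal caller sketch (hypothetical code, not part of this commit, reusing the FileOutStream/RecordWriter API exercised by test.cc below):

  #include "recordio.hh"
  #include "filestream.hh"

  // Hypothetical helper: serialize one record to a file in binary format.
  // The parameter must be a non-const Record& to match the reverted
  // RecordWriter::write(hadoop::Record&); passing a const Record here
  // would no longer compile against the restored header.
  void dumpRecord(hadoop::Record& rec, const char* path) {
    hadoop::FileOutStream ostream;
    ostream.open(path, true);  // true = overwrite, as in test.cc
    hadoop::RecordWriter writer(ostream, hadoop::kBinary);
    writer.write(rec);
    ostream.close();
  }
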
Modified: lucene/hadoop/trunk/src/c++/librecordio/test/test.cc
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/librecordio/test/test.cc?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/c++/librecordio/test/test.cc (original)
+++ lucene/hadoop/trunk/src/c++/librecordio/test/test.cc Thu Jan 17 00:55:47 2008
@@ -17,12 +17,11 @@
  */
 
 #include "test.hh"
-#include <vector>
 
 int main()
 {
-  org::apache::hadoop::record::test::RecRecord1 r1;
-  org::apache::hadoop::record::test::RecRecord1 r2;
+  org::apache::hadoop::record::test::TestRecord1 r1;
+  org::apache::hadoop::record::test::TestRecord1 r2;
   {
     hadoop::FileOutStream ostream;
     ostream.open("/tmp/hadooptmp.dat", true);
@@ -98,212 +97,6 @@
     }
     istream.close();
   }
-  
-  /* 
-   * Tests to check for versioning functionality
-   */
-  
-  // basic test
-  // write out a record and its type info, read it back using its typeinfo
-  {
-    hadoop::FileOutStream ostream, ortistream;
-    ostream.open("/tmp/hadooptmp.dat", true);
-    ortistream.open("/tmp/hadooprti.dat", true);
-    hadoop::RecordWriter writer(ostream, hadoop::kBinary);
-    hadoop::RecordWriter writerRti(ortistream, hadoop::kBinary);
-    r1.setBoolVal(true);
-    r1.setByteVal((int8_t)0x66);
-    r1.setFloatVal(3.145);
-    r1.setDoubleVal(1.5234);
-    r1.setIntVal(4567);
-    r1.setLongVal(0x5a5a5a5a5a5aLL);
-    std::string& s = r1.getStringVal();
-    s = "random text";
-    writer.write(r1);
-    ostream.close();
-    // write out rti info
-    writerRti.write(org::apache::hadoop::record::test::RecRecord1::getTypeInfo());
-    ortistream.close();
-
-    // read
-    hadoop::FileInStream istream;
-    istream.open("/tmp/hadooptmp.dat");
-    hadoop::RecordReader reader(istream, hadoop::kBinary);
-    hadoop::FileInStream irtistream;
-    irtistream.open("/tmp/hadooprti.dat");
-    hadoop::RecordReader readerRti(irtistream, hadoop::kBinary);
-    hadoop::RecordTypeInfo rti;
-    readerRti.read(rti);
-    irtistream.close();
-    org::apache::hadoop::record::test::RecRecord1::setTypeFilter(rti);
-    reader.read(r2);
-    if (r1 == r2) {
-      printf("Basic versioning test passed.\n");
-    } else {
-      printf("Basic versioning test failed.\n");
-    }
-    istream.close();
-  }     
-  
-  // versioning:write out a record and its type info, read back a similar record using the written record's typeinfo
-  {
-    hadoop::FileOutStream ostream, ortistream;
-    ostream.open("/tmp/hadooptmp.dat", true);
-    ortistream.open("/tmp/hadooprti.dat", true);
-    hadoop::RecordWriter writer(ostream, hadoop::kBinary);
-    hadoop::RecordWriter writerRti(ortistream, hadoop::kBinary);
-
-    // we create an array of records to write
-    std::vector<org::apache::hadoop::record::test::RecRecordOld*> recsWrite;
-    int i, j, k, l;
-    char buf[1000];
-    for (i=0; i<5; i++) {
-      org::apache::hadoop::record::test::RecRecordOld* ps1Rec = 
-        new org::apache::hadoop::record::test::RecRecordOld();
-      sprintf(buf, "This is record s1: %d", i);
-      ps1Rec->getName().assign(buf);
-
-      for (j=0; j<3; j++) {
-        ps1Rec->getIvec().push_back((int64_t)(i+j));
-      }
-
-      for (j=0; j<2; j++) {
-        std::vector<org::apache::hadoop::record::test::RecRecord0>* pVec = 
-          new std::vector<org::apache::hadoop::record::test::RecRecord0>();
-        for (k=0; k<3; k++) {
-          org::apache::hadoop::record::test::RecRecord0 *psRec = 
-            new org::apache::hadoop::record::test::RecRecord0();
-          sprintf(buf, "This is record s: (%d: %d)", j, k);
-          psRec->getStringVal().assign(buf);
-        }
-        ps1Rec->getSvec().push_back(*pVec);
-      }
-
-      sprintf(buf, "This is record s: %d", i);
-      ps1Rec->getInner().getStringVal().assign(buf);
-
-      for (l=0; l<2; l++) {
-        std::vector<std::vector<std::string> >* ppVec =
-          new std::vector<std::vector<std::string> >();
-        for (j=0; j<2; j++) {
-          std::vector< std::string >* pVec =
-            new std::vector< std::string >();
-          for (k=0; k<3; k++) {
-            sprintf(buf, "THis is a nested string: (%d: %d: %d)", l, j, k);
-            std::string* s = new std::string((const char*)buf);
-            pVec->push_back(*s);
-          }
-        }
-        ps1Rec->getStrvec().push_back(*ppVec);
-      }
-
-      ps1Rec->setI1(100+i);
-
-      ps1Rec->getMap1()[23] = "23";
-      ps1Rec->getMap1()[11] = "11";
-
-      std::map<int32_t, int64_t>* m1 = new std::map<int32_t, int64_t>();
-      std::map<int32_t, int64_t>* m2 = new std::map<int32_t, int64_t>();
-      (*m1)[5] = 5;
-      (*m1)[10] = 10;
-      (*m2)[15] = 15;
-      (*m2)[20] = 20;
-      ps1Rec->getMvec1().push_back(*m1);
-      ps1Rec->getMvec1().push_back(*m2);
-      ps1Rec->getMvec2().push_back(*m1);
-
-      recsWrite.push_back(ps1Rec);
-    }
-
-    // write out to file
-    for (unsigned int i=0; i<recsWrite.size(); i++) {
-      writer.write(*(recsWrite[i]));
-    }
-    ostream.close();
-    // write out rti info
-    writerRti.write(org::apache::hadoop::record::test::RecRecordOld::getTypeInfo());
-    ortistream.close();
-
-    // read
-    hadoop::FileInStream istream;
-    istream.open("/tmp/hadooptmp.dat");
-    hadoop::RecordReader reader(istream, hadoop::kBinary);
-    hadoop::FileInStream irtistream;
-    irtistream.open("/tmp/hadooprti.dat");
-    hadoop::RecordReader readerRti(irtistream, hadoop::kBinary);
-    hadoop::RecordTypeInfo rti;
-    readerRti.read(rti);
-    irtistream.close();
-    org::apache::hadoop::record::test::RecRecordNew::setTypeFilter(rti);
-    
-    // read records
-    std::vector<org::apache::hadoop::record::test::RecRecordNew*> recsRead;
-    for (unsigned int i=0; i<recsWrite.size(); i++) {
-      org::apache::hadoop::record::test::RecRecordNew* ps2Rec = 
-        new org::apache::hadoop::record::test::RecRecordNew();
-      reader.read(*ps2Rec);
-      recsRead.push_back(ps2Rec);
-    }
-    istream.close();
-
-    // compare
-    bool pass = true;
-    for (unsigned int i=0; i<recsRead.size(); i++) {
-      org::apache::hadoop::record::test::RecRecordNew* ps2In = recsRead[i];
-      org::apache::hadoop::record::test::RecRecordOld* ps1Out = recsWrite[i];
-
-      if (!ps2In->getName2().empty()) {
-        printf("Error in s2: name2\n");
-        pass = false;
-      }
-
-      if (!(ps2In->getInner() == ps1Out->getInner())) {
-        printf("error in s2: s1 struct\n");
-        pass = false;
-      }
-
-      if (0 != ps2In->getIvec().size()) {
-        printf("error in s2: ivec\n");
-        pass = false;
-      }
-
-      if (0 != ps2In->getSvec().size()) {
-        printf("error in s2: svec\n");
-        pass = false;
-      }
-
-      for (unsigned int j=0; j<ps2In->getStrvec().size(); j++) {
-        ::std::vector< ::std::vector< ::std::string > >& ss2Vec = ps2In->getStrvec()[j];
-        ::std::vector< ::std::vector< ::std::string > >& ss1Vec = ps1Out->getStrvec()[j];
-        for (unsigned int k=0; k<ss2Vec.size(); k++) {
-          ::std::vector< ::std::string >& s2Vec = ss2Vec[k];
-          ::std::vector< ::std::string >& s1Vec = ss1Vec[k];
-          for (unsigned int l=0; l<s2Vec.size(); l++) {
-            if (s2Vec[l] != s1Vec[l]) {
-              printf("Error in s2: s2Vec\n");
-              pass = false;
-            }
-          }
-        }
-      }
-
-      if (0 != ps2In->getMap1().size()) {
-        printf("Error in s2: map1\n");
-        pass = false;
-      }
-
-      for (unsigned int j=0; j<ps2In->getMvec2().size(); j++) {
-        if (ps2In->getMvec2()[j] != ps1Out->getMvec2()[j]) {
-          printf("Error in s2: mvec2\n");
-          pass = false;
-        }
-      }
-    }
-  
-    if (pass)   
-      printf("Versioning test passed.\n");
-  }     
-   
   return 0;
 }
 

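The large block removed above was the versioning test added by HADOOP-1883: it wrote records and their RecordTypeInfo to separate files, read the type info back, installed it with setTypeFilter(), and then deserialized either the same record type (the basic round-trip) or a differently shaped one (RecRecordNew reading data written as RecRecordOld) to check that fields with matching types survive while absent or retyped fields fall back to defaults.
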
Modified: lucene/hadoop/trunk/src/c++/librecordio/test/test.jr
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/librecordio/test/test.jr?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/c++/librecordio/test/test.jr (original)
+++ lucene/hadoop/trunk/src/c++/librecordio/test/test.jr Thu Jan 17 00:55:47 2008
@@ -17,11 +17,11 @@
  */
 
 module org.apache.hadoop.record.test {
-    class RecRecord0 {
+    class TestRecord0 {
         ustring     StringVal;
     }
 
-    class RecRecord1 {
+    class TestRecord1 {
         boolean         BoolVal;
         byte            ByteVal;
         int             IntVal;
@@ -33,31 +33,5 @@
         vector<ustring> VectorVal;
         map<ustring, ustring>   MapVal;
     }
-    
-  class RecRecordOld {
-  	ustring name;
-  	vector<long> ivec;
-  	vector<vector<RecRecord0>> svec;
-  	RecRecord0 inner;
-  	vector<vector<vector<ustring>>> strvec;
-  	float i1;
-  	map<byte, ustring> map1;
-  	vector<map<int, long>> mvec1;
-  	vector<map<int, long>> mvec2;
-  }
-  
-  /* RecRecordNew is a lot like RecRecordOld. Helps test for versioning. */
-  class RecRecordNew {
-  	ustring name2;
-  	RecRecord0 inner;
-  	vector<int> ivec;
-  	vector<vector<int>> svec;
-  	vector<vector<vector<ustring>>> strvec;
-		int i1;  	
-		map<long, ustring> map1;
-  	vector<map<int, long>> mvec2;	
-  }
-  
-    
 }
 

Modified: lucene/hadoop/trunk/src/c++/librecordio/test/testFromJava.cc
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/librecordio/test/testFromJava.cc?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/c++/librecordio/test/testFromJava.cc (original)
+++ lucene/hadoop/trunk/src/c++/librecordio/test/testFromJava.cc Thu Jan 17 00:55:47 2008
@@ -20,8 +20,8 @@
 
 int main()
 {
-  org::apache::hadoop::record::test::RecRecord1 r1;
-  org::apache::hadoop::record::test::RecRecord1 r2;
+  org::apache::hadoop::record::test::TestRecord1 r1;
+  org::apache::hadoop::record::test::TestRecord1 r2;
   r1.setBoolVal(true);
   r1.setByteVal((int8_t)0x66);
   r1.setFloatVal(3.145);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CppGenerator.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CppGenerator.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CppGenerator.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CppGenerator.java Thu Jan 17 00:55:47 2008
@@ -48,13 +48,11 @@
     hh.write("#ifndef __"+fileName.toUpperCase().replace('.','_')+"__\n");
     hh.write("#define __"+fileName.toUpperCase().replace('.','_')+"__\n");
     hh.write("#include \"recordio.hh\"\n");
-    hh.write("#include \"recordTypeInfo.hh\"\n");
     for (Iterator<JFile> iter = ilist.iterator(); iter.hasNext();) {
       hh.write("#include \""+iter.next().getName()+".hh\"\n");
     }
     
     cc.write("#include \""+fileName+".hh\"\n");
-    cc.write("#include \"utils.hh\"\n");
     
     for (Iterator<JRecord> iter = rlist.iterator(); iter.hasNext();) {
       iter.next().genCppCode(hh, cc, options);

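The two dropped #include lines mirror the Makefile revert above: with typeIDs.o, fieldTypeInfo.o, recordTypeInfo.o, and utils.o removed from librecordio.a, generated C++ headers and sources must no longer reference recordTypeInfo.hh or utils.hh.
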
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBoolean.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBoolean.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBoolean.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBoolean.java Thu Jan 17 00:55:47 2008
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JType.CType;
+import org.apache.hadoop.record.compiler.JType.CppType;
 
 /**
  */
@@ -26,20 +28,15 @@
   class JavaBoolean extends JType.JavaType {
     
     JavaBoolean() {
-      super("boolean", "Bool", "Boolean", "TypeID.RIOType.BOOL");
+      super("boolean", "Bool", "Boolean");
     }
     
     void genCompareTo(CodeBuffer cb, String fname, String other) {
-      cb.append(Consts.RIO_PREFIX + "ret = ("+fname+" == "+other+")? 0 : ("+
-          fname+"?1:-1);\n");
+      cb.append("ret = ("+fname+" == "+other+")? 0 : ("+fname+"?1:-1);\n");
     }
     
-    String getTypeIDObjectString() {
-      return "org.apache.hadoop.record.meta.TypeID.BoolTypeID";
-    }
-
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append(Consts.RIO_PREFIX + "ret = ("+fname+")?0:1;\n");
+      cb.append("ret = ("+fname+")?0:1;\n");
     }
     
     // In Binary format, boolean is written as byte. true = 1, false = 0
@@ -68,21 +65,10 @@
     }
   }
   
-  class CppBoolean extends CppType {
-    
-    CppBoolean() {
-      super("bool");
-    }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BOOL)";
-    }
-  }
-
   /** Creates a new instance of JBoolean */
   public JBoolean() {
     setJavaType(new JavaBoolean());
-    setCppType(new CppBoolean());
+    setCppType(new CppType("bool"));
     setCType(new CType());
   }
   

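The JBoolean revert above sets the pattern repeated in JByte, JDouble, JFloat, JInt, and JLong below: each primitive drops its dedicated Cpp* subclass (whose only job was getTypeIDObjectString() for the type-ID metadata) in favor of a plain CppType, the extra typeIDByteString constructor argument disappears, and the generated Java code stops prefixing its locals with Consts.RIO_PREFIX.
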
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java Thu Jan 17 00:55:47 2008
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JCompType.CCompType;
 
 /**
  * Code generator for "buffer" type.
@@ -27,24 +28,19 @@
   class JavaBuffer extends JavaCompType {
     
     JavaBuffer() {
-      super("org.apache.hadoop.record.Buffer", "Buffer", 
-          "org.apache.hadoop.record.Buffer", "TypeID.RIOType.BUFFER");
+      super("org.apache.hadoop.record.Buffer", "Buffer", "org.apache.hadoop.record.Buffer");
     }
     
-    String getTypeIDObjectString() {
-      return "org.apache.hadoop.record.meta.TypeID.BufferTypeID";
-    }
-
     void genCompareTo(CodeBuffer cb, String fname, String other) {
-      cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n");
+      cb.append("ret = "+fname+".compareTo("+other+");\n");
     }
     
     void genEquals(CodeBuffer cb, String fname, String peer) {
-      cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n");
+      cb.append("ret = "+fname+".equals("+peer+");\n");
     }
     
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n");
+      cb.append("ret = "+fname+".hashCode();\n");
     }
     
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
@@ -84,11 +80,6 @@
       cb.append("return "+fname+";\n");
       cb.append("}\n");
     }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BUFFER)";
-    }
-
   }
   /** Creates a new instance of JBuffer */
   public JBuffer() {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JByte.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JByte.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JByte.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JByte.java Thu Jan 17 00:55:47 2008
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JType.CType;
+import org.apache.hadoop.record.compiler.JType.CppType;
+
 /**
  * Code generator for "byte" type.
  */
@@ -26,13 +29,9 @@
   class JavaByte extends JavaType {
     
     JavaByte() {
-      super("byte", "Byte", "Byte", "TypeID.RIOType.BYTE");
+      super("byte", "Byte", "Byte");
     }
     
-    String getTypeIDObjectString() {
-      return "org.apache.hadoop.record.meta.TypeID.ByteTypeID";
-    }
-
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("if ("+l+"<1) {\n");
@@ -57,20 +56,9 @@
     }
   }
   
-  class CppByte extends CppType {
-    
-    CppByte() {
-      super("int8_t");
-    }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_BYTE)";
-    }
-  }
-
   public JByte() {
     setJavaType(new JavaByte());
-    setCppType(new CppByte());
+    setCppType(new CppType("int8_t"));
     setCType(new CType());
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JCompType.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JCompType.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JCompType.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JCompType.java Thu Jan 17 00:55:47 2008
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.record.compiler;
 
-
 /**
  * Abstract base class for all the "compound" types such as ustring,
  * buffer, vector, map, and record.
@@ -27,30 +26,28 @@
   
   abstract class JavaCompType extends JavaType {
     
-    JavaCompType(String type, String suffix, String wrapper, 
-        String typeIDByteString) { 
-      super(type, suffix, wrapper, typeIDByteString);
+    JavaCompType(String type, String suffix, String wrapper) {
+      super(type, suffix, wrapper);
     }
     
     void genCompareTo(CodeBuffer cb, String fname, String other) {
-      cb.append(Consts.RIO_PREFIX + "ret = "+fname+".compareTo("+other+");\n");
+      cb.append("ret = "+fname+".compareTo("+other+");\n");
     }
     
     void genEquals(CodeBuffer cb, String fname, String peer) {
-      cb.append(Consts.RIO_PREFIX + "ret = "+fname+".equals("+peer+");\n");
+      cb.append("ret = "+fname+".equals("+peer+");\n");
     }
     
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append(Consts.RIO_PREFIX + "ret = "+fname+".hashCode();\n");
+      cb.append("ret = "+fname+".hashCode();\n");
     }
     
     void genClone(CodeBuffer cb, String fname) {
-      cb.append(Consts.RIO_PREFIX + "other."+fname+" = ("+getType()+") this."+
-          fname+".clone();\n");
+      cb.append("other."+fname+" = ("+getType()+") this."+fname+".clone();\n");
     }
   }
   
-  abstract class CppCompType extends CppType {
+  class CppCompType extends CppType {
     
     CppCompType(String type) {
       super(type);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JDouble.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JDouble.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JDouble.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JDouble.java Thu Jan 17 00:55:47 2008
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JType.CType;
+import org.apache.hadoop.record.compiler.JType.CppType;
 
 /**
  */
@@ -26,16 +28,12 @@
   class JavaDouble extends JavaType {
     
     JavaDouble() {
-      super("double", "Double", "Double", "TypeID.RIOType.DOUBLE");
+      super("double", "Double", "Double");
     }
     
-    String getTypeIDObjectString() {
-      return "org.apache.hadoop.record.meta.TypeID.DoubleTypeID";
-    }
-
     void genHashCode(CodeBuffer cb, String fname) {
       String tmp = "Double.doubleToLongBits("+fname+")";
-      cb.append(Consts.RIO_PREFIX + "ret = (int)("+tmp+"^("+tmp+">>>32));\n");
+      cb.append("ret = (int)("+tmp+"^("+tmp+">>>32));\n");
     }
     
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
@@ -63,23 +61,10 @@
       cb.append("}\n");
     }
   }
-
-  class CppDouble extends CppType {
-    
-    CppDouble() {
-      super("double");
-    }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_DOUBLE)";
-    }
-  }
-
-  
   /** Creates a new instance of JDouble */
   public JDouble() {
     setJavaType(new JavaDouble());
-    setCppType(new CppDouble());
+    setCppType(new CppType("double"));
     setCType(new CType());
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java Thu Jan 17 00:55:47 2008
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JType.CType;
+import org.apache.hadoop.record.compiler.JType.CppType;
+
 /**
  */
 public class JFloat extends JType {
@@ -25,15 +28,11 @@
   class JavaFloat extends JavaType {
     
     JavaFloat() {
-      super("float", "Float", "Float", "TypeID.RIOType.FLOAT");
+      super("float", "Float", "Float");
     }
     
-    String getTypeIDObjectString() {
-      return "org.apache.hadoop.record.meta.TypeID.FloatTypeID";
-    }
-
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append(Consts.RIO_PREFIX + "ret = Float.floatToIntBits("+fname+");\n");
+      cb.append("ret = Float.floatToIntBits("+fname+");\n");
     }
     
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
@@ -61,22 +60,10 @@
       cb.append("}\n");
     }
   }
-
-  class CppFloat extends CppType {
-    
-    CppFloat() {
-      super("float");
-    }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_FLOAT)";
-    }
-  }
-
   /** Creates a new instance of JFloat */
   public JFloat() {
     setJavaType(new JavaFloat());
-    setCppType(new CppFloat());
+    setCppType(new CppType("float"));
     setCType(new CType());
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JInt.java Thu Jan 17 00:55:47 2008
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JType.CType;
+import org.apache.hadoop.record.compiler.JType.CppType;
 
 /**
  * Code generator for "int" type
@@ -27,13 +29,9 @@
   class JavaInt extends JavaType {
     
     JavaInt() {
-      super("int", "Int", "Integer", "TypeID.RIOType.INT");
+      super("int", "Int", "Integer");
     }
     
-    String getTypeIDObjectString() {
-      return "org.apache.hadoop.record.meta.TypeID.IntTypeID";
-    }
-
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
@@ -55,22 +53,10 @@
       cb.append("}\n");
     }
   }
-
-  class CppInt extends CppType {
-    
-    CppInt() {
-      super("int32_t");
-    }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_INT)";
-    }
-  }
-
   /** Creates a new instance of JInt */
   public JInt() {
     setJavaType(new JavaInt());
-    setCppType(new CppInt());
+    setCppType(new CppType("int32_t"));
     setCType(new CType());
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JLong.java Thu Jan 17 00:55:47 2008
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JType.CType;
+import org.apache.hadoop.record.compiler.JType.CppType;
+
 /**
  * Code generator for "long" type
  */
@@ -26,16 +29,11 @@
   class JavaLong extends JavaType {
     
     JavaLong() {
-      super("long", "Long", "Long", "TypeID.RIOType.LONG");
+      super("long", "Long", "Long");
     }
     
-    String getTypeIDObjectString() {
-      return "org.apache.hadoop.record.meta.TypeID.LongTypeID";
-    }
-
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append(Consts.RIO_PREFIX + "ret = (int) ("+fname+"^("+
-          fname+">>>32));\n");
+      cb.append("ret = (int) ("+fname+"^("+fname+">>>32));\n");
     }
     
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
@@ -59,22 +57,10 @@
       cb.append("}\n");
     }
   }
-
-  class CppLong extends CppType {
-    
-    CppLong() {
-      super("int64_t");
-    }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_LONG)";
-    }
-  }
-
   /** Creates a new instance of JLong */
   public JLong() {
     setJavaType(new JavaLong());
-    setCppType(new CppLong());
+    setCppType(new CppType("int64_t"));
     setCType(new CType());
   }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java Thu Jan 17 00:55:47 2008
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.record.compiler;
 
-import java.util.Map;
-
-
 /**
  */
 public class JMap extends JCompType {
@@ -46,48 +43,31 @@
     JavaMap(JType.JavaType key, JType.JavaType value) {
       super("java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">",
             "Map",
-            "java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">",
-            "TypeID.RIOType.MAP");
+            "java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">");
       this.key = key;
       this.value = value;
     }
     
-    String getTypeIDObjectString() {
-      return "new org.apache.hadoop.record.meta.MapTypeID(" + 
-        key.getTypeIDObjectString() + ", " + 
-        value.getTypeIDObjectString() + ")";
-    }
-
-    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
-      key.genSetRTIFilter(cb, nestedStructMap);
-      value.genSetRTIFilter(cb, nestedStructMap);
-    }
-
     void genCompareTo(CodeBuffer cb, String fname, String other) {
       String setType = "java.util.Set<"+key.getWrapperType()+"> ";
       String iterType = "java.util.Iterator<"+key.getWrapperType()+"> ";
       cb.append("{\n");
-      cb.append(setType+getId(Consts.RIO_PREFIX + "set1")+" = "+
-          fname+".keySet();\n");
-      cb.append(setType+getId(Consts.RIO_PREFIX + "set2")+" = "+
-          other+".keySet();\n");
-      cb.append(iterType+getId(Consts.RIO_PREFIX + "miter1")+" = "+
-                getId(Consts.RIO_PREFIX + "set1")+".iterator();\n");
-      cb.append(iterType+getId(Consts.RIO_PREFIX + "miter2")+" = "+
-                getId(Consts.RIO_PREFIX + "set2")+".iterator();\n");
-      cb.append("for(; "+getId(Consts.RIO_PREFIX + "miter1")+".hasNext() && "+
-                getId(Consts.RIO_PREFIX + "miter2")+".hasNext();) {\n");
-      cb.append(key.getType()+" "+getId(Consts.RIO_PREFIX + "k1")+
-                " = "+getId(Consts.RIO_PREFIX + "miter1")+".next();\n");
-      cb.append(key.getType()+" "+getId(Consts.RIO_PREFIX + "k2")+
-                " = "+getId(Consts.RIO_PREFIX + "miter2")+".next();\n");
-      key.genCompareTo(cb, getId(Consts.RIO_PREFIX + "k1"), 
-          getId(Consts.RIO_PREFIX + "k2"));
-      cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) { return " + 
-          Consts.RIO_PREFIX + "ret; }\n");
+      cb.append(setType+getId("set1")+" = "+fname+".keySet();\n");
+      cb.append(setType+getId("set2")+" = "+other+".keySet();\n");
+      cb.append(iterType+getId("miter1")+" = "+
+                getId("set1")+".iterator();\n");
+      cb.append(iterType+getId("miter2")+" = "+
+                getId("set2")+".iterator();\n");
+      cb.append("for(; "+getId("miter1")+".hasNext() && "+
+                getId("miter2")+".hasNext();) {\n");
+      cb.append(key.getType()+" "+getId("k1")+
+                " = "+getId("miter1")+".next();\n");
+      cb.append(key.getType()+" "+getId("k2")+
+                " = "+getId("miter2")+".next();\n");
+      key.genCompareTo(cb, getId("k1"), getId("k2"));
+      cb.append("if (ret != 0) { return ret; }\n");
       cb.append("}\n");
-      cb.append(Consts.RIO_PREFIX + "ret = ("+getId(Consts.RIO_PREFIX + "set1")+
-          ".size() - "+getId(Consts.RIO_PREFIX + "set2")+".size());\n");
+      cb.append("ret = ("+getId("set1")+".size() - "+getId("set2")+".size());\n");
       cb.append("}\n");
     }
     
@@ -97,20 +77,14 @@
       }
       cb.append("{\n");
       incrLevel();
-      cb.append("org.apache.hadoop.record.Index " + 
-          getId(Consts.RIO_PREFIX + "midx")+" = " + 
-          Consts.RECORD_INPUT + ".startMap(\""+tag+"\");\n");
+      cb.append("org.apache.hadoop.record.Index "+getId("midx")+" = a.startMap(\""+tag+"\");\n");
       cb.append(fname+"=new "+getType()+"();\n");
-      cb.append("for (; !"+getId(Consts.RIO_PREFIX + "midx")+".done(); "+
-          getId(Consts.RIO_PREFIX + "midx")+".incr()) {\n");
-      key.genReadMethod(cb, getId(Consts.RIO_PREFIX + "k"),
-          getId(Consts.RIO_PREFIX + "k"), true);
-      value.genReadMethod(cb, getId(Consts.RIO_PREFIX + "v"), 
-          getId(Consts.RIO_PREFIX + "v"), true);
-      cb.append(fname+".put("+getId(Consts.RIO_PREFIX + "k")+","+
-          getId(Consts.RIO_PREFIX + "v")+");\n");
+      cb.append("for (; !"+getId("midx")+".done(); "+getId("midx")+".incr()) {\n");
+      key.genReadMethod(cb, getId("k"),getId("k"), true);
+      value.genReadMethod(cb, getId("v"), getId("v"), true);
+      cb.append(fname+".put("+getId("k")+","+getId("v")+");\n");
       cb.append("}\n");
-      cb.append(Consts.RECORD_INPUT + ".endMap(\""+tag+"\");\n");
+      cb.append("a.endMap(\""+tag+"\");\n");
       decrLevel();
       cb.append("}\n");
     }
@@ -124,24 +98,16 @@
         key.getWrapperType()+","+value.getWrapperType()+">> ";
       cb.append("{\n");
       incrLevel();
-      cb.append(Consts.RECORD_OUTPUT + ".startMap("+fname+",\""+tag+"\");\n");
-      cb.append(setType+getId(Consts.RIO_PREFIX + "es")+" = "+
-          fname+".entrySet();\n");
-      cb.append("for("+iterType+getId(Consts.RIO_PREFIX + "midx")+" = "+
-          getId(Consts.RIO_PREFIX + "es")+".iterator(); "+
-          getId(Consts.RIO_PREFIX + "midx")+".hasNext();) {\n");
-      cb.append(entryType+getId(Consts.RIO_PREFIX + "me")+" = "+
-          getId(Consts.RIO_PREFIX + "midx")+".next();\n");
-      cb.append(key.getType()+" "+getId(Consts.RIO_PREFIX + "k")+" = "+
-          getId(Consts.RIO_PREFIX + "me")+".getKey();\n");
-      cb.append(value.getType()+" "+getId(Consts.RIO_PREFIX + "v")+" = "+
-          getId(Consts.RIO_PREFIX + "me")+".getValue();\n");
-      key.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "k"), 
-          getId(Consts.RIO_PREFIX + "k"));
-      value.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "v"), 
-          getId(Consts.RIO_PREFIX + "v"));
+      cb.append("a.startMap("+fname+",\""+tag+"\");\n");
+      cb.append(setType+getId("es")+" = "+fname+".entrySet();\n");
+      cb.append("for("+iterType+getId("midx")+" = "+getId("es")+".iterator(); "+getId("midx")+".hasNext();) {\n");
+      cb.append(entryType+getId("me")+" = "+getId("midx")+".next();\n");
+      cb.append(key.getType()+" "+getId("k")+" = "+getId("me")+".getKey();\n");
+      cb.append(value.getType()+" "+getId("v")+" = "+getId("me")+".getValue();\n");
+      key.genWriteMethod(cb, getId("k"), getId("k"));
+      value.genWriteMethod(cb, getId("v"), getId("v"));
       cb.append("}\n");
-      cb.append(Consts.RECORD_OUTPUT + ".endMap("+fname+",\""+tag+"\");\n");
+      cb.append("a.endMap("+fname+",\""+tag+"\");\n");
       cb.append("}\n");
       decrLevel();
     }
@@ -190,34 +156,11 @@
     }
   }
   
-  class CppMap extends CppCompType {
-    
-    JType.CppType key;
-    JType.CppType value;
-    
-    CppMap(JType.CppType key, JType.CppType value) {
-      super("::std::map< "+key.getType()+", "+ value.getType()+" >");
-      this.key = key;
-      this.value = value;
-    }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::MapTypeID(" + 
-        key.getTypeIDObjectString() + ", " + 
-        value.getTypeIDObjectString() + ")";
-    }
-
-    void genSetRTIFilter(CodeBuffer cb) {
-      key.genSetRTIFilter(cb);
-      value.genSetRTIFilter(cb);
-    }
-
-  }
-  
   /** Creates a new instance of JMap */
   public JMap(JType t1, JType t2) {
     setJavaType(new JavaMap(t1.getJavaType(), t2.getJavaType()));
-    setCppType(new CppMap(t1.getCppType(), t2.getCppType()));
+    setCppType(new CppCompType(" ::std::map<"+t1.getCppType().getType()+","+
+                               t2.getCppType().getType()+">"));
     setCType(new CType());
     keyType = t1;
     valueType = t2;

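Under the reverted JMap constructor above, the compiler no longer builds a CppMap with per-key and per-value type-ID objects; it simply concatenates a type string. For a DDL field such as map<ustring, ustring> MapVal from test.jr above, the generated member declaration would look roughly like this (illustrative only, assuming ustring maps to ::std::string as in the test code):

  // Illustrative generated member declaration, not verbatim compiler output.
  ::std::map< ::std::string, ::std::string > MapVal;
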
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java Thu Jan 17 00:55:47 2008
@@ -21,7 +21,8 @@
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Iterator;
 
 /**
  */
@@ -36,7 +37,7 @@
       new ArrayList<JField<JavaType>>();
     
     JavaRecord(String name, ArrayList<JField<JType>> flist) {
-      super(name, "Record", name, "TypeID.RIOType.STRUCT");
+      super(name, "Record", name);
       this.fullName = name;
       int idx = name.lastIndexOf('.');
       this.name = name.substring(idx+1);
@@ -47,91 +48,16 @@
       }
     }
     
-    String getTypeIDObjectString() {
-      return "new org.apache.hadoop.record.meta.StructTypeID(" + 
-      fullName + ".getTypeInfo())";
-    }
-
-    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
-      // ignore, if we'ev already set the type filter for this record
-      if (!nestedStructMap.containsKey(fullName)) {
-        // we set the RTI filter here
-        cb.append(fullName + ".setTypeFilter(rti.getNestedStructTypeInfo(\""+
-            name + "\"));\n");
-        nestedStructMap.put(fullName, null);
-      }
-    }
-
-    // for each typeInfo in the filter, we see if there's a similar one in the record. 
-    // Since we store typeInfos in ArrayLists, thsi search is O(n squared). We do it faster
-    // if we also store a map (of TypeInfo to index), but since setupRtiFields() is called
-    // only once when deserializing, we're sticking with the former, as the code is easier.  
-    void genSetupRtiFields(CodeBuffer cb) {
-      cb.append("private static void setupRtiFields()\n{\n");
-      cb.append("if (null == " + Consts.RTI_FILTER + ") return;\n");
-      cb.append("// we may already have done this\n");
-      cb.append("if (null != " + Consts.RTI_FILTER_FIELDS + ") return;\n");
-      cb.append("int " + Consts.RIO_PREFIX + "i, " + Consts.RIO_PREFIX + "j;\n");
-      cb.append(Consts.RTI_FILTER_FIELDS + " = new int [" + 
-          Consts.RIO_PREFIX + "rtiFilter.getFieldTypeInfos().size()];\n");
-      cb.append("for (" + Consts.RIO_PREFIX + "i=0; " + Consts.RIO_PREFIX + "i<"+
-          Consts.RTI_FILTER_FIELDS + ".length; " + Consts.RIO_PREFIX + "i++) {\n");
-      cb.append(Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + "i] = 0;\n");
-      cb.append("}\n");
-      cb.append("java.util.Iterator<org.apache.hadoop.record.meta." +
-          "FieldTypeInfo> " + Consts.RIO_PREFIX + "itFilter = " + 
-          Consts.RIO_PREFIX + "rtiFilter.getFieldTypeInfos().iterator();\n");
-      cb.append(Consts.RIO_PREFIX + "i=0;\n");
-      cb.append("while (" + Consts.RIO_PREFIX + "itFilter.hasNext()) {\n");
-      cb.append("org.apache.hadoop.record.meta.FieldTypeInfo " + 
-          Consts.RIO_PREFIX + "tInfoFilter = " + 
-          Consts.RIO_PREFIX + "itFilter.next();\n");
-      cb.append("java.util.Iterator<org.apache.hadoop.record.meta." + 
-          "FieldTypeInfo> " + Consts.RIO_PREFIX + "it = " + Consts.RTI_VAR + 
-          ".getFieldTypeInfos().iterator();\n");
-      cb.append(Consts.RIO_PREFIX + "j=1;\n");
-      cb.append("while (" + Consts.RIO_PREFIX + "it.hasNext()) {\n");
-      cb.append("org.apache.hadoop.record.meta.FieldTypeInfo " + 
-          Consts.RIO_PREFIX + "tInfo = " + Consts.RIO_PREFIX + "it.next();\n");
-      cb.append("if (" + Consts.RIO_PREFIX + "tInfo.equals(" +  
-          Consts.RIO_PREFIX + "tInfoFilter)) {\n");
-      cb.append(Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + "i] = " +
-          Consts.RIO_PREFIX + "j;\n");
-      cb.append("break;\n");
-      cb.append("}\n");
-      cb.append(Consts.RIO_PREFIX + "j++;\n");
-      cb.append("}\n");
-      /*int ct = 0;
-      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
-        ct++;
-        JField<JavaType> jf = i.next();
-        JavaType type = jf.getType();
-        String name = jf.getName();
-        if (ct != 1) {
-          cb.append("else ");
-        }
-        type.genRtiFieldCondition(cb, name, ct);
-      }
-      if (ct != 0) {
-        cb.append("else {\n");
-        cb.append("rtiFilterFields[i] = 0;\n");
-        cb.append("}\n");
-      }*/
-      cb.append(Consts.RIO_PREFIX + "i++;\n");
-      cb.append("}\n");
-      cb.append("}\n");
-    }
-
     void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
       if (decl) {
         cb.append(fullName+" "+fname+";\n");
       }
       cb.append(fname+"= new "+fullName+"();\n");
-      cb.append(fname+".deserialize(" + Consts.RECORD_INPUT + ",\""+tag+"\");\n");
+      cb.append(fname+".deserialize(a,\""+tag+"\");\n");
     }
     
     void genWriteMethod(CodeBuffer cb, String fname, String tag) {
-      cb.append(fname+".serialize(" + Consts.RECORD_OUTPUT + ",\""+tag+"\");\n");
+      cb.append(fname+".serialize(a,\""+tag+"\");\n");
     }
     
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
@@ -174,40 +100,15 @@
       cb.append("public class "+name+
                 " extends org.apache.hadoop.record.Record {\n");
       
-      // type information declarations
-      cb.append("private static final " + 
-          "org.apache.hadoop.record.meta.RecordTypeInfo " + 
-          Consts.RTI_VAR + ";\n");
-      cb.append("private static " + 
-          "org.apache.hadoop.record.meta.RecordTypeInfo " + 
-          Consts.RTI_FILTER + ";\n");
-      cb.append("private static int[] " + Consts.RTI_FILTER_FIELDS + ";\n");
-      
-      // static init for type information
-      cb.append("static {\n");
-      cb.append(Consts.RTI_VAR + " = " +
-          "new org.apache.hadoop.record.meta.RecordTypeInfo(\"" +
-          name + "\");\n");
-      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
-        JField<JavaType> jf = i.next();
-        String name = jf.getName();
-        JavaType type = jf.getType();
-        type.genStaticTypeInfo(cb, name);
-      }
-      cb.append("}\n\n");
-
-      // field definitions
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genDecl(cb, name);
       }
-
-      // default constructor
       cb.append("public "+name+"() { }\n");
       
-      // constructor
+      
       cb.append("public "+name+"(\n");
       int fIdx = 0;
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext(); fIdx++) {
@@ -226,182 +127,95 @@
         type.genConstructorSet(cb, name);
       }
       cb.append("}\n");
-
-      // getter/setter for type info
-      cb.append("public static org.apache.hadoop.record.meta.RecordTypeInfo"
-              + " getTypeInfo() {\n");
-      cb.append("return " + Consts.RTI_VAR + ";\n");
-      cb.append("}\n");
-      cb.append("public static void setTypeFilter("
-          + "org.apache.hadoop.record.meta.RecordTypeInfo rti) {\n");
-      cb.append("if (null == rti) return;\n");
-      cb.append(Consts.RTI_FILTER + " = rti;\n");
-      cb.append(Consts.RTI_FILTER_FIELDS + " = null;\n");
-      // set RTIFilter for nested structs.
-      // To prevent setting up the type filter for the same struct more than once, 
-      // we use a hash map to keep track of what we've set. 
-      Map<String, Integer> nestedStructMap = new HashMap<String, Integer>();
-      for (JField<JavaType> jf : fields) {
-        JavaType type = jf.getType();
-        type.genSetRTIFilter(cb, nestedStructMap);
-      }
-      cb.append("}\n");
-
-      // setupRtiFields()
-      genSetupRtiFields(cb);
-
-      // getters/setters for member variables
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genGetSet(cb, name);
       }
-      
-      // serialize()
-      cb.append("public void serialize("+ 
-          "final org.apache.hadoop.record.RecordOutput " + 
-          Consts.RECORD_OUTPUT + ", final String " + Consts.TAG + ")\n"+
+      cb.append("public void serialize("+
+                "final org.apache.hadoop.record.RecordOutput a, final String tag)\n"+
                 "throws java.io.IOException {\n");
-      cb.append(Consts.RECORD_OUTPUT + ".startRecord(this," + Consts.TAG + ");\n");
+      cb.append("a.startRecord(this,tag);\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genWriteMethod(cb, name, name);
       }
-      cb.append(Consts.RECORD_OUTPUT + ".endRecord(this," + Consts.TAG+");\n");
+      cb.append("a.endRecord(this,tag);\n");
       cb.append("}\n");
-
-      // deserializeWithoutFilter()
-      cb.append("private void deserializeWithoutFilter("+
-                "final org.apache.hadoop.record.RecordInput " + 
-                Consts.RECORD_INPUT + ", final String " + Consts.TAG + ")\n"+
+      
+      cb.append("public void deserialize("+
+                "final org.apache.hadoop.record.RecordInput a, final String tag)\n"+
                 "throws java.io.IOException {\n");
-      cb.append(Consts.RECORD_INPUT + ".startRecord(" + Consts.TAG + ");\n");
+      cb.append("a.startRecord(tag);\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genReadMethod(cb, name, name, false);
       }
-      cb.append(Consts.RECORD_INPUT + ".endRecord(" + Consts.TAG+");\n");
+      cb.append("a.endRecord(tag);\n");
       cb.append("}\n");
       
-      // deserialize()
-      cb.append("public void deserialize(final " +
-          "org.apache.hadoop.record.RecordInput " + 
-          Consts.RECORD_INPUT + ", final String " + Consts.TAG + ")\n"+
-          "throws java.io.IOException {\n");
-      cb.append("if (null == " + Consts.RTI_FILTER + ") {\n");
-      cb.append("deserializeWithoutFilter(" + Consts.RECORD_INPUT + ", " + 
-          Consts.TAG + ");\n");
-      cb.append("return;\n");
-      cb.append("}\n");
-      cb.append("// if we're here, we need to read based on version info\n");
-      cb.append(Consts.RECORD_INPUT + ".startRecord(" + Consts.TAG + ");\n");
-      cb.append("setupRtiFields();\n");
-      cb.append("for (int " + Consts.RIO_PREFIX + "i=0; " + Consts.RIO_PREFIX + 
-          "i<" + Consts.RTI_FILTER + ".getFieldTypeInfos().size(); " + 
-          Consts.RIO_PREFIX + "i++) {\n");
-      int ct = 0;
-      for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
-        JField<JavaType> jf = i.next();
-        String name = jf.getName();
-        JavaType type = jf.getType();
-        ct++;
-        if (1 != ct) {
-          cb.append("else ");
-        }
-        cb.append("if (" + ct + " == " + Consts.RTI_FILTER_FIELDS + "[" +
-            Consts.RIO_PREFIX + "i]) {\n");
-        type.genReadMethod(cb, name, name, false);
-        cb.append("}\n");
-      }
-      if (0 != ct) {
-        cb.append("else {\n");
-        cb.append("java.util.ArrayList<"
-                + "org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = "
-                + "(java.util.ArrayList<"
-                + "org.apache.hadoop.record.meta.FieldTypeInfo>)"
-                + "(" + Consts.RTI_FILTER + ".getFieldTypeInfos());\n");
-        cb.append("org.apache.hadoop.record.meta.Utils.skip(" + 
-            Consts.RECORD_INPUT + ", " + "typeInfos.get(" + Consts.RIO_PREFIX + 
-            "i).getFieldID(), typeInfos.get(" + 
-            Consts.RIO_PREFIX + "i).getTypeID());\n");
-        cb.append("}\n");
-      }
-      cb.append("}\n");
-      cb.append(Consts.RECORD_INPUT + ".endRecord(" + Consts.TAG+");\n");
-      cb.append("}\n");
-
-      // compareTo()
-      cb.append("public int compareTo (final Object " + Consts.RIO_PREFIX + 
-          "peer_) throws ClassCastException {\n");
-      cb.append("if (!(" + Consts.RIO_PREFIX + "peer_ instanceof "+name+")) {\n");
+      cb.append("public int compareTo (final Object peer_) throws ClassCastException {\n");
+      cb.append("if (!(peer_ instanceof "+name+")) {\n");
       cb.append("throw new ClassCastException(\"Comparing different types of records.\");\n");
       cb.append("}\n");
-      cb.append(name+" " + Consts.RIO_PREFIX + "peer = ("+name+") " + 
-          Consts.RIO_PREFIX + "peer_;\n");
-      cb.append("int " + Consts.RIO_PREFIX + "ret = 0;\n");
+      cb.append(name+" peer = ("+name+") peer_;\n");
+      cb.append("int ret = 0;\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
-        type.genCompareTo(cb, name, Consts.RIO_PREFIX + "peer."+name);
-        cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) return " + 
-            Consts.RIO_PREFIX + "ret;\n");
+        type.genCompareTo(cb, name, "peer."+name);
+        cb.append("if (ret != 0) return ret;\n");
       }
-      cb.append("return " + Consts.RIO_PREFIX + "ret;\n");
+      cb.append("return ret;\n");
       cb.append("}\n");
       
-      // equals()
-      cb.append("public boolean equals(final Object " + Consts.RIO_PREFIX + 
-          "peer_) {\n");
-      cb.append("if (!(" + Consts.RIO_PREFIX + "peer_ instanceof "+name+")) {\n");
+      cb.append("public boolean equals(final Object peer_) {\n");
+      cb.append("if (!(peer_ instanceof "+name+")) {\n");
       cb.append("return false;\n");
       cb.append("}\n");
-      cb.append("if (" + Consts.RIO_PREFIX + "peer_ == this) {\n");
+      cb.append("if (peer_ == this) {\n");
       cb.append("return true;\n");
       cb.append("}\n");
-      cb.append(name+" " + Consts.RIO_PREFIX + "peer = ("+name+") " + 
-          Consts.RIO_PREFIX + "peer_;\n");
-      cb.append("boolean " + Consts.RIO_PREFIX + "ret = false;\n");
+      cb.append(name+" peer = ("+name+") peer_;\n");
+      cb.append("boolean ret = false;\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
-        type.genEquals(cb, name, Consts.RIO_PREFIX + "peer."+name);
-        cb.append("if (!" + Consts.RIO_PREFIX + "ret) return " + 
-            Consts.RIO_PREFIX + "ret;\n");
+        type.genEquals(cb, name, "peer."+name);
+        cb.append("if (!ret) return ret;\n");
       }
-      cb.append("return " + Consts.RIO_PREFIX + "ret;\n");
+      cb.append("return ret;\n");
       cb.append("}\n");
-
-      // clone()
+      
       cb.append("public Object clone() throws CloneNotSupportedException {\n");
-      cb.append(name+" " + Consts.RIO_PREFIX + "other = new "+name+"();\n");
+      cb.append(name+" other = new "+name+"();\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genClone(cb, name);
       }
-      cb.append("return " + Consts.RIO_PREFIX + "other;\n");
+      cb.append("return other;\n");
       cb.append("}\n");
       
       cb.append("public int hashCode() {\n");
-      cb.append("int " + Consts.RIO_PREFIX + "result = 17;\n");
-      cb.append("int " + Consts.RIO_PREFIX + "ret;\n");
+      cb.append("int result = 17;\n");
+      cb.append("int ret;\n");
       for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
         JField<JavaType> jf = i.next();
         String name = jf.getName();
         JavaType type = jf.getType();
         type.genHashCode(cb, name);
-        cb.append(Consts.RIO_PREFIX + "result = 37*" + Consts.RIO_PREFIX + 
-            "result + " + Consts.RIO_PREFIX + "ret;\n");
+        cb.append("result = 37*result + ret;\n");
       }
-      cb.append("return " + Consts.RIO_PREFIX + "result;\n");
+      cb.append("return result;\n");
       cb.append("}\n");
       
       cb.append("public static String signature() {\n");
@@ -481,51 +295,10 @@
       }
     }
     
-    String getTypeIDObjectString() {
-      return "new ::hadoop::StructTypeID(" + 
-      fullName + "::getTypeInfo().getFieldTypeInfos())";
-    }
-
     String genDecl(String fname) {
       return "  "+name+" "+fname+";\n";
     }
     
-    void genSetRTIFilter(CodeBuffer cb) {
-      // we set the RTI filter here
-      cb.append(fullName + "::setTypeFilter(rti.getNestedStructTypeInfo(\""+
-          name + "\"));\n");
-    }
-
-    void genSetupRTIFields(CodeBuffer cb) {
-      cb.append("void " + fullName + "::setupRtiFields() {\n");
-      cb.append("if (NULL == p" + Consts.RTI_FILTER + ") return;\n");
-      cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") return;\n");
-      cb.append("p" + Consts.RTI_FILTER_FIELDS + " = new int[p" + 
-          Consts.RTI_FILTER + "->getFieldTypeInfos().size()];\n");
-      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " + 
-          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER + 
-          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
-      cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + 
-          "i] = 0;\n");
-      cb.append("}\n");
-      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " + 
-          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER + 
-          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
-      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "j=0; " + 
-          Consts.RIO_PREFIX + "j<p" + Consts.RTI_VAR + 
-          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "j++) {\n");
-      cb.append("if (*(p" + Consts.RTI_FILTER + "->getFieldTypeInfos()[" + 
-          Consts.RIO_PREFIX + "i]) == *(p" + Consts.RTI_VAR + 
-          "->getFieldTypeInfos()[" + Consts.RIO_PREFIX + "j])) {\n");
-      cb.append("p" + Consts.RTI_FILTER_FIELDS + "[" + Consts.RIO_PREFIX + 
-          "i] = " + Consts.RIO_PREFIX + "j+1;\n");
-      cb.append("break;\n");
-      cb.append("}\n");
-      cb.append("}\n");
-      cb.append("}\n");
-      cb.append("}\n");
-    }
-    
     void genCode(FileWriter hh, FileWriter cc, ArrayList<String> options)
       throws IOException {
       CodeBuffer hb = new CodeBuffer();
@@ -544,24 +317,9 @@
         CppType type = jf.getType();
         type.genDecl(hb, name);
       }
-      
-      // type info vars
-      hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_VAR + ";\n");
-      hb.append("static ::hadoop::RecordTypeInfo* p" + Consts.RTI_FILTER + ";\n");
-      hb.append("static int* p" + Consts.RTI_FILTER_FIELDS + ";\n");
-      hb.append("static ::hadoop::RecordTypeInfo* setupTypeInfo();\n");
-      hb.append("static void setupRtiFields();\n");
-      hb.append("virtual void deserializeWithoutFilter(::hadoop::IArchive& " + 
-          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
       hb.append("public:\n");
-      hb.append("static const ::hadoop::RecordTypeInfo& getTypeInfo() " +
-          "{return *p" + Consts.RTI_VAR + ";}\n");
-      hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo& rti);\n");
-      hb.append("static void setTypeFilter(const ::hadoop::RecordTypeInfo* prti);\n");
-      hb.append("virtual void serialize(::hadoop::OArchive& " + 
-          Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const;\n");
-      hb.append("virtual void deserialize(::hadoop::IArchive& " + 
-          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ");\n");
+      hb.append("virtual void serialize(::hadoop::OArchive& a_, const char* tag) const;\n");
+      hb.append("virtual void deserialize(::hadoop::IArchive& a_, const char* tag);\n");
       hb.append("virtual const ::std::string& type() const;\n");
       hb.append("virtual const ::std::string& signature() const;\n");
       hb.append("virtual bool operator<(const "+name+"& peer_) const;\n");
@@ -581,151 +339,40 @@
       hh.write(hb.toString());
       
       CodeBuffer cb = new CodeBuffer();
-
-      // initialize type info vars
-      cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" + 
-          Consts.RTI_VAR + " = " + fullName + "::setupTypeInfo();\n");
-      cb.append("::hadoop::RecordTypeInfo* " + fullName + "::p" + 
-          Consts.RTI_FILTER + " = NULL;\n");
-      cb.append("int* " + fullName + "::p" + 
-          Consts.RTI_FILTER_FIELDS + " = NULL;\n\n");
-
-      // setupTypeInfo()
-      cb.append("::hadoop::RecordTypeInfo* "+fullName+"::setupTypeInfo() {\n");
-      cb.append("::hadoop::RecordTypeInfo* p = new ::hadoop::RecordTypeInfo(\"" + 
-          name + "\");\n");
-      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
-        JField<CppType> jf = i.next();
-        String name = jf.getName();
-        CppType type = jf.getType();
-        type.genStaticTypeInfo(cb, name);
-      }
-      cb.append("return p;\n");
-      cb.append("}\n");
-
-      // setTypeFilter()
-      cb.append("void "+fullName+"::setTypeFilter(const " +
-          "::hadoop::RecordTypeInfo& rti) {\n");
-      cb.append("if (NULL != p" + Consts.RTI_FILTER + ") {\n");
-      cb.append("delete p" + Consts.RTI_FILTER + ";\n");
-      cb.append("}\n");
-      cb.append("p" + Consts.RTI_FILTER + " = new ::hadoop::RecordTypeInfo(rti);\n");
-      cb.append("if (NULL != p" + Consts.RTI_FILTER_FIELDS + ") {\n");
-      cb.append("delete p" + Consts.RTI_FILTER_FIELDS + ";\n");
-      cb.append("}\n");
-      cb.append("p" + Consts.RTI_FILTER_FIELDS + " = NULL;\n");
-      // set RTIFilter for nested structs. We may end up with multiple lines that 
-      // do the same thing, if the same struct is nested in more than one field, 
-      // but that's OK. 
-      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
-        JField<CppType> jf = i.next();
-        CppType type = jf.getType();
-        type.genSetRTIFilter(cb);
-      }
-      cb.append("}\n");
       
-      // setTypeFilter()
-      cb.append("void "+fullName+"::setTypeFilter(const " +
-          "::hadoop::RecordTypeInfo* prti) {\n");
-      cb.append("if (NULL != prti) {\n");
-      cb.append("setTypeFilter(*prti);\n");
-      cb.append("}\n");
-      cb.append("}\n");
-
-      // setupRtiFields()
-      genSetupRTIFields(cb);
-
-      // serialize()
-      cb.append("void "+fullName+"::serialize(::hadoop::OArchive& " + 
-          Consts.RECORD_OUTPUT + ", const char* " + Consts.TAG + ") const {\n");
-      cb.append(Consts.RECORD_OUTPUT + ".startRecord(*this," + 
-          Consts.TAG + ");\n");
+      cb.append("void "+fullName+"::serialize(::hadoop::OArchive& a_, const char* tag) const {\n");
+      cb.append("a_.startRecord(*this,tag);\n");
       for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
         JField<CppType> jf = i.next();
         String name = jf.getName();
         CppType type = jf.getType();
         if (type instanceof JBuffer.CppBuffer) {
-          cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+","+name+
-              ".length(),\""+name+"\");\n");
+          cb.append("a_.serialize("+name+","+name+".length(),\""+name+"\");\n");
         } else {
-          cb.append(Consts.RECORD_OUTPUT + ".serialize("+name+",\""+
-              name+"\");\n");
+          cb.append("a_.serialize("+name+",\""+name+"\");\n");
         }
       }
-      cb.append(Consts.RECORD_OUTPUT + ".endRecord(*this," + Consts.TAG + ");\n");
+      cb.append("a_.endRecord(*this,tag);\n");
       cb.append("return;\n");
       cb.append("}\n");
       
-      // deserializeWithoutFilter()
-      cb.append("void "+fullName+"::deserializeWithoutFilter(::hadoop::IArchive& " +
-          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
-      cb.append(Consts.RECORD_INPUT + ".startRecord(*this," + 
-          Consts.TAG + ");\n");
+      cb.append("void "+fullName+"::deserialize(::hadoop::IArchive& a_, const char* tag) {\n");
+      cb.append("a_.startRecord(*this,tag);\n");
       for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
         JField<CppType> jf = i.next();
         String name = jf.getName();
         CppType type = jf.getType();
         if (type instanceof JBuffer.CppBuffer) {
-          cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
-              name+",len,\""+name+"\");\n}\n");
+          cb.append("{\nsize_t len=0; a_.deserialize("+name+",len,\""+name+"\");\n}\n");
         } else {
-          cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
-              name+"\");\n");
+          cb.append("a_.deserialize("+name+",\""+name+"\");\n");
         }
       }
-      cb.append(Consts.RECORD_INPUT + ".endRecord(*this," + Consts.TAG + ");\n");
+      cb.append("a_.endRecord(*this,tag);\n");
       cb.append("return;\n");
       cb.append("}\n");
       
-      // deserialize()
-      cb.append("void "+fullName+"::deserialize(::hadoop::IArchive& " +
-          Consts.RECORD_INPUT + ", const char* " + Consts.TAG + ") {\n");
-      cb.append("if (NULL == p" + Consts.RTI_FILTER + ") {\n");
-      cb.append("deserializeWithoutFilter(" + Consts.RECORD_INPUT + ", " + 
-          Consts.TAG + ");\n");
-      cb.append("return;\n");
-      cb.append("}\n");
-      cb.append("// if we're here, we need to read based on version info\n");
-      cb.append(Consts.RECORD_INPUT + ".startRecord(*this," + 
-          Consts.TAG + ");\n");
-      cb.append("setupRtiFields();\n");
-      cb.append("for (unsigned int " + Consts.RIO_PREFIX + "i=0; " + 
-          Consts.RIO_PREFIX + "i<p" + Consts.RTI_FILTER + 
-          "->getFieldTypeInfos().size(); " + Consts.RIO_PREFIX + "i++) {\n");
-      int ct = 0;
-      for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {
-        JField<CppType> jf = i.next();
-        String name = jf.getName();
-        CppType type = jf.getType();
-        ct++;
-        if (1 != ct) {
-          cb.append("else ");
-        }
-        cb.append("if (" + ct + " == p" + Consts.RTI_FILTER_FIELDS + "[" +
-            Consts.RIO_PREFIX + "i]) {\n");
-        if (type instanceof JBuffer.CppBuffer) {
-          cb.append("{\nsize_t len=0; " + Consts.RECORD_INPUT + ".deserialize("+
-              name+",len,\""+name+"\");\n}\n");
-        } else {
-          cb.append(Consts.RECORD_INPUT + ".deserialize("+name+",\""+
-              name+"\");\n");
-        }
-        cb.append("}\n");
-      }
-      if (0 != ct) {
-        cb.append("else {\n");
-        cb.append("const std::vector< ::hadoop::FieldTypeInfo* >& typeInfos = p" + 
-            Consts.RTI_FILTER + "->getFieldTypeInfos();\n");
-        cb.append("::hadoop::Utils::skip(" + Consts.RECORD_INPUT + 
-            ", typeInfos[" + Consts.RIO_PREFIX + "i]->getFieldID()->c_str()" + 
-            ", *(typeInfos[" + Consts.RIO_PREFIX + "i]->getTypeID()));\n");
-        cb.append("}\n");
-      }
-      cb.append("}\n");
-      cb.append(Consts.RECORD_INPUT + ".endRecord(*this, " + Consts.TAG+");\n");
-      cb.append("}\n");
-
-      // operator <
+      
       cb.append("bool "+fullName+"::operator< (const "+fullName+"& peer_) const {\n");
       cb.append("return (1\n");
       for (Iterator<JField<CppType>> i = fields.iterator(); i.hasNext();) {

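For context, the + lines in the hunk above restore the generator output that predates the Record I/O versioning work: compareTo(), equals(), clone() and hashCode() are emitted with bare identifiers (peer_, ret, result) rather than the Consts.RIO_PREFIX-mangled names, and the C++ side loses the RecordTypeInfo/setTypeFilter machinery, keeping only plain serialize()/deserialize() against an archive named a_. A rough sketch of what the restored generator emits for a record with a single int field, reconstructed from the appended strings above; the record name MyRecord and the field count are hypothetical:

    public int compareTo (final Object peer_) throws ClassCastException {
      if (!(peer_ instanceof MyRecord)) {
        throw new ClassCastException("Comparing different types of records.");
      }
      MyRecord peer = (MyRecord) peer_;
      int ret = 0;
      // per JType.genCompareTo for an int field:
      ret = (count == peer.count)? 0 :((count<peer.count)?-1:1);
      if (ret != 0) return ret;
      return ret;
    }

    public int hashCode() {
      int result = 17;
      int ret;
      ret = (int)count;           // per JType.genHashCode
      result = 37*result + ret;   // combined once per field
      return result;
    }
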
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java Thu Jan 17 00:55:47 2008
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.record.compiler;
 
+import org.apache.hadoop.record.compiler.JCompType.CCompType;
+import org.apache.hadoop.record.compiler.JCompType.CppCompType;
 
 /**
  */
@@ -26,13 +28,9 @@
   class JavaString extends JavaCompType {
     
     JavaString() {
-      super("String", "String", "String", "TypeID.RIOType.STRING");
+      super("String", "String", "String");
     }
     
-    String getTypeIDObjectString() {
-      return "org.apache.hadoop.record.meta.TypeID.StringTypeID";
-    }
-
     void genSlurpBytes(CodeBuffer cb, String b, String s, String l) {
       cb.append("{\n");
       cb.append("int i = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n");
@@ -55,25 +53,13 @@
     }
     
     void genClone(CodeBuffer cb, String fname) {
-      cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n");
-    }
-  }
-
-  class CppString extends CppCompType {
-    
-    CppString() {
-      super("::std::string");
-    }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::TypeID(::hadoop::RIOTYPE_STRING)";
+      cb.append("other."+fname+" = this."+fname+";\n");
     }
   }
-  
   /** Creates a new instance of JString */
   public JString() {
     setJavaType(new JavaString());
-    setCppType(new CppString());
+    setCppType(new CppCompType(" ::std::string"));
     setCType(new CCompType());
   }
     

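The JString hunk above removes the CppString subclass (and with it the ::hadoop::TypeID metadata hook) and maps ustring back onto a plain CppCompType. The post-revert constructor, copied from the + lines; the leading space in " ::std::string" is preserved from the original, most likely so that generated templates such as vector< ::std::string> cannot form the <: digraph on older C++ compilers:

    /** Creates a new instance of JString */
    public JString() {
      setJavaType(new JavaString());
      setCppType(new CppCompType(" ::std::string"));
      setCType(new CCompType());
    }
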
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java Thu Jan 17 00:55:47 2008
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.record.compiler;
 
-import java.util.Map;
-
-
 /**
  * Abstract Base class for all types supported by Hadoop Record I/O.
  */
@@ -42,49 +39,19 @@
     private String name;
     private String methodSuffix;
     private String wrapper;
-    private String typeIDByteString; // points to TypeID.RIOType 
     
     JavaType(String javaname,
-        String suffix,
-        String wrapper, 
-        String typeIDByteString) { 
+             String suffix,
+             String wrapper) {
       this.name = javaname;
       this.methodSuffix = suffix;
       this.wrapper = wrapper;
-      this.typeIDByteString = typeIDByteString;
     }
-
+    
     void genDecl(CodeBuffer cb, String fname) {
       cb.append("private "+name+" "+fname+";\n");
     }
     
-    void genStaticTypeInfo(CodeBuffer cb, String fname) {
-      cb.append(Consts.RTI_VAR + ".addField(\"" + fname + "\", " +
-          getTypeIDObjectString() + ");\n");
-    }
-    
-    abstract String getTypeIDObjectString();
-    
-    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
-      // do nothing by default
-      return;
-    }
-
-    /*void genRtiFieldCondition(CodeBuffer cb, String fname, int ct) {
-      cb.append("if ((tInfo.fieldID.equals(\"" + fname + "\")) && (typeVal ==" +
-          " org.apache.hadoop.record.meta." + getTypeIDByteString() + ")) {\n");
-      cb.append("rtiFilterFields[i] = " + ct + ";\n");
-      cb.append("}\n");
-    }
-
-    void genRtiNestedFieldCondition(CodeBuffer cb, String varName, int ct) {
-      cb.append("if (" + varName + ".getElementTypeID().getTypeVal() == " +
-          "org.apache.hadoop.record.meta." + getTypeIDByteString() + 
-          ") {\n");
-      cb.append("rtiFilterFields[i] = " + ct + ";\n");
-      cb.append("}\n");  
-    }*/
-
     void genConstructorParam(CodeBuffer cb, String fname) {
       cb.append("final "+name+" "+fname);
     }
@@ -110,26 +77,20 @@
       return methodSuffix;
     }
     
-    String getTypeIDByteString() {
-      return typeIDByteString;
-    }
-    
     void genWriteMethod(CodeBuffer cb, String fname, String tag) {
-      cb.append(Consts.RECORD_OUTPUT + ".write"+methodSuffix + 
-          "("+fname+",\""+tag+"\");\n");
+      cb.append("a.write"+methodSuffix+"("+fname+",\""+tag+"\");\n");
     }
     
     void genReadMethod(CodeBuffer cb, String fname, String tag, boolean decl) {
       if (decl) {
         cb.append(name+" "+fname+";\n");
       }
-      cb.append(fname+"=" + Consts.RECORD_INPUT + ".read" + 
-          methodSuffix+"(\""+tag+"\");\n");
+      cb.append(fname+"=a.read"+methodSuffix+"(\""+tag+"\");\n");
     }
     
     void genCompareTo(CodeBuffer cb, String fname, String other) {
-      cb.append(Consts.RIO_PREFIX + "ret = ("+fname+" == "+other+")? 0 :(("+
-          fname+"<"+other+")?-1:1);\n");
+      cb.append("ret = ("+fname+" == "+other+")? 0 :(("+fname+"<"+other+
+                ")?-1:1);\n");
     }
     
     abstract void genCompareBytes(CodeBuffer cb);
@@ -137,11 +98,11 @@
     abstract void genSlurpBytes(CodeBuffer cb, String b, String s, String l);
     
     void genEquals(CodeBuffer cb, String fname, String peer) {
-      cb.append(Consts.RIO_PREFIX + "ret = ("+fname+"=="+peer+");\n");
+      cb.append("ret = ("+fname+"=="+peer+");\n");
     }
     
     void genHashCode(CodeBuffer cb, String fname) {
-      cb.append(Consts.RIO_PREFIX + "ret = (int)"+fname+";\n");
+      cb.append("ret = (int)"+fname+";\n");
     }
     
     void genConstructorSet(CodeBuffer cb, String fname) {
@@ -149,11 +110,11 @@
     }
     
     void genClone(CodeBuffer cb, String fname) {
-      cb.append(Consts.RIO_PREFIX + "other."+fname+" = this."+fname+";\n");
+      cb.append("other."+fname+" = this."+fname+";\n");
     }
   }
   
-  abstract class CppType {
+  class CppType {
     private String name;
     
     CppType(String cppname) {
@@ -164,11 +125,6 @@
       cb.append(name+" "+fname+";\n");
     }
     
-    void genStaticTypeInfo(CodeBuffer cb, String fname) {
-      cb.append("p->addField(new ::std::string(\"" + 
-          fname + "\"), " + getTypeIDObjectString() + ");\n");
-    }
-    
     void genGetSet(CodeBuffer cb, String fname) {
       cb.append("virtual "+name+" get"+toCamelCase(fname)+"() const {\n");
       cb.append("return "+fname+";\n");
@@ -178,13 +134,6 @@
       cb.append("}\n");
     }
     
-    abstract String getTypeIDObjectString();
-
-    void genSetRTIFilter(CodeBuffer cb) {
-      // do nothing by default
-      return;
-    }
-
     String getType() {
       return name;
     }

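In JType.java the revert renames the archive parameter in generated code back to plain a (the versioned generator used Consts.RECORD_INPUT / Consts.RECORD_OUTPUT) and drops the per-type type-ID strings entirely. A standalone approximation of what the restored genWriteMethod/genReadMethod append, using StringBuilder in place of the compiler's CodeBuffer; the field name count and the suffix Int are hypothetical:

    public class GenSketch {
      public static void main(String[] args) {
        StringBuilder cb = new StringBuilder();
        String fname = "count", tag = "count", methodSuffix = "Int";
        // genWriteMethod after the revert:
        cb.append("a.write" + methodSuffix + "(" + fname + ",\"" + tag + "\");\n");
        // genReadMethod after the revert, with decl == false:
        cb.append(fname + "=a.read" + methodSuffix + "(\"" + tag + "\");\n");
        // prints:
        //   a.writeInt(count,"count");
        //   count=a.readInt("count");
        System.out.print(cb);
      }
    }
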
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JVector.java Thu Jan 17 00:55:47 2008
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.record.compiler;
 
-import java.util.Map;
+import org.apache.hadoop.record.compiler.JCompType.CCompType;
+import org.apache.hadoop.record.compiler.JCompType.CppCompType;
 
 /**
  */
@@ -42,46 +43,27 @@
     
     JavaVector(JType.JavaType t) {
       super("java.util.ArrayList<"+t.getWrapperType()+">",
-            "Vector", "java.util.ArrayList<"+t.getWrapperType()+">",
-            "TypeID.RIOType.VECTOR");
+            "Vector", "java.util.ArrayList<"+t.getWrapperType()+">");
       element = t;
     }
     
-    String getTypeIDObjectString() {
-      return "new org.apache.hadoop.record.meta.VectorTypeID(" + 
-      element.getTypeIDObjectString() + ")";
-    }
-
-    void genSetRTIFilter(CodeBuffer cb, Map<String, Integer> nestedStructMap) {
-      element.genSetRTIFilter(cb, nestedStructMap);
-    }
-
     void genCompareTo(CodeBuffer cb, String fname, String other) {
       cb.append("{\n");
-      incrLevel();
-      cb.append("int "+getId(Consts.RIO_PREFIX + "len1")+" = "+fname+
-          ".size();\n");
-      cb.append("int "+getId(Consts.RIO_PREFIX + "len2")+" = "+other+
-          ".size();\n");
-      cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; "+
-          getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len1")+
-          " && "+getId(Consts.RIO_PREFIX + "vidx")+"<"+
-          getId(Consts.RIO_PREFIX + "len2")+"; "+
-          getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
-      cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e1")+
+      cb.append("int "+getId("len1")+" = "+fname+".size();\n");
+      cb.append("int "+getId("len2")+" = "+other+".size();\n");
+      cb.append("for(int "+getId("vidx")+" = 0; "+getId("vidx")+"<"+
+                getId("len1")+" && "+getId("vidx")+"<"+getId("len2")+"; "+
+                getId("vidx")+"++) {\n");
+      cb.append(element.getType()+" "+getId("e1")+
                 " = "+fname+
-                ".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
-      cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e2")+
+                ".get("+getId("vidx")+");\n");
+      cb.append(element.getType()+" "+getId("e2")+
                 " = "+other+
-                ".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
-      element.genCompareTo(cb, getId(Consts.RIO_PREFIX + "e1"), 
-          getId(Consts.RIO_PREFIX + "e2"));
-      cb.append("if (" + Consts.RIO_PREFIX + "ret != 0) { return " +
-          Consts.RIO_PREFIX + "ret; }\n");
+                ".get("+getId("vidx")+");\n");
+      element.genCompareTo(cb, getId("e1"), getId("e2"));
+      cb.append("if (ret != 0) { return ret; }\n");
       cb.append("}\n");
-      cb.append(Consts.RIO_PREFIX + "ret = ("+getId(Consts.RIO_PREFIX + "len1")+
-          " - "+getId(Consts.RIO_PREFIX + "len2")+");\n");
-      decrLevel();
+      cb.append("ret = ("+getId("len1")+" - "+getId("len2")+");\n");
       cb.append("}\n");
     }
     
@@ -91,17 +73,13 @@
       }
       cb.append("{\n");
       incrLevel();
-      cb.append("org.apache.hadoop.record.Index "+
-          getId(Consts.RIO_PREFIX + "vidx")+" = " + 
-          Consts.RECORD_INPUT + ".startVector(\""+tag+"\");\n");
+      cb.append("org.apache.hadoop.record.Index "+getId("vidx")+" = a.startVector(\""+tag+"\");\n");
       cb.append(fname+"=new "+getType()+"();\n");
-      cb.append("for (; !"+getId(Consts.RIO_PREFIX + "vidx")+".done(); " + 
-          getId(Consts.RIO_PREFIX + "vidx")+".incr()) {\n");
-      element.genReadMethod(cb, getId(Consts.RIO_PREFIX + "e"), 
-          getId(Consts.RIO_PREFIX + "e"), true);
-      cb.append(fname+".add("+getId(Consts.RIO_PREFIX + "e")+");\n");
+      cb.append("for (; !"+getId("vidx")+".done(); "+getId("vidx")+".incr()) {\n");
+      element.genReadMethod(cb, getId("e"), getId("e"), true);
+      cb.append(fname+".add("+getId("e")+");\n");
       cb.append("}\n");
-      cb.append(Consts.RECORD_INPUT + ".endVector(\""+tag+"\");\n");
+      cb.append("a.endVector(\""+tag+"\");\n");
       decrLevel();
       cb.append("}\n");
     }
@@ -109,17 +87,13 @@
     void genWriteMethod(CodeBuffer cb, String fname, String tag) {
       cb.append("{\n");
       incrLevel();
-      cb.append(Consts.RECORD_OUTPUT + ".startVector("+fname+",\""+tag+"\");\n");
-      cb.append("int "+getId(Consts.RIO_PREFIX + "len")+" = "+fname+".size();\n");
-      cb.append("for(int "+getId(Consts.RIO_PREFIX + "vidx")+" = 0; " + 
-          getId(Consts.RIO_PREFIX + "vidx")+"<"+getId(Consts.RIO_PREFIX + "len")+
-          "; "+getId(Consts.RIO_PREFIX + "vidx")+"++) {\n");
-      cb.append(element.getType()+" "+getId(Consts.RIO_PREFIX + "e")+" = "+
-          fname+".get("+getId(Consts.RIO_PREFIX + "vidx")+");\n");
-      element.genWriteMethod(cb, getId(Consts.RIO_PREFIX + "e"), 
-          getId(Consts.RIO_PREFIX + "e"));
+      cb.append("a.startVector("+fname+",\""+tag+"\");\n");
+      cb.append("int "+getId("len")+" = "+fname+".size();\n");
+      cb.append("for(int "+getId("vidx")+" = 0; "+getId("vidx")+"<"+getId("len")+"; "+getId("vidx")+"++) {\n");
+      cb.append(element.getType()+" "+getId("e")+" = "+fname+".get("+getId("vidx")+");\n");
+      element.genWriteMethod(cb, getId("e"), getId("e"));
       cb.append("}\n");
-      cb.append(Consts.RECORD_OUTPUT + ".endVector("+fname+",\""+tag+"\");\n");
+      cb.append("a.endVector("+fname+",\""+tag+"\");\n");
       cb.append("}\n");
       decrLevel();
     }
@@ -163,31 +137,11 @@
     }
   }
   
-  class CppVector extends CppCompType {
-    
-    private JType.CppType element;
-    
-    CppVector(JType.CppType t) {
-      super("::std::vector< "+t.getType()+" >");
-      element = t;
-    }
-    
-    String getTypeIDObjectString() {
-      return "new ::hadoop::VectorTypeID(" +    
-      element.getTypeIDObjectString() + ")";
-    }
-
-    void genSetRTIFilter(CodeBuffer cb) {
-      element.genSetRTIFilter(cb);
-    }
-
-  }
-  
   /** Creates a new instance of JVector */
   public JVector(JType t) {
     type = t;
     setJavaType(new JavaVector(t.getJavaType()));
-    setCppType(new CppVector(t.getCppType()));
+    setCppType(new CppCompType(" ::std::vector<"+t.getCppType().getType()+">"));
     setCType(new CCompType());
   }
   

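The JVector hunk drops the CppVector type (C++ vectors fall back to a plain CppCompType over ::std::vector<...>) and un-mangles the temporaries in the generated Java loops (len, vidx, e instead of their RIO_PREFIX forms). A sketch of the writer that the restored JavaVector.genWriteMethod would produce for a field declared vector<int> ivec; getId() appears to suffix names with the nesting level, so a top-level field would yield len1/vidx1/e1, but that exact suffixing is an assumption here:

    {
      a.startVector(ivec, "ivec");
      int len1 = ivec.size();
      for (int vidx1 = 0; vidx1 < len1; vidx1++) {
        int e1 = ivec.get(vidx1);   // element type per JInt; unboxes Integer
        a.writeInt(e1, "e1");
      }
      a.endVector(ivec, "ivec");
    }
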
Modified: lucene/hadoop/trunk/src/test/ddl/test.jr
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/ddl/test.jr?rev=612765&r1=612764&r2=612765&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/ddl/test.jr (original)
+++ lucene/hadoop/trunk/src/test/ddl/test.jr Thu Jan 17 00:55:47 2008
@@ -17,30 +17,5 @@
         map<ustring, ustring>   mapVal;
         RecRecord0      recordVal;
     }
-    
-  class RecRecordOld {
-  	ustring name;
-  	vector<long> ivec;
-  	vector<vector<RecRecord0>> svec;
-  	RecRecord0 inner;
-  	vector<vector<vector<ustring>>> strvec;
-  	float i1;
-  	map<byte, ustring> map1;
-  	vector<map<int, long>> mvec1;
-  	vector<map<int, long>> mvec2;
-  }
-  
-  /* RecRecordNew is a lot like RecRecordOld. Helps test for versioning. */
-  class RecRecordNew {
-  	ustring name2;
-  	RecRecord0 inner;
-  	vector<int> ivec;
-  	vector<vector<int>> svec;
-  	vector<vector<vector<ustring>>> strvec;
-		int i1;  	
-		map<long, ustring> map1;
-  	vector<map<int, long>> mvec2;	
-  }
-  
 }
 


