Author: cutting
Date: Mon Mar 5 14:25:47 2007
New Revision: 514892
URL: http://svn.apache.org/viewvc?view=rev&rev=514892
Log:
HADOOP-1053. Fix VInt representation of negative values & some record cleanups. Contributed
by Milind.
Added:
lucene/hadoop/trunk/src/java/org/apache/hadoop/record/RecordComparator.java
Modified:
lucene/hadoop/trunk/CHANGES.txt
lucene/hadoop/trunk/src/c++/librecordio/binarchive.cc
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Record.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/Rcc.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/rcc.jj
lucene/hadoop/trunk/src/java/org/apache/hadoop/record/package.html
Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Mon Mar 5 14:25:47 2007
@@ -6,6 +6,11 @@
1. HADOOP-1035. Fix a StackOverflowError in FSDataSet.
(Raghu Angadi via cutting)
+ 2. HADOOP-1053. Fix VInt representation of negative values. Also
+ remove references in generated record code to methods outside of
+ the record package and improve some record documentation.
+ (Milind Bhandarkar via cutting)
+
Release 0.12.0 - 2007-03-02
Modified: lucene/hadoop/trunk/src/c++/librecordio/binarchive.cc
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/librecordio/binarchive.cc?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/c++/librecordio/binarchive.cc (original)
+++ lucene/hadoop/trunk/src/c++/librecordio/binarchive.cc Mon Mar 5 14:25:47 2007
@@ -48,7 +48,7 @@
int8_t len = -120;
if (t < 0) {
- t &= 0x7FFFFFFF; // reset the sign bit
+ t ^= 0xFFFFFFFF; // take one's complement
len = -124;
}
@@ -91,7 +91,7 @@
t |= (barr[idx] & 0xFF);
}
if (isNegative) {
- t |= 0x80000000;
+ t ^= 0xFFFFFFFF;
}
}
@@ -105,7 +105,7 @@
int8_t len = -112;
if (t < 0) {
- t &= 0x7FFFFFFFFFFFFFFFLL; // reset the sign bit
+ t &= 0xFFFFFFFFFFFFFFFFLL; // take one's complement
len = -120;
}
@@ -149,7 +149,7 @@
t |= (barr[idx] & 0xFF);
}
if (isNegative) {
- t |= 0x8000000000000000L;
+ t ^= 0xFFFFFFFFFFFFFFFFL;
}
}
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java Mon Mar 5 14:25:47
2007
@@ -187,7 +187,7 @@
i = i << 8;
i = i | (bytes[start+1+idx] & 0xFF);
}
- return (isNegative ? (i | 0x8000000000000000L) : i);
+ return (isNegative ? (i ^ -1L) : i);
}
/**
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java Mon Mar 5 14:25:47
2007
@@ -279,7 +279,7 @@
int len = -112;
if (i < 0) {
- i &= 0x7FFFFFFFFFFFFFFFL; // reset the sign bit
+ i ^= -1L; // take one's complement
len = -120;
}
@@ -320,7 +320,7 @@
i = i << 8;
i = i | (b & 0xFF);
}
- return (isNegative ? (i | 0x8000000000000000L) : i);
+ return (isNegative ? (i ^ -1L) : i);
}
/**
@@ -345,7 +345,7 @@
int len = -112;
if (i < 0) {
- i &= 0x7FFFFFFFFFFFFFFFL; // reset the sign bit
+ i ^= -1L; // take one's complement
len = -120;
}
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Record.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Record.java?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Record.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Record.java Mon Mar 5 14:25:47
2007
@@ -18,16 +18,47 @@
package org.apache.hadoop.record;
+import java.io.DataInput;
+import java.io.DataOutput;
import java.io.IOException;
+import org.apache.hadoop.io.WritableComparable;
/**
- * Interface that is implemented by generated classes.
+ * Abstract class that is extended by generated classes.
*
* @author Milind Bhandarkar
*/
-public interface Record extends Cloneable {
- void serialize(OutputArchive archive, String tag)
- throws IOException;
- void deserialize(InputArchive archive, String tag)
- throws IOException;
+public abstract class Record implements WritableComparable, Cloneable {
+
+ /**
+ * Serialize a record into archive
+ * @param archive Output Archive
+ * @param tag Record tag (Used only in tagged serialization e.g. XML)
+ */
+ public abstract void serialize(OutputArchive archive, String tag)
+ throws IOException;
+
+ /**
+ * Deserialize a record from archive
+ * @param archive Input Archive
+ * @param tag Record tag (Used only in tagged serialization e.g. XML)
+ */
+ public abstract void deserialize(InputArchive archive, String tag)
+ throws IOException;
+
+ // inherit javadoc
+ public abstract int compareTo (final Object peer) throws ClassCastException;
+
+ // inherit javadoc
+ public void write(final DataOutput out) throws java.io.IOException {
+ BinaryOutputArchive archive = new BinaryOutputArchive(out);
+ this.serialize(archive, "");
+ }
+
+ // inherit javadoc
+ public void readFields(final DataInput in) throws java.io.IOException {
+ BinaryInputArchive archive = new BinaryInputArchive(in);
+ this.deserialize(archive, "");
+ }
+
}
Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/RecordComparator.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/RecordComparator.java?view=auto&rev=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/RecordComparator.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/RecordComparator.java Mon Mar 5
14:25:47 2007
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.record;
+
+import org.apache.hadoop.io.WritableComparator;
+
+/**
+ * A raw record comparator base class
+ *
+ * @author Milind Bhandarkar
+ */
+public abstract class RecordComparator extends WritableComparator {
+
+ /**
+ * Construct a raw {@link Record} comparison implementation. */
+ protected RecordComparator(Class recordClass) {
+ super(recordClass);
+ }
+
+ // inherit JavaDoc
+ public abstract int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2);
+
+ /**
+ * Register an optimized comparator for a {@link Record} implementation.
+ *
+ * @param recordClass record class for which a raw comparator is provided
+ * @param comparator Raw comparator instance for recordClass
+ */
+ public static synchronized void define(Class c, RecordComparator comparator) {
+ WritableComparator.define(c, comparator);
+ }
+}
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBuffer.java Mon Mar 5
14:25:47 2007
@@ -65,7 +65,7 @@
cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
- cb.append("int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,l1,b2,s2,l2);\n");
+ cb.append("int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);\n");
cb.append("if (r1 != 0) { return (r1<0)?-1:0; }\n");
cb.append("s1+=i1; s2+=i2; l1-=i1; l1-=i2;\n");
cb.append("}\n");
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java Mon Mar 5
14:25:47 2007
@@ -100,9 +100,7 @@
cb.append("// File generated by hadoop record compiler. Do not edit.\n");
cb.append("package "+module+";\n\n");
cb.append("public class "+name+
- " implements org.apache.hadoop.record.Record");
- cb.append(", org.apache.hadoop.io.WritableComparable");
- cb.append(" {\n");
+ " extends org.apache.hadoop.record.Record {\n");
for (Iterator<JField<JavaType>> i = fields.iterator(); i.hasNext();) {
JField<JavaType> jf = i.next();
@@ -176,19 +174,6 @@
cb.append("}\n");
cb.append("}\n");
- cb.append("public void write(final java.io.DataOutput out)\n"+
- "throws java.io.IOException {\n");
- cb.append("org.apache.hadoop.record.BinaryOutputArchive archive =\n"+
- "new org.apache.hadoop.record.BinaryOutputArchive(out);\n");
- cb.append("this.serialize(archive, \"\");\n");
- cb.append("}\n");
-
- cb.append("public void readFields(final java.io.DataInput in)\n"+
- "throws java.io.IOException {\n");
- cb.append("org.apache.hadoop.record.BinaryInputArchive archive =\n"+
- "new org.apache.hadoop.record.BinaryInputArchive(in);\n");
- cb.append("this.deserialize(archive, \"\");\n");
- cb.append("}\n");
cb.append("public int compareTo (final Object peer_) throws ClassCastException {\n");
cb.append("if (!(peer_ instanceof "+name+")) {\n");
cb.append("throw new ClassCastException(\"Comparing different types of records.\");\n");
@@ -253,7 +238,7 @@
cb.append("}\n");
cb.append("public static class Comparator extends"+
- " org.apache.hadoop.io.WritableComparator {\n");
+ " org.apache.hadoop.record.RecordComparator {\n");
cb.append("public Comparator() {\n");
cb.append("super("+name+".class);\n");
cb.append("}\n");
@@ -295,7 +280,7 @@
cb.append("}\n");
cb.append("}\n\n");
cb.append("static {\n");
- cb.append("org.apache.hadoop.io.WritableComparator.define("
+ cb.append("org.apache.hadoop.record.RecordComparator.define("
+name+".class, new Comparator());\n");
cb.append("}\n");
cb.append("}\n");
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java Mon Mar 5
14:25:47 2007
@@ -48,7 +48,7 @@
cb.append("int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);\n");
cb.append("int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);\n");
cb.append("s1+=z1; s2+=z2; l1-=z1; l2-=z2;\n");
- cb.append("int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,l1,b2,s2,l2);\n");
+ cb.append("int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);\n");
cb.append("if (r1 != 0) { return (r1<0)?-1:0; }\n");
cb.append("s1+=i1; s2+=i2; l1-=i1; l1-=i2;\n");
cb.append("}\n");
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/Rcc.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/Rcc.java?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/Rcc.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/Rcc.java Mon
Mar 5 14:25:47 2007
@@ -43,6 +43,10 @@
System.exit(driver(args));
}
+ public static void usage() {
+ System.err.println("Usage: rcc --language [java|c++] ddl-files");
+ }
+
public static int driver(String[] args) {
for (int i=0; i<args.length; i++) {
if ("-l".equalsIgnoreCase(args[i]) ||
@@ -64,7 +68,7 @@
}
}
if (recFiles.size() == 0) {
- System.err.println("No record files specified. Exiting.");
+ usage();
return 1;
}
for (int i=0; i<recFiles.size(); i++) {
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/rcc.jj
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/rcc.jj?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/rcc.jj (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/rcc.jj Mon Mar
5 14:25:47 2007
@@ -47,6 +47,10 @@
System.exit(driver(args));
}
+ public static void usage() {
+ System.err.println("Usage: rcc --language [java|c++] ddl-files");
+ }
+
public static int driver(String[] args) {
for (int i=0; i<args.length; i++) {
if ("-l".equalsIgnoreCase(args[i]) ||
@@ -68,7 +72,7 @@
}
}
if (recFiles.size() == 0) {
- System.err.println("No record files specified. Exiting.");
+ usage();
return 1;
}
for (int i=0; i<recFiles.size(); i++) {
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/package.html
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/package.html?view=diff&rev=514892&r1=514891&r2=514892
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/package.html (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/package.html Mon Mar 5 14:25:47
2007
@@ -479,9 +479,9 @@
private:
- int32_t mI32;
- double mD;
- std::string mS;
+ int32_t I32;
+ double D;
+ std::string S;
public:
@@ -491,14 +491,14 @@
virtual bool operator==(const RI& peer) const;
virtual bool operator<(const RI& peer) const;
- virtual int32_t getI32(void) const { return mI32; }
- virtual void setI32(int32_t v) { mI32 = v; }
+ virtual int32_t getI32(void) const { return I32; }
+ virtual void setI32(int32_t v) { I32 = v; }
- virtual double getD(void) const { return mD; }
- virtual void setD(double v) { mD = v; }
+ virtual double getD(void) const { return D; }
+ virtual void setD(double v) { D = v; }
- virtual std::string& getS(void) const { return mS; }
- virtual const std::string& getS(void) const { return mS; }
+ virtual std::string& getS(void) const { return S; }
+ virtual const std::string& getS(void) const { return S; }
virtual std::string type(void) const;
virtual std::string signature(void) const;
@@ -507,8 +507,6 @@
virtual void serialize(hadoop::OArchive& a) const;
virtual void deserialize(hadoop::IArchive& a);
-
- virtual bool validate(void);
};
} // end namespace inclrec
@@ -531,9 +529,9 @@
private:
- std::vector<float> mVF;
- inclrec::RI mRec;
- std::string mBuf;
+ std::vector<float> VF;
+ inclrec::RI Rec;
+ std::string Buf;
public:
@@ -595,11 +593,11 @@
long int64_t long
float float float
double double double
-ustring std::string Text
-buffer std::string java.io.ByteArrayOutputStream
+ustring std::string java.lang.String
+buffer std::string org.apache.hadoop.record.Buffer
class type class type class type
-vector<type> std::vector<type> java.util.ArrayList
-map<type,type> std::map<type,type> java.util.TreeMap
+vector<type> std::vector<type> java.util.ArrayList<type>
+map<type,type> std::map<type,type> java.util.TreeMap<type,type>
</code></pre>
<h2>Data encodings</h2>
|