Subject: svn commit: r529410 [21/27] - in /lucene/hadoop/trunk: ./ src/contrib/abacus/src/examples/org/apache/hadoop/abacus/examples/ src/contrib/abacus/src/java/org/apache/hadoop/abacus/ src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/ src/...
Date: Mon, 16 Apr 2007 21:44:46 -0000
To: hadoop-commits@lucene.apache.org
From: cutting@apache.org
Message-Id: <20070416214459.E7B231A9883@eris.apache.org>

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/BinaryRecordInput.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/BinaryRecordInput.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/BinaryRecordInput.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/BinaryRecordInput.java Mon Apr 16 14:44:35 2007 @@ -29,110 +29,110 @@ */ public class BinaryRecordInput implements RecordInput { - private DataInput in; + private DataInput in; - static private class BinaryIndex implements Index { - private int nelems; - private BinaryIndex(int nelems) { - this.nelems = nelems; - } - public boolean done() { - return (nelems <= 0); - } - public void incr() { - nelems--; - } + static private class BinaryIndex implements Index { + private int nelems; + private BinaryIndex(int nelems) { + this.nelems = nelems; } + public boolean done() { + return (nelems <= 0); + } + public void incr() { + nelems--; + } + } - private BinaryRecordInput() {} + private BinaryRecordInput() {} - private void setDataInput(DataInput inp) { - this.in = inp; - } + private void setDataInput(DataInput inp) { + this.in = inp; + } - private static ThreadLocal bIn = new ThreadLocal() { + private static ThreadLocal bIn = new ThreadLocal() { protected synchronized Object initialValue() { return new BinaryRecordInput(); } }; - /** - * Get a thread-local record input for the supplied DataInput.
- * @param inp data input stream - * @return binary record input corresponding to the supplied DataInput. - */ - public static BinaryRecordInput get(DataInput inp) { - BinaryRecordInput bin = (BinaryRecordInput) bIn.get(); - bin.setDataInput(inp); - return bin; - } - - /** Creates a new instance of BinaryRecordInput */ - public BinaryRecordInput(InputStream strm) { - this.in = new DataInputStream(strm); - } - - /** Creates a new instance of BinaryRecordInput */ - public BinaryRecordInput(DataInput din) { - this.in = din; - } - - public byte readByte(final String tag) throws IOException { - return in.readByte(); - } - - public boolean readBool(final String tag) throws IOException { - return in.readBoolean(); - } - - public int readInt(final String tag) throws IOException { - return Utils.readVInt(in); - } - - public long readLong(final String tag) throws IOException { - return Utils.readVLong(in); - } - - public float readFloat(final String tag) throws IOException { - return in.readFloat(); - } - - public double readDouble(final String tag) throws IOException { - return in.readDouble(); - } - - public String readString(final String tag) throws IOException { - return Utils.fromBinaryString(in); - } - - public Buffer readBuffer(final String tag) throws IOException { - final int len = Utils.readVInt(in); - final byte[] barr = new byte[len]; - in.readFully(barr); - return new Buffer(barr); - } - - public void startRecord(final String tag) throws IOException { - // no-op - } - - public void endRecord(final String tag) throws IOException { - // no-op - } - - public Index startVector(final String tag) throws IOException { - return new BinaryIndex(readInt(tag)); - } - - public void endVector(final String tag) throws IOException { - // no-op -} - - public Index startMap(final String tag) throws IOException { - return new BinaryIndex(readInt(tag)); - } - - public void endMap(final String tag) throws IOException { - // no-op - } + /** + * Get a thread-local record input for the supplied DataInput. + * @param inp data input stream + * @return binary record input corresponding to the supplied DataInput. 
+ */ + public static BinaryRecordInput get(DataInput inp) { + BinaryRecordInput bin = (BinaryRecordInput) bIn.get(); + bin.setDataInput(inp); + return bin; + } + + /** Creates a new instance of BinaryRecordInput */ + public BinaryRecordInput(InputStream strm) { + this.in = new DataInputStream(strm); + } + + /** Creates a new instance of BinaryRecordInput */ + public BinaryRecordInput(DataInput din) { + this.in = din; + } + + public byte readByte(final String tag) throws IOException { + return in.readByte(); + } + + public boolean readBool(final String tag) throws IOException { + return in.readBoolean(); + } + + public int readInt(final String tag) throws IOException { + return Utils.readVInt(in); + } + + public long readLong(final String tag) throws IOException { + return Utils.readVLong(in); + } + + public float readFloat(final String tag) throws IOException { + return in.readFloat(); + } + + public double readDouble(final String tag) throws IOException { + return in.readDouble(); + } + + public String readString(final String tag) throws IOException { + return Utils.fromBinaryString(in); + } + + public Buffer readBuffer(final String tag) throws IOException { + final int len = Utils.readVInt(in); + final byte[] barr = new byte[len]; + in.readFully(barr); + return new Buffer(barr); + } + + public void startRecord(final String tag) throws IOException { + // no-op + } + + public void endRecord(final String tag) throws IOException { + // no-op + } + + public Index startVector(final String tag) throws IOException { + return new BinaryIndex(readInt(tag)); + } + + public void endVector(final String tag) throws IOException { + // no-op + } + + public Index startMap(final String tag) throws IOException { + return new BinaryIndex(readInt(tag)); + } + + public void endMap(final String tag) throws IOException { + // no-op + } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/BinaryRecordOutput.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/BinaryRecordOutput.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/BinaryRecordOutput.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/BinaryRecordOutput.java Mon Apr 16 14:44:35 2007 @@ -31,92 +31,92 @@ */ public class BinaryRecordOutput implements RecordOutput { - private DataOutput out; + private DataOutput out; - private BinaryRecordOutput() {} + private BinaryRecordOutput() {} - private void setDataOutput(DataOutput out) { - this.out = out; - } + private void setDataOutput(DataOutput out) { + this.out = out; + } - private static ThreadLocal bOut = new ThreadLocal() { + private static ThreadLocal bOut = new ThreadLocal() { protected synchronized Object initialValue() { return new BinaryRecordOutput(); } }; - /** - * Get a thread-local record output for the supplied DataOutput. - * @param out data output stream - * @return binary record output corresponding to the supplied DataOutput. 
- */ - public static BinaryRecordOutput get(DataOutput out) { - BinaryRecordOutput bout = (BinaryRecordOutput) bOut.get(); - bout.setDataOutput(out); - return bout; - } - - /** Creates a new instance of BinaryRecordOutput */ - public BinaryRecordOutput(OutputStream out) { - this.out = new DataOutputStream(out); - } - - /** Creates a new instance of BinaryRecordOutput */ - public BinaryRecordOutput(DataOutput out) { - this.out = out; - } - - - public void writeByte(byte b, String tag) throws IOException { - out.writeByte(b); - } - - public void writeBool(boolean b, String tag) throws IOException { - out.writeBoolean(b); - } - - public void writeInt(int i, String tag) throws IOException { - Utils.writeVInt(out, i); - } - - public void writeLong(long l, String tag) throws IOException { - Utils.writeVLong(out, l); - } - - public void writeFloat(float f, String tag) throws IOException { - out.writeFloat(f); - } - - public void writeDouble(double d, String tag) throws IOException { - out.writeDouble(d); - } - - public void writeString(String s, String tag) throws IOException { - Utils.toBinaryString(out, s); - } + /** + * Get a thread-local record output for the supplied DataOutput. + * @param out data output stream + * @return binary record output corresponding to the supplied DataOutput. + */ + public static BinaryRecordOutput get(DataOutput out) { + BinaryRecordOutput bout = (BinaryRecordOutput) bOut.get(); + bout.setDataOutput(out); + return bout; + } + + /** Creates a new instance of BinaryRecordOutput */ + public BinaryRecordOutput(OutputStream out) { + this.out = new DataOutputStream(out); + } + + /** Creates a new instance of BinaryRecordOutput */ + public BinaryRecordOutput(DataOutput out) { + this.out = out; + } + + + public void writeByte(byte b, String tag) throws IOException { + out.writeByte(b); + } + + public void writeBool(boolean b, String tag) throws IOException { + out.writeBoolean(b); + } + + public void writeInt(int i, String tag) throws IOException { + Utils.writeVInt(out, i); + } + + public void writeLong(long l, String tag) throws IOException { + Utils.writeVLong(out, l); + } + + public void writeFloat(float f, String tag) throws IOException { + out.writeFloat(f); + } + + public void writeDouble(double d, String tag) throws IOException { + out.writeDouble(d); + } + + public void writeString(String s, String tag) throws IOException { + Utils.toBinaryString(out, s); + } - public void writeBuffer(Buffer buf, String tag) + public void writeBuffer(Buffer buf, String tag) throws IOException { - byte[] barr = buf.get(); - int len = buf.getCount(); - Utils.writeVInt(out, len); - out.write(barr, 0, len); - } + byte[] barr = buf.get(); + int len = buf.getCount(); + Utils.writeVInt(out, len); + out.write(barr, 0, len); + } - public void startRecord(Record r, String tag) throws IOException {} + public void startRecord(Record r, String tag) throws IOException {} - public void endRecord(Record r, String tag) throws IOException {} + public void endRecord(Record r, String tag) throws IOException {} - public void startVector(ArrayList v, String tag) throws IOException { - writeInt(v.size(), tag); - } + public void startVector(ArrayList v, String tag) throws IOException { + writeInt(v.size(), tag); + } - public void endVector(ArrayList v, String tag) throws IOException {} + public void endVector(ArrayList v, String tag) throws IOException {} - public void startMap(TreeMap v, String tag) throws IOException { - writeInt(v.size(), tag); - } + public void startMap(TreeMap v, String tag) throws 
IOException { + writeInt(v.size(), tag); + } - public void endMap(TreeMap v, String tag) throws IOException {} + public void endMap(TreeMap v, String tag) throws IOException {} } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Buffer.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Buffer.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Buffer.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Buffer.java Mon Apr 16 14:44:35 2007 @@ -123,7 +123,7 @@ */ public void setCapacity(int newCapacity) { if (newCapacity < 0) { - throw new IllegalArgumentException("Invalid capacity argument "+newCapacity); + throw new IllegalArgumentException("Invalid capacity argument "+newCapacity); } if (newCapacity == 0) { this.bytes = null; @@ -164,7 +164,7 @@ * @param offset offset into byte array * @param length length of data - */ + */ public void append(byte[] bytes, int offset, int length) { setCapacity(count+length); System.arraycopy(bytes, offset, this.get(), count, length); @@ -234,7 +234,7 @@ * @param charsetName Valid Java Character Set Name */ public String toString(String charsetName) - throws UnsupportedEncodingException { + throws UnsupportedEncodingException { return new String(this.get(), 0, this.getCount(), charsetName); } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/CsvRecordInput.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/CsvRecordInput.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/CsvRecordInput.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/CsvRecordInput.java Mon Apr 16 14:44:35 2007 @@ -30,173 +30,173 @@ */ public class CsvRecordInput implements RecordInput { - private PushbackReader stream; + private PushbackReader stream; - private class CsvIndex implements Index { - public boolean done() { - char c = '\0'; - try { - c = (char) stream.read(); - stream.unread(c); - } catch (IOException ex) { - } - return (c == '}') ? true : false; - } - public void incr() {} - } - - private void throwExceptionOnError(String tag) throws IOException { - throw new IOException("Error deserializing "+tag); - } - - private String readField(String tag) throws IOException { - try { - StringBuffer buf = new StringBuffer(); - while (true) { - char c = (char) stream.read(); - switch (c) { - case ',': - return buf.toString(); - case '}': - case '\n': - case '\r': - stream.unread(c); - return buf.toString(); - default: - buf.append(c); - } - } - } catch (IOException ex) { - throw new IOException("Error reading "+tag); - } - } - - /** Creates a new instance of CsvRecordInput */ - public CsvRecordInput(InputStream in) { + private class CsvIndex implements Index { + public boolean done() { + char c = '\0'; try { - stream = new PushbackReader(new InputStreamReader(in, "UTF-8")); - } catch (UnsupportedEncodingException ex) { - throw new RuntimeException(ex); + c = (char) stream.read(); + stream.unread(c); + } catch (IOException ex) { } + return (c == '}') ? 
true : false; } + public void incr() {} + } - public byte readByte(String tag) throws IOException { - return (byte) readLong(tag); - } - - public boolean readBool(String tag) throws IOException { - String sval = readField(tag); - return "T".equals(sval) ? true : false; - } - - public int readInt(String tag) throws IOException { - return (int) readLong(tag); - } - - public long readLong(String tag) throws IOException { - String sval = readField(tag); - try { - long lval = Long.parseLong(sval); - return lval; - } catch (NumberFormatException ex) { - throw new IOException("Error deserializing "+tag); + private void throwExceptionOnError(String tag) throws IOException { + throw new IOException("Error deserializing "+tag); + } + + private String readField(String tag) throws IOException { + try { + StringBuffer buf = new StringBuffer(); + while (true) { + char c = (char) stream.read(); + switch (c) { + case ',': + return buf.toString(); + case '}': + case '\n': + case '\r': + stream.unread(c); + return buf.toString(); + default: + buf.append(c); } + } + } catch (IOException ex) { + throw new IOException("Error reading "+tag); } + } - public float readFloat(String tag) throws IOException { - return (float) readDouble(tag); - } - - public double readDouble(String tag) throws IOException { - String sval = readField(tag); - try { - double dval = Double.parseDouble(sval); - return dval; - } catch (NumberFormatException ex) { - throw new IOException("Error deserializing "+tag); - } + /** Creates a new instance of CsvRecordInput */ + public CsvRecordInput(InputStream in) { + try { + stream = new PushbackReader(new InputStreamReader(in, "UTF-8")); + } catch (UnsupportedEncodingException ex) { + throw new RuntimeException(ex); } + } - public String readString(String tag) throws IOException { - String sval = readField(tag); - return Utils.fromCSVString(sval); - } - - public Buffer readBuffer(String tag) throws IOException { - String sval = readField(tag); - return Utils.fromCSVBuffer(sval); - } - - public void startRecord(String tag) throws IOException { - if (tag != null && !"".equals(tag)) { - char c1 = (char) stream.read(); - char c2 = (char) stream.read(); - if (c1 != 's' || c2 != '{') { - throw new IOException("Error deserializing "+tag); - } - } + public byte readByte(String tag) throws IOException { + return (byte) readLong(tag); + } + + public boolean readBool(String tag) throws IOException { + String sval = readField(tag); + return "T".equals(sval) ? 
true : false; + } + + public int readInt(String tag) throws IOException { + return (int) readLong(tag); + } + + public long readLong(String tag) throws IOException { + String sval = readField(tag); + try { + long lval = Long.parseLong(sval); + return lval; + } catch (NumberFormatException ex) { + throw new IOException("Error deserializing "+tag); + } + } + + public float readFloat(String tag) throws IOException { + return (float) readDouble(tag); + } + + public double readDouble(String tag) throws IOException { + String sval = readField(tag); + try { + double dval = Double.parseDouble(sval); + return dval; + } catch (NumberFormatException ex) { + throw new IOException("Error deserializing "+tag); + } + } + + public String readString(String tag) throws IOException { + String sval = readField(tag); + return Utils.fromCSVString(sval); + } + + public Buffer readBuffer(String tag) throws IOException { + String sval = readField(tag); + return Utils.fromCSVBuffer(sval); + } + + public void startRecord(String tag) throws IOException { + if (tag != null && !"".equals(tag)) { + char c1 = (char) stream.read(); + char c2 = (char) stream.read(); + if (c1 != 's' || c2 != '{') { + throw new IOException("Error deserializing "+tag); + } } + } - public void endRecord(String tag) throws IOException { - char c = (char) stream.read(); - if (tag == null || "".equals(tag)) { - if (c != '\n' && c != '\r') { - throw new IOException("Error deserializing record."); - } else { - return; - } - } - - if (c != '}') { - throw new IOException("Error deserializing "+tag); - } - c = (char) stream.read(); - if (c != ',') { - stream.unread(c); - } - + public void endRecord(String tag) throws IOException { + char c = (char) stream.read(); + if (tag == null || "".equals(tag)) { + if (c != '\n' && c != '\r') { + throw new IOException("Error deserializing record."); + } else { return; + } } - - public Index startVector(String tag) throws IOException { - char c1 = (char) stream.read(); - char c2 = (char) stream.read(); - if (c1 != 'v' || c2 != '{') { - throw new IOException("Error deserializing "+tag); - } - return new CsvIndex(); - } - - public void endVector(String tag) throws IOException { - char c = (char) stream.read(); - if (c != '}') { - throw new IOException("Error deserializing "+tag); - } - c = (char) stream.read(); - if (c != ',') { - stream.unread(c); - } - return; + + if (c != '}') { + throw new IOException("Error deserializing "+tag); } - - public Index startMap(String tag) throws IOException { - char c1 = (char) stream.read(); - char c2 = (char) stream.read(); - if (c1 != 'm' || c2 != '{') { - throw new IOException("Error deserializing "+tag); - } - return new CsvIndex(); + c = (char) stream.read(); + if (c != ',') { + stream.unread(c); } + + return; + } - public void endMap(String tag) throws IOException { - char c = (char) stream.read(); - if (c != '}') { - throw new IOException("Error deserializing "+tag); - } - c = (char) stream.read(); - if (c != ',') { - stream.unread(c); - } - return; + public Index startVector(String tag) throws IOException { + char c1 = (char) stream.read(); + char c2 = (char) stream.read(); + if (c1 != 'v' || c2 != '{') { + throw new IOException("Error deserializing "+tag); + } + return new CsvIndex(); + } + + public void endVector(String tag) throws IOException { + char c = (char) stream.read(); + if (c != '}') { + throw new IOException("Error deserializing "+tag); + } + c = (char) stream.read(); + if (c != ',') { + stream.unread(c); + } + return; + } + + public Index startMap(String tag) 
throws IOException { + char c1 = (char) stream.read(); + char c2 = (char) stream.read(); + if (c1 != 'm' || c2 != '{') { + throw new IOException("Error deserializing "+tag); + } + return new CsvIndex(); + } + + public void endMap(String tag) throws IOException { + char c = (char) stream.read(); + if (c != '}') { + throw new IOException("Error deserializing "+tag); + } + c = (char) stream.read(); + if (c != ',') { + stream.unread(c); } + return; + } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/CsvRecordOutput.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/CsvRecordOutput.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/CsvRecordOutput.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/CsvRecordOutput.java Mon Apr 16 14:44:35 2007 @@ -31,112 +31,112 @@ */ public class CsvRecordOutput implements RecordOutput { - private PrintStream stream; - private boolean isFirst = true; + private PrintStream stream; + private boolean isFirst = true; - private void throwExceptionOnError(String tag) throws IOException { - if (stream.checkError()) { - throw new IOException("Error serializing "+tag); - } + private void throwExceptionOnError(String tag) throws IOException { + if (stream.checkError()) { + throw new IOException("Error serializing "+tag); } + } - private void printCommaUnlessFirst() { - if (!isFirst) { - stream.print(","); - } - isFirst = false; - } - - /** Creates a new instance of CsvRecordOutput */ - public CsvRecordOutput(OutputStream out) { - try { - stream = new PrintStream(out, true, "UTF-8"); - } catch (UnsupportedEncodingException ex) { - throw new RuntimeException(ex); - } - } - - public void writeByte(byte b, String tag) throws IOException { - writeLong((long)b, tag); - } - - public void writeBool(boolean b, String tag) throws IOException { - printCommaUnlessFirst(); - String val = b ? "T" : "F"; - stream.print(val); - throwExceptionOnError(tag); - } - - public void writeInt(int i, String tag) throws IOException { - writeLong((long)i, tag); - } - - public void writeLong(long l, String tag) throws IOException { - printCommaUnlessFirst(); - stream.print(l); - throwExceptionOnError(tag); - } - - public void writeFloat(float f, String tag) throws IOException { - writeDouble((double)f, tag); - } - - public void writeDouble(double d, String tag) throws IOException { - printCommaUnlessFirst(); - stream.print(d); - throwExceptionOnError(tag); - } + private void printCommaUnlessFirst() { + if (!isFirst) { + stream.print(","); + } + isFirst = false; + } + + /** Creates a new instance of CsvRecordOutput */ + public CsvRecordOutput(OutputStream out) { + try { + stream = new PrintStream(out, true, "UTF-8"); + } catch (UnsupportedEncodingException ex) { + throw new RuntimeException(ex); + } + } + + public void writeByte(byte b, String tag) throws IOException { + writeLong((long)b, tag); + } + + public void writeBool(boolean b, String tag) throws IOException { + printCommaUnlessFirst(); + String val = b ? 
"T" : "F"; + stream.print(val); + throwExceptionOnError(tag); + } + + public void writeInt(int i, String tag) throws IOException { + writeLong((long)i, tag); + } + + public void writeLong(long l, String tag) throws IOException { + printCommaUnlessFirst(); + stream.print(l); + throwExceptionOnError(tag); + } + + public void writeFloat(float f, String tag) throws IOException { + writeDouble((double)f, tag); + } + + public void writeDouble(double d, String tag) throws IOException { + printCommaUnlessFirst(); + stream.print(d); + throwExceptionOnError(tag); + } + + public void writeString(String s, String tag) throws IOException { + printCommaUnlessFirst(); + stream.print(Utils.toCSVString(s)); + throwExceptionOnError(tag); + } - public void writeString(String s, String tag) throws IOException { - printCommaUnlessFirst(); - stream.print(Utils.toCSVString(s)); - throwExceptionOnError(tag); - } - - public void writeBuffer(Buffer buf, String tag) + public void writeBuffer(Buffer buf, String tag) throws IOException { - printCommaUnlessFirst(); - stream.print(Utils.toCSVBuffer(buf)); - throwExceptionOnError(tag); - } - - public void startRecord(Record r, String tag) throws IOException { - if (tag != null && !"".equals(tag)) { - printCommaUnlessFirst(); - stream.print("s{"); - isFirst = true; - } - } - - public void endRecord(Record r, String tag) throws IOException { - if (tag == null || "".equals(tag)) { - stream.print("\n"); - isFirst = true; - } else { - stream.print("}"); - isFirst = false; - } - } - - public void startVector(ArrayList v, String tag) throws IOException { - printCommaUnlessFirst(); - stream.print("v{"); - isFirst = true; - } - - public void endVector(ArrayList v, String tag) throws IOException { - stream.print("}"); - isFirst = false; - } - - public void startMap(TreeMap v, String tag) throws IOException { - printCommaUnlessFirst(); - stream.print("m{"); - isFirst = true; - } - - public void endMap(TreeMap v, String tag) throws IOException { - stream.print("}"); - isFirst = false; - } + printCommaUnlessFirst(); + stream.print(Utils.toCSVBuffer(buf)); + throwExceptionOnError(tag); + } + + public void startRecord(Record r, String tag) throws IOException { + if (tag != null && !"".equals(tag)) { + printCommaUnlessFirst(); + stream.print("s{"); + isFirst = true; + } + } + + public void endRecord(Record r, String tag) throws IOException { + if (tag == null || "".equals(tag)) { + stream.print("\n"); + isFirst = true; + } else { + stream.print("}"); + isFirst = false; + } + } + + public void startVector(ArrayList v, String tag) throws IOException { + printCommaUnlessFirst(); + stream.print("v{"); + isFirst = true; + } + + public void endVector(ArrayList v, String tag) throws IOException { + stream.print("}"); + isFirst = false; + } + + public void startMap(TreeMap v, String tag) throws IOException { + printCommaUnlessFirst(); + stream.print("m{"); + isFirst = true; + } + + public void endMap(TreeMap v, String tag) throws IOException { + stream.print("}"); + isFirst = false; + } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Record.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Record.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Record.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Record.java Mon Apr 16 14:44:35 2007 @@ -37,7 +37,7 @@ * @param tag record tag 
(Used only in tagged serialization e.g. XML) */ public abstract void serialize(RecordOutput rout, String tag) - throws IOException; + throws IOException; /** * Deserialize a record with a tag (usually field name) @@ -45,7 +45,7 @@ * @param tag Record tag (Used only in tagged serialization e.g. XML) */ public abstract void deserialize(RecordInput rin, String tag) - throws IOException; + throws IOException; // inheric javadoc public abstract int compareTo (final Object peer) throws ClassCastException; Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/RecordOutput.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/RecordOutput.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/RecordOutput.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/RecordOutput.java Mon Apr 16 14:44:35 2007 @@ -91,7 +91,7 @@ * @throws IOException Indicates error in serialization */ public void writeBuffer(Buffer buf, String tag) - throws IOException; + throws IOException; /** * Mark the start of a record to be serialized. Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Utils.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Utils.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Utils.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/Utils.java Mon Apr 16 14:44:35 2007 @@ -35,8 +35,8 @@ } public static final char[] hexchars = { '0', '1', '2', '3', '4', '5', - '6', '7', '8', '9', 'A', 'B', - 'C', 'D', 'E', 'F' }; + '6', '7', '8', '9', 'A', 'B', + 'C', 'D', 'E', 'F' }; /** * * @param s @@ -53,8 +53,8 @@ } else if (ch == '%') { sb.append("%0025"); } else if (ch < 0x20 || - (ch > 0xD7FF && ch < 0xE000) || - (ch > 0xFFFD)) { + (ch > 0xD7FF && ch < 0xE000) || + (ch > 0xFFFD)) { sb.append("%"); sb.append(hexchars[(ch & 0xF000) >> 12]); sb.append(hexchars[(ch & 0x0F00) >> 8]); @@ -113,26 +113,26 @@ for (int i = 0; i < len; i++) { char c = s.charAt(i); switch(c) { - case '\0': - sb.append("%00"); - break; - case '\n': - sb.append("%0A"); - break; - case '\r': - sb.append("%0D"); - break; - case ',': - sb.append("%2C"); - break; - case '}': - sb.append("%7D"); - break; - case '%': - sb.append("%25"); - break; - default: - sb.append(c); + case '\0': + sb.append("%00"); + break; + case '\n': + sb.append("%0A"); + break; + case '\r': + sb.append("%0D"); + break; + case ',': + sb.append("%2C"); + break; + case '}': + sb.append("%7D"); + break; + case '%': + sb.append("%25"); + break; + default: + sb.append(c); } } return sb.toString(); @@ -194,7 +194,7 @@ * @return */ static Buffer fromXMLBuffer(String s) - throws IOException { + throws IOException { if (s.length() == 0) { return new Buffer(); } int blen = s.length()/2; byte[] barr = new byte[blen]; @@ -225,7 +225,7 @@ * @return Deserialized Buffer */ static Buffer fromCSVBuffer(String s) - throws IOException { + throws IOException { if (s.charAt(0) != '#') { throw new IOException("Error deserializing buffer."); } @@ -255,7 +255,7 @@ return 4; } throw new IOException("Illegal Unicode Codepoint "+ - Integer.toHexString(cpt)+" in string."); + Integer.toHexString(cpt)+" in string."); } private static final int B10 = Integer.parseInt("10000000", 2); @@ -268,7 +268,7 
@@ private static final int B11111 = Integer.parseInt("11111000", 2); private static int writeUtf8(int cpt, final byte[] bytes, final int offset) - throws IOException { + throws IOException { if (cpt >=0 && cpt <= 0x7F) { bytes[offset] = (byte) cpt; return 1; @@ -299,11 +299,11 @@ return 4; } throw new IOException("Illegal Unicode Codepoint "+ - Integer.toHexString(cpt)+" in string."); + Integer.toHexString(cpt)+" in string."); } static void toBinaryString(final DataOutput out, final String str) - throws IOException { + throws IOException { final int strlen = str.length(); byte[] bytes = new byte[strlen*4]; // Codepoints expand to 4 bytes max int utf8Len = 0; @@ -319,16 +319,16 @@ static boolean isValidCodePoint(int cpt) { return !((cpt > 0x10FFFF) || - (cpt >= 0xD800 && cpt <= 0xDFFF) || - (cpt >= 0xFFFE && cpt <=0xFFFF)); + (cpt >= 0xD800 && cpt <= 0xDFFF) || + (cpt >= 0xFFFE && cpt <=0xFFFF)); } private static int utf8ToCodePoint(int b1, int b2, int b3, int b4) { int cpt = 0; cpt = (((b1 & ~B11111) << 18) | - ((b2 & ~B11) << 12) | - ((b3 & ~B11) << 6) | - (b4 & ~B11)); + ((b2 & ~B11) << 12) | + ((b3 & ~B11) << 6) | + (b4 & ~B11)); return cpt; } @@ -382,11 +382,11 @@ cpt = utf8ToCodePoint(b1, b2); } else { throw new IOException("Invalid UTF-8 byte "+Integer.toHexString(b1)+ - " at offset "+(len-1)+" in length of "+utf8Len); + " at offset "+(len-1)+" in length of "+utf8Len); } if (!isValidCodePoint(cpt)) { throw new IOException("Illegal Unicode Codepoint "+ - Integer.toHexString(cpt)+" in stream."); + Integer.toHexString(cpt)+" in stream."); } sb.appendCodePoint(cpt); } @@ -485,7 +485,7 @@ /** Lexicographic order of binary data. */ public static int compareBytes(byte[] b1, int s1, int l1, - byte[] b2, int s2, int l2) { + byte[] b2, int s2, int l2) { return WritableComparator.compareBytes(b1, s1, l1, b2, s2, l2); } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordInput.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordInput.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordInput.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordInput.java Mon Apr 16 14:44:35 2007 @@ -33,212 +33,212 @@ */ public class XmlRecordInput implements RecordInput { - static private class Value { - private String type; - private StringBuffer sb; - - public Value(String t) { - type = t; - sb = new StringBuffer(); - } - public void addChars(char[] buf, int offset, int len) { - sb.append(buf, offset, len); - } - public String getValue() { return sb.toString(); } - public String getType() { return type; } - } - - private class XMLParser extends DefaultHandler { - private boolean charsValid = false; - - private ArrayList valList; - - private XMLParser(ArrayList vlist) { - valList = vlist; - } - - public void startDocument() throws SAXException {} - - public void endDocument() throws SAXException {} - - public void startElement(String ns, - String sname, - String qname, - Attributes attrs) throws SAXException { - charsValid = false; - if ("boolean".equals(qname) || - "i4".equals(qname) || - "int".equals(qname) || - "string".equals(qname) || - "double".equals(qname) || - "ex:i1".equals(qname) || - "ex:i8".equals(qname) || - "ex:float".equals(qname)) { - charsValid = true; - valList.add(new Value(qname)); - } else if ("struct".equals(qname) || - "array".equals(qname)) { - 
valList.add(new Value(qname)); - } - } - - public void endElement(String ns, - String sname, - String qname) throws SAXException { - charsValid = false; - if ("struct".equals(qname) || - "array".equals(qname)) { - valList.add(new Value("/"+qname)); - } - } - - public void characters(char buf[], int offset, int len) - throws SAXException { - if (charsValid) { - Value v = valList.get(valList.size()-1); - v.addChars(buf, offset,len); - } - } - - } - - private class XmlIndex implements Index { - public boolean done() { - Value v = valList.get(vIdx); - if ("/array".equals(v.getType())) { - valList.set(vIdx, null); - vIdx++; - return true; - } else { - return false; - } - } - public void incr() {} - } + static private class Value { + private String type; + private StringBuffer sb; + + public Value(String t) { + type = t; + sb = new StringBuffer(); + } + public void addChars(char[] buf, int offset, int len) { + sb.append(buf, offset, len); + } + public String getValue() { return sb.toString(); } + public String getType() { return type; } + } + private class XMLParser extends DefaultHandler { + private boolean charsValid = false; + private ArrayList valList; - private int vLen; - private int vIdx; - - private Value next() throws IOException { - if (vIdx < vLen) { - Value v = valList.get(vIdx); - valList.set(vIdx, null); - vIdx++; - return v; - } else { - throw new IOException("Error in deserialization."); - } - } - - /** Creates a new instance of XmlRecordInput */ - public XmlRecordInput(InputStream in) { - try{ - valList = new ArrayList(); - DefaultHandler handler = new XMLParser(valList); - SAXParserFactory factory = SAXParserFactory.newInstance(); - SAXParser parser = factory.newSAXParser(); - parser.parse(in, handler); - vLen = valList.size(); - vIdx = 0; - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - - public byte readByte(String tag) throws IOException { - Value v = next(); - if (!"ex:i1".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } - return Byte.parseByte(v.getValue()); - } - - public boolean readBool(String tag) throws IOException { - Value v = next(); - if (!"boolean".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } - return "1".equals(v.getValue()); - } - - public int readInt(String tag) throws IOException { - Value v = next(); - if (!"i4".equals(v.getType()) && - !"int".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } - return Integer.parseInt(v.getValue()); - } - - public long readLong(String tag) throws IOException { - Value v = next(); - if (!"ex:i8".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } - return Long.parseLong(v.getValue()); - } - - public float readFloat(String tag) throws IOException { - Value v = next(); - if (!"ex:float".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } - return Float.parseFloat(v.getValue()); - } - - public double readDouble(String tag) throws IOException { - Value v = next(); - if (!"double".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } - return Double.parseDouble(v.getValue()); - } - - public String readString(String tag) throws IOException { - Value v = next(); - if (!"string".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } - return Utils.fromXMLString(v.getValue()); + + private XMLParser(ArrayList vlist) { + valList = vlist; } - - public Buffer readBuffer(String tag) throws 
IOException { - Value v = next(); - if (!"string".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } - return Utils.fromXMLBuffer(v.getValue()); + + public void startDocument() throws SAXException {} + + public void endDocument() throws SAXException {} + + public void startElement(String ns, + String sname, + String qname, + Attributes attrs) throws SAXException { + charsValid = false; + if ("boolean".equals(qname) || + "i4".equals(qname) || + "int".equals(qname) || + "string".equals(qname) || + "double".equals(qname) || + "ex:i1".equals(qname) || + "ex:i8".equals(qname) || + "ex:float".equals(qname)) { + charsValid = true; + valList.add(new Value(qname)); + } else if ("struct".equals(qname) || + "array".equals(qname)) { + valList.add(new Value(qname)); + } } - - public void startRecord(String tag) throws IOException { - Value v = next(); - if (!"struct".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } + + public void endElement(String ns, + String sname, + String qname) throws SAXException { + charsValid = false; + if ("struct".equals(qname) || + "array".equals(qname)) { + valList.add(new Value("/"+qname)); + } } - - public void endRecord(String tag) throws IOException { - Value v = next(); - if (!"/struct".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } + + public void characters(char buf[], int offset, int len) + throws SAXException { + if (charsValid) { + Value v = valList.get(valList.size()-1); + v.addChars(buf, offset,len); + } } + + } - public Index startVector(String tag) throws IOException { - Value v = next(); - if (!"array".equals(v.getType())) { - throw new IOException("Error deserializing "+tag+"."); - } - return new XmlIndex(); + private class XmlIndex implements Index { + public boolean done() { + Value v = valList.get(vIdx); + if ("/array".equals(v.getType())) { + valList.set(vIdx, null); + vIdx++; + return true; + } else { + return false; + } } + public void incr() {} + } - public void endVector(String tag) throws IOException {} - - public Index startMap(String tag) throws IOException { - return startVector(tag); - } + private ArrayList valList; + private int vLen; + private int vIdx; + + private Value next() throws IOException { + if (vIdx < vLen) { + Value v = valList.get(vIdx); + valList.set(vIdx, null); + vIdx++; + return v; + } else { + throw new IOException("Error in deserialization."); + } + } + + /** Creates a new instance of XmlRecordInput */ + public XmlRecordInput(InputStream in) { + try{ + valList = new ArrayList(); + DefaultHandler handler = new XMLParser(valList); + SAXParserFactory factory = SAXParserFactory.newInstance(); + SAXParser parser = factory.newSAXParser(); + parser.parse(in, handler); + vLen = valList.size(); + vIdx = 0; + } catch (Exception ex) { + throw new RuntimeException(ex); + } + } + + public byte readByte(String tag) throws IOException { + Value v = next(); + if (!"ex:i1".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + return Byte.parseByte(v.getValue()); + } + + public boolean readBool(String tag) throws IOException { + Value v = next(); + if (!"boolean".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + return "1".equals(v.getValue()); + } + + public int readInt(String tag) throws IOException { + Value v = next(); + if (!"i4".equals(v.getType()) && + !"int".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + return 
Integer.parseInt(v.getValue()); + } + + public long readLong(String tag) throws IOException { + Value v = next(); + if (!"ex:i8".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + return Long.parseLong(v.getValue()); + } + + public float readFloat(String tag) throws IOException { + Value v = next(); + if (!"ex:float".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + return Float.parseFloat(v.getValue()); + } + + public double readDouble(String tag) throws IOException { + Value v = next(); + if (!"double".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + return Double.parseDouble(v.getValue()); + } + + public String readString(String tag) throws IOException { + Value v = next(); + if (!"string".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + return Utils.fromXMLString(v.getValue()); + } + + public Buffer readBuffer(String tag) throws IOException { + Value v = next(); + if (!"string".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + return Utils.fromXMLBuffer(v.getValue()); + } + + public void startRecord(String tag) throws IOException { + Value v = next(); + if (!"struct".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + } + + public void endRecord(String tag) throws IOException { + Value v = next(); + if (!"/struct".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + } + + public Index startVector(String tag) throws IOException { + Value v = next(); + if (!"array".equals(v.getType())) { + throw new IOException("Error deserializing "+tag+"."); + } + return new XmlIndex(); + } + + public void endVector(String tag) throws IOException {} + + public Index startMap(String tag) throws IOException { + return startVector(tag); + } - public void endMap(String tag) throws IOException { endVector(tag); } + public void endMap(String tag) throws IOException { endVector(tag); } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordOutput.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordOutput.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordOutput.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/XmlRecordOutput.java Mon Apr 16 14:44:35 2007 @@ -32,218 +32,218 @@ */ public class XmlRecordOutput implements RecordOutput { - private PrintStream stream; + private PrintStream stream; - private int indent = 0; + private int indent = 0; - private Stack compoundStack; + private Stack compoundStack; - private void putIndent() { - StringBuffer sb = new StringBuffer(""); - for (int idx = 0; idx < indent; idx++) { - sb.append(" "); - } - stream.print(sb.toString()); - } - - private void addIndent() { - indent++; - } - - private void closeIndent() { - indent--; - } - - private void printBeginEnvelope(String tag) { - if (!compoundStack.empty()) { - String s = compoundStack.peek(); - if ("struct".equals(s)) { - putIndent(); - stream.print("\n"); - addIndent(); - putIndent(); - stream.print(""+tag+"\n"); - putIndent(); - stream.print(""); - } else if ("vector".equals(s)) { - stream.print(""); - } else if ("map".equals(s)) { - stream.print(""); - } - } else { - stream.print(""); - } - } - - private void printEndEnvelope(String tag) { - if 
(!compoundStack.empty()) { - String s = compoundStack.peek(); - if ("struct".equals(s)) { - stream.print("\n"); - closeIndent(); - putIndent(); - stream.print("\n"); - } else if ("vector".equals(s)) { - stream.print("\n"); - } else if ("map".equals(s)) { - stream.print("\n"); - } - } else { - stream.print("\n"); - } - } - - private void insideVector(String tag) { - printBeginEnvelope(tag); - compoundStack.push("vector"); - } - - private void outsideVector(String tag) throws IOException { - String s = compoundStack.pop(); - if (!"vector".equals(s)) { - throw new IOException("Error serializing vector."); - } - printEndEnvelope(tag); - } - - private void insideMap(String tag) { - printBeginEnvelope(tag); - compoundStack.push("map"); - } - - private void outsideMap(String tag) throws IOException { - String s = compoundStack.pop(); - if (!"map".equals(s)) { - throw new IOException("Error serializing map."); - } - printEndEnvelope(tag); - } - - private void insideRecord(String tag) { - printBeginEnvelope(tag); - compoundStack.push("struct"); - } - - private void outsideRecord(String tag) throws IOException { - String s = compoundStack.pop(); - if (!"struct".equals(s)) { - throw new IOException("Error serializing record."); - } - printEndEnvelope(tag); - } - - /** Creates a new instance of XmlRecordOutput */ - public XmlRecordOutput(OutputStream out) { - try { - stream = new PrintStream(out, true, "UTF-8"); - compoundStack = new Stack(); - } catch (UnsupportedEncodingException ex) { - throw new RuntimeException(ex); - } - } - - public void writeByte(byte b, String tag) throws IOException { - printBeginEnvelope(tag); - stream.print(""); - stream.print(Byte.toString(b)); - stream.print(""); - printEndEnvelope(tag); - } - - public void writeBool(boolean b, String tag) throws IOException { - printBeginEnvelope(tag); - stream.print(""); - stream.print(b ? 
"1" : "0"); - stream.print(""); - printEndEnvelope(tag); - } - - public void writeInt(int i, String tag) throws IOException { - printBeginEnvelope(tag); - stream.print(""); - stream.print(Integer.toString(i)); - stream.print(""); - printEndEnvelope(tag); - } - - public void writeLong(long l, String tag) throws IOException { - printBeginEnvelope(tag); - stream.print(""); - stream.print(Long.toString(l)); - stream.print(""); - printEndEnvelope(tag); - } - - public void writeFloat(float f, String tag) throws IOException { - printBeginEnvelope(tag); - stream.print(""); - stream.print(Float.toString(f)); - stream.print(""); - printEndEnvelope(tag); - } - - public void writeDouble(double d, String tag) throws IOException { - printBeginEnvelope(tag); - stream.print(""); - stream.print(Double.toString(d)); - stream.print(""); - printEndEnvelope(tag); - } - - public void writeString(String s, String tag) throws IOException { - printBeginEnvelope(tag); - stream.print(""); - stream.print(Utils.toXMLString(s)); - stream.print(""); - printEndEnvelope(tag); + private void putIndent() { + StringBuffer sb = new StringBuffer(""); + for (int idx = 0; idx < indent; idx++) { + sb.append(" "); } + stream.print(sb.toString()); + } - public void writeBuffer(Buffer buf, String tag) - throws IOException { - printBeginEnvelope(tag); - stream.print(""); - stream.print(Utils.toXMLBuffer(buf)); - stream.print(""); - printEndEnvelope(tag); - } + private void addIndent() { + indent++; + } - public void startRecord(Record r, String tag) throws IOException { - insideRecord(tag); - stream.print("\n"); - addIndent(); - } + private void closeIndent() { + indent--; + } - public void endRecord(Record r, String tag) throws IOException { - closeIndent(); + private void printBeginEnvelope(String tag) { + if (!compoundStack.empty()) { + String s = compoundStack.peek(); + if ("struct".equals(s)) { putIndent(); - stream.print(""); - outsideRecord(tag); - } - - public void startVector(ArrayList v, String tag) throws IOException { - insideVector(tag); - stream.print("\n"); + stream.print("\n"); addIndent(); + putIndent(); + stream.print(""+tag+"\n"); + putIndent(); + stream.print(""); + } else if ("vector".equals(s)) { + stream.print(""); + } else if ("map".equals(s)) { + stream.print(""); + } + } else { + stream.print(""); } + } - public void endVector(ArrayList v, String tag) throws IOException { + private void printEndEnvelope(String tag) { + if (!compoundStack.empty()) { + String s = compoundStack.peek(); + if ("struct".equals(s)) { + stream.print("\n"); closeIndent(); putIndent(); - stream.print(""); - outsideVector(tag); + stream.print("\n"); + } else if ("vector".equals(s)) { + stream.print("\n"); + } else if ("map".equals(s)) { + stream.print("\n"); + } + } else { + stream.print("\n"); } + } - public void startMap(TreeMap v, String tag) throws IOException { - insideMap(tag); - stream.print("\n"); - addIndent(); - } + private void insideVector(String tag) { + printBeginEnvelope(tag); + compoundStack.push("vector"); + } + + private void outsideVector(String tag) throws IOException { + String s = compoundStack.pop(); + if (!"vector".equals(s)) { + throw new IOException("Error serializing vector."); + } + printEndEnvelope(tag); + } + + private void insideMap(String tag) { + printBeginEnvelope(tag); + compoundStack.push("map"); + } + + private void outsideMap(String tag) throws IOException { + String s = compoundStack.pop(); + if (!"map".equals(s)) { + throw new IOException("Error serializing map."); + } + printEndEnvelope(tag); + 
} + + private void insideRecord(String tag) { + printBeginEnvelope(tag); + compoundStack.push("struct"); + } + + private void outsideRecord(String tag) throws IOException { + String s = compoundStack.pop(); + if (!"struct".equals(s)) { + throw new IOException("Error serializing record."); + } + printEndEnvelope(tag); + } + + /** Creates a new instance of XmlRecordOutput */ + public XmlRecordOutput(OutputStream out) { + try { + stream = new PrintStream(out, true, "UTF-8"); + compoundStack = new Stack(); + } catch (UnsupportedEncodingException ex) { + throw new RuntimeException(ex); + } + } + + public void writeByte(byte b, String tag) throws IOException { + printBeginEnvelope(tag); + stream.print(""); + stream.print(Byte.toString(b)); + stream.print(""); + printEndEnvelope(tag); + } + + public void writeBool(boolean b, String tag) throws IOException { + printBeginEnvelope(tag); + stream.print(""); + stream.print(b ? "1" : "0"); + stream.print(""); + printEndEnvelope(tag); + } + + public void writeInt(int i, String tag) throws IOException { + printBeginEnvelope(tag); + stream.print(""); + stream.print(Integer.toString(i)); + stream.print(""); + printEndEnvelope(tag); + } + + public void writeLong(long l, String tag) throws IOException { + printBeginEnvelope(tag); + stream.print(""); + stream.print(Long.toString(l)); + stream.print(""); + printEndEnvelope(tag); + } + + public void writeFloat(float f, String tag) throws IOException { + printBeginEnvelope(tag); + stream.print(""); + stream.print(Float.toString(f)); + stream.print(""); + printEndEnvelope(tag); + } + + public void writeDouble(double d, String tag) throws IOException { + printBeginEnvelope(tag); + stream.print(""); + stream.print(Double.toString(d)); + stream.print(""); + printEndEnvelope(tag); + } + + public void writeString(String s, String tag) throws IOException { + printBeginEnvelope(tag); + stream.print(""); + stream.print(Utils.toXMLString(s)); + stream.print(""); + printEndEnvelope(tag); + } - public void endMap(TreeMap v, String tag) throws IOException { - closeIndent(); - putIndent(); - stream.print(""); - outsideMap(tag); - } + public void writeBuffer(Buffer buf, String tag) + throws IOException { + printBeginEnvelope(tag); + stream.print(""); + stream.print(Utils.toXMLBuffer(buf)); + stream.print(""); + printEndEnvelope(tag); + } + + public void startRecord(Record r, String tag) throws IOException { + insideRecord(tag); + stream.print("\n"); + addIndent(); + } + + public void endRecord(Record r, String tag) throws IOException { + closeIndent(); + putIndent(); + stream.print(""); + outsideRecord(tag); + } + + public void startVector(ArrayList v, String tag) throws IOException { + insideVector(tag); + stream.print("\n"); + addIndent(); + } + + public void endVector(ArrayList v, String tag) throws IOException { + closeIndent(); + putIndent(); + stream.print(""); + outsideVector(tag); + } + + public void startMap(TreeMap v, String tag) throws IOException { + insideMap(tag); + stream.print("\n"); + addIndent(); + } + + public void endMap(TreeMap v, String tag) throws IOException { + closeIndent(); + putIndent(); + stream.print(""); + outsideMap(tag); + } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CGenerator.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CGenerator.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- 
lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CGenerator.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CGenerator.java Mon Apr 16 14:44:35 2007 @@ -40,8 +40,8 @@ * record-level code is generated by JRecord. */ void genCode(String name, ArrayList ilist, - ArrayList rlist, String destDir, ArrayList options) - throws IOException { + ArrayList rlist, String destDir, ArrayList options) + throws IOException { name = new File(destDir, (new File(name)).getName()).getAbsolutePath(); FileWriter cc = new FileWriter(name+".c"); FileWriter hh = new FileWriter(name+".h"); @@ -56,9 +56,9 @@ cc.write("#include \""+name+".h\"\n"); /* - for (Iterator iter = rlist.iterator(); iter.hasNext();) { + for (Iterator iter = rlist.iterator(); iter.hasNext();) { iter.next().genCppCode(hh, cc); - } + } */ hh.write("#endif //"+name.toUpperCase().replace('.','_')+"__\n"); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CppGenerator.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CppGenerator.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CppGenerator.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/CppGenerator.java Mon Apr 16 14:44:35 2007 @@ -40,8 +40,8 @@ * record-level code is generated by JRecord. */ void genCode(String name, ArrayList ilist, - ArrayList rlist, String destDir, ArrayList options) - throws IOException { + ArrayList rlist, String destDir, ArrayList options) + throws IOException { name = new File(destDir, (new File(name)).getName()).getAbsolutePath(); FileWriter cc = new FileWriter(name+".cc"); FileWriter hh = new FileWriter(name+".hh"); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBoolean.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBoolean.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBoolean.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JBoolean.java Mon Apr 16 14:44:35 2007 @@ -46,7 +46,7 @@ cb.append("{\n"); cb.append("if ("+l+"<1) {\n"); cb.append("throw new java.io.IOException(\"Boolean is exactly 1 byte."+ - " Provided buffer is smaller.\");\n"); + " Provided buffer is smaller.\");\n"); cb.append("}\n"); cb.append(s+"++; "+l+"--;\n"); cb.append("}\n"); @@ -57,7 +57,7 @@ cb.append("{\n"); cb.append("if (l1<1 || l2<1) {\n"); cb.append("throw new java.io.IOException(\"Boolean is exactly 1 byte."+ - " Provided buffer is smaller.\");\n"); + " Provided buffer is smaller.\");\n"); cb.append("}\n"); cb.append("if (b1[s1] != b2[s2]) {\n"); cb.append("return (b1[s1] mInclFiles; - /** Ordered list of records declared in this file */ - private ArrayList mRecords; + /** Possibly full name of the file */ + private String mName; + /** Ordered list of included files */ + private ArrayList mInclFiles; + /** Ordered list of records declared in this file */ + private ArrayList mRecords; - /** Creates a new instance of JFile - * - * @param name possibly full pathname to the file - * @param inclFiles included files (as JFile) - * @param recList List of records defined within this file - */ - public JFile(String name, ArrayList inclFiles, - 
ArrayList recList) { - mName = name; - mInclFiles = inclFiles; - mRecords = recList; - } + /** Creates a new instance of JFile + * + * @param name possibly full pathname to the file + * @param inclFiles included files (as JFile) + * @param recList List of records defined within this file + */ + public JFile(String name, ArrayList inclFiles, + ArrayList recList) { + mName = name; + mInclFiles = inclFiles; + mRecords = recList; + } - /** Strip the other pathname components and return the basename */ - String getName() { - int idx = mName.lastIndexOf('/'); - return (idx > 0) ? mName.substring(idx) : mName; - } + /** Strip the other pathname components and return the basename */ + String getName() { + int idx = mName.lastIndexOf('/'); + return (idx > 0) ? mName.substring(idx) : mName; + } - /** Generate record code in given language. Language should be all - * lowercase. - */ - public int genCode(String language, String destDir, ArrayList options) + /** Generate record code in given language. Language should be all + * lowercase. + */ + public int genCode(String language, String destDir, ArrayList options) throws IOException { - CodeGenerator gen = CodeGenerator.get(language); - if (gen != null) { - gen.genCode(mName, mInclFiles, mRecords, destDir, options); - } else { - System.err.println("Cannnot recognize language:"+language); - return 1; - } - return 0; + CodeGenerator gen = CodeGenerator.get(language); + if (gen != null) { + gen.genCode(mName, mInclFiles, mRecords, destDir, options); + } else { + System.err.println("Cannnot recognize language:"+language); + return 1; } + return 0; + } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JFloat.java Mon Apr 16 14:44:35 2007 @@ -41,7 +41,7 @@ cb.append("{\n"); cb.append("if ("+l+"<4) {\n"); cb.append("throw new java.io.IOException(\"Float is exactly 4 bytes."+ - " Provided buffer is smaller.\");\n"); + " Provided buffer is smaller.\");\n"); cb.append("}\n"); cb.append(s+"+=4; "+l+"-=4;\n"); cb.append("}\n"); @@ -51,7 +51,7 @@ cb.append("{\n"); cb.append("if (l1<4 || l2<4) {\n"); cb.append("throw new java.io.IOException(\"Float is exactly 4 bytes."+ - " Provided buffer is smaller.\");\n"); + " Provided buffer is smaller.\");\n"); cb.append("}\n"); cb.append("float f1 = org.apache.hadoop.record.Utils.readFloat(b1, s1);\n"); cb.append("float f2 = org.apache.hadoop.record.Utils.readFloat(b2, s2);\n"); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JMap.java Mon Apr 16 14:44:35 2007 @@ -44,8 +44,8 @@ JavaMap(JType.JavaType key, JType.JavaType value) { super("java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">", - "Map", - "java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">"); + "Map", 
+ "java.util.TreeMap<"+key.getWrapperType()+","+value.getWrapperType()+">"); this.key = key; this.value = value; } @@ -57,15 +57,15 @@ cb.append(setType+getId("set1")+" = "+fname+".keySet();\n"); cb.append(setType+getId("set2")+" = "+other+".keySet();\n"); cb.append(iterType+getId("miter1")+" = "+ - getId("set1")+".iterator();\n"); + getId("set1")+".iterator();\n"); cb.append(iterType+getId("miter2")+" = "+ - getId("set2")+".iterator();\n"); + getId("set2")+".iterator();\n"); cb.append("for(; "+getId("miter1")+".hasNext() && "+ - getId("miter2")+".hasNext(); ) {\n"); + getId("miter2")+".hasNext(); ) {\n"); cb.append(key.getType()+" "+getId("k1")+ - " = "+getId("miter1")+".next();\n"); + " = "+getId("miter1")+".next();\n"); cb.append(key.getType()+" "+getId("k2")+ - " = "+getId("miter2")+".next();\n"); + " = "+getId("miter2")+".next();\n"); key.genCompareTo(cb, getId("k1"), getId("k2")); cb.append("if (ret != 0) { return ret; }\n"); cb.append("}\n"); @@ -93,11 +93,11 @@ void genWriteMethod(CodeBuffer cb, String fname, String tag) { String setType = "java.util.Set> "; + key.getWrapperType()+","+value.getWrapperType()+">> "; String entryType = "java.util.Map.Entry<"+ - key.getWrapperType()+","+value.getWrapperType()+"> "; + key.getWrapperType()+","+value.getWrapperType()+"> "; String iterType = "java.util.Iterator> "; + key.getWrapperType()+","+value.getWrapperType()+">> "; cb.append("{\n"); incrLevel(); cb.append("a.startMap("+fname+",\""+tag+"\");\n"); @@ -118,12 +118,12 @@ cb.append("{\n"); incrLevel(); cb.append("int "+getId("mi")+ - " = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n"); + " = org.apache.hadoop.record.Utils.readVInt("+b+", "+s+");\n"); cb.append("int "+getId("mz")+ - " = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi")+");\n"); + " = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi")+");\n"); cb.append(s+"+="+getId("mz")+"; "+l+"-="+getId("mz")+";\n"); cb.append("for (int "+getId("midx")+" = 0; "+getId("midx")+ - " < "+getId("mi")+"; "+getId("midx")+"++) {"); + " < "+getId("mi")+"; "+getId("midx")+"++) {"); key.genSlurpBytes(cb, b,s,l); value.genSlurpBytes(cb, b,s,l); cb.append("}\n"); @@ -135,24 +135,24 @@ cb.append("{\n"); incrLevel(); cb.append("int "+getId("mi1")+ - " = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n"); + " = org.apache.hadoop.record.Utils.readVInt(b1, s1);\n"); cb.append("int "+getId("mi2")+ - " = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n"); + " = org.apache.hadoop.record.Utils.readVInt(b2, s2);\n"); cb.append("int "+getId("mz1")+ - " = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi1")+");\n"); + " = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi1")+");\n"); cb.append("int "+getId("mz2")+ - " = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi2")+");\n"); + " = org.apache.hadoop.record.Utils.getVIntSize("+getId("mi2")+");\n"); cb.append("s1+="+getId("mz1")+"; s2+="+getId("mz2")+ - "; l1-="+getId("mz1")+"; l2-="+getId("mz2")+";\n"); + "; l1-="+getId("mz1")+"; l2-="+getId("mz2")+";\n"); cb.append("for (int "+getId("midx")+" = 0; "+getId("midx")+ - " < "+getId("mi1")+" && "+getId("midx")+" < "+getId("mi2")+ - "; "+getId("midx")+"++) {"); + " < "+getId("mi1")+" && "+getId("midx")+" < "+getId("mi2")+ + "; "+getId("midx")+"++) {"); key.genCompareBytes(cb); value.genSlurpBytes(cb, "b1", "s1", "l1"); value.genSlurpBytes(cb, "b2", "s2", "l2"); cb.append("}\n"); cb.append("if ("+getId("mi1")+" != "+getId("mi2")+ - ") { return ("+getId("mi1")+"<"+getId("mi2")+")?-1:0; }\n"); + ") { return 
("+getId("mi1")+"<"+getId("mi2")+")?-1:0; }\n"); decrLevel(); cb.append("}\n"); } @@ -162,7 +162,7 @@ public JMap(JType t1, JType t2) { setJavaType(new JavaMap(t1.getJavaType(), t2.getJavaType())); setCppType(new CppCompType(" ::std::map<"+t1.getCppType().getType()+","+ - t2.getCppType().getType()+">")); + t2.getCppType().getType()+">")); setCType(new CType()); keyType = t1; valueType = t2; Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JRecord.java Mon Apr 16 14:44:35 2007 @@ -36,7 +36,7 @@ private String name; private String module; private ArrayList> fields = - new ArrayList>(); + new ArrayList>(); JavaRecord(String name, ArrayList> flist) { super(name, "Record", name); @@ -65,7 +65,7 @@ void genSlurpBytes(CodeBuffer cb, String b, String s, String l) { cb.append("{\n"); cb.append("int r = "+fullName+ - ".Comparator.slurpRaw("+b+","+s+","+l+");\n"); + ".Comparator.slurpRaw("+b+","+s+","+l+");\n"); cb.append(s+"+=r; "+l+"-=r;\n"); cb.append("}\n"); } @@ -73,7 +73,7 @@ void genCompareBytes(CodeBuffer cb) { cb.append("{\n"); cb.append("int r1 = "+fullName+ - ".Comparator.compareRaw(b1,s1,l1,b2,s2,l2);\n"); + ".Comparator.compareRaw(b1,s1,l1,b2,s2,l2);\n"); cb.append("if (r1 <= 0) { return r1; }\n"); cb.append("s1+=r1; s2+=r1; l1-=r1; l2-=r1;\n"); cb.append("}\n"); @@ -100,7 +100,7 @@ cb.append("// File generated by hadoop record compiler. 
Do not edit.\n"); cb.append("package "+module+";\n\n"); cb.append("public class "+name+ - " extends org.apache.hadoop.record.Record {\n"); + " extends org.apache.hadoop.record.Record {\n"); for (Iterator> i = fields.iterator(); i.hasNext();) { JField jf = i.next(); @@ -136,8 +136,8 @@ type.genGetSet(cb, name); } cb.append("public void serialize("+ - "final org.apache.hadoop.record.RecordOutput a, final String tag)\n"+ - "throws java.io.IOException {\n"); + "final org.apache.hadoop.record.RecordOutput a, final String tag)\n"+ + "throws java.io.IOException {\n"); cb.append("a.startRecord(this,tag);\n"); for (Iterator> i = fields.iterator(); i.hasNext();) { JField jf = i.next(); @@ -149,8 +149,8 @@ cb.append("}\n"); cb.append("public void deserialize("+ - "final org.apache.hadoop.record.RecordInput a, final String tag)\n"+ - "throws java.io.IOException {\n"); + "final org.apache.hadoop.record.RecordInput a, final String tag)\n"+ + "throws java.io.IOException {\n"); cb.append("a.startRecord(tag);\n"); for (Iterator> i = fields.iterator(); i.hasNext();) { JField jf = i.next(); @@ -225,7 +225,7 @@ cb.append("}\n"); cb.append("public static class Comparator extends"+ - " org.apache.hadoop.record.RecordComparator {\n"); + " org.apache.hadoop.record.RecordComparator {\n"); cb.append("public Comparator() {\n"); cb.append("super("+name+".class);\n"); cb.append("}\n"); @@ -268,7 +268,7 @@ cb.append("}\n\n"); cb.append("static {\n"); cb.append("org.apache.hadoop.record.RecordComparator.define(" - +name+".class, new Comparator());\n"); + +name+".class, new Comparator());\n"); cb.append("}\n"); cb.append("}\n"); @@ -283,7 +283,7 @@ private String name; private String module; private ArrayList> fields = - new ArrayList>(); + new ArrayList>(); CppRecord(String name, ArrayList> flist) { super(name.replaceAll("\\.","::")); @@ -302,7 +302,7 @@ } void genCode(FileWriter hh, FileWriter cc, ArrayList options) - throws IOException { + throws IOException { CodeBuffer hb = new CodeBuffer(); String[] ns = module.split("::"); @@ -445,7 +445,7 @@ } void genJavaCode(String destDir, ArrayList options) - throws IOException { + throws IOException { ((JavaRecord)getJavaType()).genCode(destDir, options); } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java?view=diff&rev=529410&r1=529409&r2=529410 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JString.java Mon Apr 16 14:44:35 2007 @@ -58,14 +58,14 @@ cb.append("other."+fname+" = this."+fname+";\n"); } } - /** Creates a new instance of JString */ - public JString() { - setJavaType(new JavaString()); - setCppType(new CppCompType(" ::std::string")); - setCType(new CCompType()); - } + /** Creates a new instance of JString */ + public JString() { + setJavaType(new JavaString()); + setCppType(new CppCompType(" ::std::string")); + setCType(new CCompType()); + } - String getSignature() { - return "s"; - } + String getSignature() { + return "s"; + } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java?view=diff&rev=529410&r1=529409&r2=529410 
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/JType.java Mon Apr 16 14:44:35 2007
@@ -43,8 +43,8 @@
     private String wrapper;

     JavaType(String javaname,
-        String suffix,
-        String wrapper) {
+             String suffix,
+             String wrapper) {
       this.name = javaname;
       this.methodSuffix = suffix;
       this.wrapper = wrapper;
@@ -92,7 +92,7 @@

     void genCompareTo(CodeBuffer cb, String fname, String other) {
       cb.append("ret = ("+fname+" == "+other+")? 0 :(("+fname+"<"+other+
-          ")?-1:1);\n");
+                ")?-1:1);\n");
     }

     abstract void genCompareBytes(CodeBuffer cb);
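
The JType.genCompareTo change above only re-indents a continuation line, but the string it assembles is easy to misread once the diff has been re-flowed. As a rough illustration only (the class CompareToSketch and the int field mId are hypothetical, not part of this commit), the appended text expands into a three-way primitive comparison of this shape:

  // Hypothetical expansion of the code emitted by JType.genCompareTo for an
  // int field named mId, with fname = "mId" and other = "peer.mId".
  public class CompareToSketch {
    private int mId;

    int compareTo(CompareToSketch peer) {
      int ret;
      // Generated shape: ret = (<fname> == <other>)? 0 : ((<fname> < <other>) ? -1 : 1);
      ret = (mId == peer.mId) ? 0 : ((mId < peer.mId) ? -1 : 1);
      return ret;
    }
  }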
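
For context on the XmlRecordOutput re-indentation earlier in this message, here is a minimal usage sketch. It relies only on the constructor and writer methods visible in the diff; the demo class name and field tags are made up, the XML element names printed between the envelope calls are elided in this mail, and null is passed for the Record argument because the methods shown ignore it.

  import java.io.ByteArrayOutputStream;
  import org.apache.hadoop.record.XmlRecordOutput;

  public class XmlRecordOutputSketch {
    public static void main(String[] args) throws Exception {
      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      // Wraps the stream in a UTF-8 PrintStream, as in the constructor above.
      XmlRecordOutput out = new XmlRecordOutput(bytes);

      // startRecord/endRecord ignore their Record argument, so null is passed
      // here purely for illustration; generated record classes pass "this".
      out.startRecord(null, "employee");
      out.writeString("Alice", "name");   // escaped via Utils.toXMLString
      out.writeInt(42, "id");
      out.writeBool(true, "active");      // booleans are serialized as 1/0
      out.endRecord(null, "employee");

      System.out.println(bytes.toString("UTF-8"));
    }
  }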