avro-commits mailing list archives

From cutt...@apache.org
Subject svn commit: r825270 - in /hadoop/avro/trunk: ./ src/java/org/apache/avro/ src/java/org/apache/avro/file/ src/java/org/apache/avro/generic/ src/java/org/apache/avro/io/ src/java/org/apache/avro/ipc/ src/test/java/org/apache/avro/ src/test/java/org/apach...
Date Wed, 14 Oct 2009 20:46:57 GMT
Author: cutting
Date: Wed Oct 14 20:46:55 2009
New Revision: 825270

URL: http://svn.apache.org/viewvc?rev=825270&view=rev
Log:
AVRO-142.  Remove some Java unused fields and imports.  Contributed by Philip Zeyliger.

Added:
    hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java   (contents, props changed)
      - copied, changed from r824967, hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java
Removed:
    hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java
Modified:
    hadoop/avro/trunk/CHANGES.txt
    hadoop/avro/trunk/build.xml
    hadoop/avro/trunk/src/java/org/apache/avro/Schema.java
    hadoop/avro/trunk/src/java/org/apache/avro/file/DataFileReader.java
    hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericRequestor.java
    hadoop/avro/trunk/src/java/org/apache/avro/io/BlockingBinaryEncoder.java
    hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/FooRecord.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/GenerateBlockingData.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/RandomData.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolHttp.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java

Modified: hadoop/avro/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Wed Oct 14 20:46:55 2009
@@ -8,6 +8,9 @@
 
   IMPROVEMENTS
 
+    AVRO-142. Remove some Java unused fields and imports.  Start
+    running checkstyle on Java test code.  (Philip Zeyliger via cutting)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/avro/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/build.xml?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/build.xml (original)
+++ hadoop/avro/trunk/build.xml Wed Oct 14 20:46:55 2009
@@ -207,8 +207,11 @@
     <checkstyle config="${java.src.dir}/checkstyle.xml"
 		classpathref="test.java.classpath">
 	<fileset dir="${java.src.dir}">
-        <include name="**/*.java"/>
-    </fileset>
+          <include name="**/*.java"/>
+        </fileset>
+	<fileset dir="${test.java.src.dir}">
+          <include name="**/*.java"/>
+        </fileset>
     </checkstyle>
   </target>
 

Modified: hadoop/avro/trunk/src/java/org/apache/avro/Schema.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/Schema.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/Schema.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/Schema.java Wed Oct 14 20:46:55 2009
@@ -266,7 +266,6 @@
   private static class Name {
     private String name;
     private String space;
-    public Name(Schema s) { this(s.getName(), s.getNamespace()); }
     public Name(String name, String space) {
       if (name == null) return;                   // anonymous
       int lastDot = name.lastIndexOf('.');

Modified: hadoop/avro/trunk/src/java/org/apache/avro/file/DataFileReader.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/file/DataFileReader.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/file/DataFileReader.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/file/DataFileReader.java Wed Oct 14 20:46:55 2009
@@ -89,7 +89,16 @@
 
     in.seek(DataFileConstants.MAGIC.length);         // seek to start
   }
+  
 
+  /**
+   * Return the number of records in the file, according
+   * to its metadata.
+   */
+  public long getCount() {
+    return count;
+  }
+  
   /** Return the value of a metadata property. */
   public synchronized byte[] getMeta(String key) {
     return meta.get(key);
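
[Editorial note, not part of the commit: the hunk above adds a getCount() accessor to DataFileReader that reports the record count stored in the file's metadata. Below is a minimal, hypothetical Java sketch of how it might be used; the CountRecords class name and the command-line argument are illustrative assumptions, and the import packages for SeekableFileInput and GenericDatumReader are assumed from their use elsewhere in this commit. The constructor call mirrors the one in TestDataFile further down; resource cleanup is omitted for brevity.]

    // Hypothetical usage sketch (not part of this commit).
    import java.io.File;
    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.file.SeekableFileInput;
    import org.apache.avro.generic.GenericDatumReader;

    public class CountRecords {
      public static void main(String[] args) throws Exception {
        File file = new File(args[0]);  // path to an existing Avro data file
        DataFileReader<Object> reader =
          new DataFileReader<Object>(new SeekableFileInput(file),
                                     new GenericDatumReader<Object>());
        // getCount() returns the number of records according to the file's metadata.
        System.out.println("records: " + reader.getCount());
      }
    }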

Modified: hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericRequestor.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericRequestor.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericRequestor.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericRequestor.java Wed Oct 14 20:46:55 2009
@@ -27,14 +27,9 @@
 import org.apache.avro.ipc.AvroRemoteException;
 import org.apache.avro.ipc.Requestor;
 import org.apache.avro.ipc.Transceiver;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** {@link Requestor} implementation for generic Java data. */
 public class GenericRequestor extends Requestor {
-  private static final Logger LOG
-    = LoggerFactory.getLogger(GenericRequestor.class);
-
   public GenericRequestor(Protocol protocol, Transceiver transceiver)
     throws IOException {
     super(protocol, transceiver);

Modified: hadoop/avro/trunk/src/java/org/apache/avro/io/BlockingBinaryEncoder.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/io/BlockingBinaryEncoder.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/io/BlockingBinaryEncoder.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/io/BlockingBinaryEncoder.java Wed Oct 14 20:46:55 2009
@@ -118,15 +118,6 @@
       this.start = this.lastFullItem = 0;
       this.items = 1; // Makes various assertions work out
     }
-
-    /** Create a REGULAR instance.  (Gets changed to OVERFLOW by
-     * {@link #compact}.) */
-    public BlockedValue(Schema.Type type, int start) {
-      this.type = type;
-      this.state = State.REGULAR;
-      this.start = this.lastFullItem = start;
-      this.items = 0;
-    }
     
     /**
      * Check invariants of <code>this</code> and also the

Modified: hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java Wed Oct 14 20:46:55 2009
@@ -28,14 +28,8 @@
 import java.net.URL;
 import java.net.URLConnection;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /** An HTTP-based {@link Transceiver} implementation. */
 public class HttpTransceiver extends Transceiver {
-  private static final Logger LOG
-    = LoggerFactory.getLogger(HttpTransceiver.class);
-
   static final String CONTENT_TYPE = "avro/binary"; 
 
   private URL url;

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java Wed Oct 14 20:46:55 2009
@@ -50,4 +50,4 @@
   public String toString() {
     return BarRecord.class.getSimpleName() + "{msg=" + beerMsg + "}";
   }
-}
\ No newline at end of file
+}

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/FooRecord.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/FooRecord.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/FooRecord.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/FooRecord.java Wed Oct 14 20:46:55 2009
@@ -44,4 +44,4 @@
   public String toString() {
     return FooRecord.class.getSimpleName() + "{count=" + fooCount + "}";
   }
-}
\ No newline at end of file
+}

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/GenerateBlockingData.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/GenerateBlockingData.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/GenerateBlockingData.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/GenerateBlockingData.java Wed Oct 14 20:46:55 2009
@@ -60,10 +60,10 @@
     }
     
     Schema sch = Schema.parse(new File(args[0]));
-    File FILE = new File(args[1]);
+    File outputFile = new File(args[1]);
     int numObjects = Integer.parseInt(args[2]);
     
-    FileOutputStream out = new FileOutputStream(FILE, false);
+    FileOutputStream out = new FileOutputStream(outputFile, false);
     DatumWriter<Object> dout = new GenericDatumWriter<Object>();
     dout.setSchema(sch);
     Encoder vout = new BinaryEncoder(out);

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/RandomData.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/RandomData.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/RandomData.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/RandomData.java Wed Oct 14 20:46:55 2009
@@ -17,6 +17,15 @@
  */
 package org.apache.avro;
 
+import java.io.File;
+import java.io.FileOutputStream;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericArray;
 import org.apache.avro.generic.GenericData;
@@ -24,11 +33,6 @@
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.util.Utf8;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.nio.ByteBuffer;
-import java.util.*;
-
 /** Generates schema data as Java objects with random values. */
 public class RandomData implements Iterable<Object> {
   private final Schema root;

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java Wed Oct 14 20:46:55 2009
@@ -19,9 +19,15 @@
 
 import org.apache.avro.specific.SpecificRequestor;
 import org.apache.avro.specific.SpecificResponder;
-import org.apache.avro.ipc.*;
-import org.apache.avro.util.Utf8;
-import org.junit.*;
+import org.apache.avro.ipc.HttpServer;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.Transceiver;
+
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.net.URL;
@@ -54,7 +60,6 @@
 
   private static Server server;
   private static Transceiver client;
-  private static Requestor requestor;
   private static BulkData proxy;
 
   @Before

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestCompare.java Wed Oct 14 20:46:55 2009
@@ -19,13 +19,10 @@
 
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
-import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.util.HashMap;
 
 import org.apache.avro.generic.GenericArray;
 import org.apache.avro.generic.GenericData;
@@ -34,12 +31,10 @@
 import org.apache.avro.specific.SpecificDatumWriter;
 import org.apache.avro.io.BinaryData;
 import org.apache.avro.io.DatumWriter;
-import org.apache.avro.io.Encoder;
 import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.util.Utf8;
 
 import org.apache.avro.test.TestRecord;
-import org.apache.avro.test.Simple;
 import org.apache.avro.test.Kind;
 import org.apache.avro.test.MD5;
 

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java Wed Oct 14 20:46:55 2009
@@ -72,6 +72,7 @@
       new DataFileReader<Object>(new SeekableFileInput(FILE),
                                  new GenericDatumReader<Object>());
     try {
+      assertEquals(COUNT, reader.getCount());
       Object datum = null;
       if (VALIDATE) {
         for (Object expected : new RandomData(SCHEMA, COUNT, SEED)) {

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFileReflect.java Wed Oct 14 20:46:55 2009
@@ -206,6 +206,7 @@
   private static class BazRecord {
     private int nbr;
 
+    @SuppressWarnings("unused")
     public BazRecord() {
     }
 

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestNamespaceSpecific.java Wed Oct 14 20:46:55 2009
@@ -33,18 +33,11 @@
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import java.io.*;
+import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.nio.ByteBuffer;
-import java.util.Random;
-
 
 public class TestNamespaceSpecific {
-  private static final Logger LOG
-    = LoggerFactory.getLogger(TestNamespaceSpecific.class);
 
   public static class TestImpl implements TestNamespace {
     public TestRecord echo(TestRecord record) { return record; }

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java Wed Oct 14 20:46:55 2009
@@ -23,7 +23,11 @@
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.generic.GenericRequestor;
 import org.apache.avro.generic.GenericResponder;
-import org.apache.avro.ipc.*;
+import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.ipc.Requestor;
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.Transceiver;
 import org.apache.avro.util.Utf8;
 import org.junit.After;
 import org.junit.Before;

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolHttp.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolHttp.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolHttp.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolHttp.java Wed Oct 14 20:46:55 2009
@@ -17,7 +17,6 @@
  */
 package org.apache.avro;
 
-import java.util.Random;
 import org.apache.avro.ipc.HttpServer;
 import org.apache.avro.ipc.HttpTransceiver;
 import org.apache.avro.specific.SpecificRequestor;

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java Wed Oct 14 20:46:55 2009
@@ -35,18 +35,18 @@
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import java.io.*;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.LineNumberReader;
 import java.net.InetSocketAddress;
 import java.nio.ByteBuffer;
 import java.util.Random;
 
 
 public class TestProtocolSpecific {
-  private static final Logger LOG
-    = LoggerFactory.getLogger(TestProtocolSpecific.class);
 
   protected static final File SERVER_PORTS_DIR
   = new File(System.getProperty("test.dir", "/tmp")+"/server-ports/");

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java Wed Oct 14 20:46:55 2009
@@ -33,12 +33,8 @@
 import org.apache.avro.test.Simple;
 import org.apache.avro.test.TestRecord;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class TestReflect {
-  private static final Logger LOG
-    = LoggerFactory.getLogger(TestProtocolSpecific.class);
 
   private static final File FILE = new File("src/test/schemata/simple.avpr");
   private static final Protocol PROTOCOL;

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java Wed Oct 14 20:46:55 2009
@@ -188,9 +188,6 @@
     checkDefault(schemaJson, defaultJson, defaultValue);
   }
 
-  private static void check(String jsonSchema) throws Exception {
-    check(jsonSchema, true);
-  }
   private static void check(String jsonSchema, boolean induce)
     throws Exception {
     Schema schema = Schema.parse(jsonSchema);

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java Wed Oct 14 20:46:55 2009
@@ -29,49 +29,49 @@
   /** Verify EOFException throw at EOF */
 
   @Test(expected=EOFException.class)
-  public void testEOF_boolean() throws IOException {
+  public void testEOFBoolean() throws IOException {
     new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readBoolean();
   }
   
   @Test(expected=EOFException.class)
-  public void testEOF_int() throws IOException {
+  public void testEOFInt() throws IOException {
     new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readInt();
   }
   
   @Test(expected=EOFException.class)
-  public void testEOF_long() throws IOException {
+  public void testEOFLong() throws IOException {
     new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readLong();
   }
   
   @Test(expected=EOFException.class)
-  public void testEOF_float() throws IOException {
+  public void testEOFFloat() throws IOException {
     new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readFloat();
   }
   
   @Test(expected=EOFException.class)
-  public void testEOF_double() throws IOException {
+  public void testEOFDouble() throws IOException {
     new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readDouble();
   }
   
   @Test(expected=EOFException.class)
-  public void testEOF_bytes() throws IOException {
+  public void testEOFBytes() throws IOException {
     new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readBytes(null);
   }
   
   @Test(expected=EOFException.class)
-  public void testEOF_string() throws IOException {
+  public void testEOFString() throws IOException {
     new BinaryDecoder(new ByteArrayInputStream(new byte[0])).
       readString(new Utf8("a"));
   }
   
   @Test(expected=EOFException.class)
-  public void testEOF_fixed() throws IOException {
+  public void testEOFFixed() throws IOException {
     new BinaryDecoder(new ByteArrayInputStream(new byte[0])).
       readFixed(new byte[1]);
   }
 
   @Test(expected=EOFException.class)
-  public void testEOF_enum() throws IOException {
+  public void testEOFEnum() throws IOException {
     new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readEnum();
   }
   

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java Wed Oct 14 20:46:55 2009
@@ -20,14 +20,12 @@
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.util.Stack;
 import java.util.Collection;
 import java.util.Arrays;
 
 import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.JsonParser;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -55,7 +53,7 @@
     private final Decoder input;
     private final int depth;
     public Tests(int bufferSize, int depth, String input)
-      throws JsonParseException, IOException {
+      throws IOException {
   
       this.depth = depth;
       byte[] in = input.getBytes("UTF-8");
@@ -74,8 +72,7 @@
       this.parser =  f.createJsonParser(new ByteArrayInputStream(in));
     }
     
-    public void scan()
-      throws JsonParseException, UnsupportedEncodingException, IOException {
+    public void scan() throws IOException {
       Stack<S> countStack = new Stack<S>();
       long count = 0;
       while (parser.nextToken() != null) {
@@ -127,8 +124,7 @@
       }
     }
 
-    public void skip(int skipLevel) throws
-      JsonParseException, UnsupportedEncodingException, IOException {
+    public void skip(int skipLevel) throws IOException {
       Stack<S> countStack = new Stack<S>();
       long count = 0;
       while (parser.nextToken() != null) {
@@ -218,39 +214,35 @@
   }
 
   @Test
-  public void testScan()
-    throws JsonParseException, IOException {
+  public void testScan() throws IOException {
     Tests t = new Tests(iSize, iDepth, sInput);
     t.scan();
   }
 
   @Test
-  public void testSkip_1()
-    throws JsonParseException, IOException {
+  public void testSkip1() throws IOException {
     testSkip(iSize, iDepth, sInput, 0);
   }
 
   @Test
-  public void testSkip_2()
-    throws JsonParseException, IOException {
+  public void testSkip2() throws IOException {
     testSkip(iSize, iDepth, sInput, 1);
   }
 
   @Test
-  public void testSkip_3()
-    throws JsonParseException, IOException {
+  public void testSkip3() throws IOException {
     testSkip(iSize, iDepth, sInput, 2);
   }
 
   private void testSkip(int bufferSize, int depth, String input,
       int skipLevel)
-    throws JsonParseException, IOException {
+    throws IOException {
     Tests t = new Tests(bufferSize, depth, input);
     t.skip(skipLevel);
   }
 
   private static void skipMap(JsonParser parser, Decoder input, int depth)
-    throws IOException, JsonParseException {
+    throws IOException {
     for (long l = input.skipMap(); l != 0; l = input.skipMap()) {
       for (long i = 0; i < l; i++) {
         if (depth == 0) {
@@ -264,7 +256,7 @@
   }
 
   private static void skipArray(JsonParser parser, Decoder input, int depth)
-    throws IOException, JsonParseException {
+    throws IOException {
     for (long l = input.skipArray(); l != 0; l = input.skipArray()) {
       for (long i = 0; i < l; i++) {
         if (depth == 1) {
@@ -278,7 +270,7 @@
   }
  
   private static void checkString(String s, Decoder input, int n)
-    throws IOException, UnsupportedEncodingException {
+    throws IOException {
     ByteBuffer buf = input.readBytes(null);
     assertEquals(n, buf.remaining());
     String s2 = new String(buf.array(), buf.position(),
@@ -288,7 +280,7 @@
   
   private static void serialize(Encoder cos, JsonParser p,
       ByteArrayOutputStream os)
-    throws JsonParseException, IOException {
+    throws IOException {
     boolean[] isArray = new boolean[100];
     int[] counts = new int[100];
     int stackTop = -1;

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java Wed Oct 14 20:46:55 2009
@@ -53,16 +53,14 @@
   }
   
   @Test
-  public void test_identical()
-  throws IOException {
+  public void testIdentical() throws IOException {
     performTest(eEnc, iSkipL, sJsWrtSchm, sWrtCls, sJsRdrSchm, sRdrCls);
   }
 
   private static final int COUNT = 10;
 
   @Test
-  public void test_compatible()
-  throws IOException {
+  public void testCompatible() throws IOException {
     performTest(eEnc, iSkipL, sJsWrtSchm, sWrtCls, sJsRdrSchm, sRdrCls);
   }
 
@@ -120,7 +118,7 @@
   }
 
   static Object[][] encodings = new Object[][] { { Encoding.BINARY },
-	  { Encoding.BLOCKING_BINARY }, { Encoding.JSON } };
+          { Encoding.BLOCKING_BINARY }, { Encoding.JSON } };
   static Object[][] skipLevels =
     new Object[][] { { -1 }, { 0 }, { 1 }, { 2 }  };
   private static Object[][] testSchemas() {

Copied: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java (from r824967, hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java)
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java?p2=hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java&p1=hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java&r1=824967&r2=825270&rev=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java Wed Oct 14 20:46:55 2009
@@ -27,7 +27,7 @@
 import java.util.Arrays;
 
 @RunWith(Parameterized.class)
-public class TestResolvingIO_resolving {
+public class TestResolvingIOResolving {
   protected TestValidatingIO.Encoding eEnc;
   protected final int iSkipL;
   protected final String sJsWrtSchm;
@@ -38,7 +38,7 @@
   protected final Object[] oaWrtVals;
   protected final Object[] oaRdrVals;
 
-  public TestResolvingIO_resolving(TestValidatingIO.Encoding encoding,
+  public TestResolvingIOResolving(TestValidatingIO.Encoding encoding,
       int skipLevel, String jsonWriterSchema,
       String writerCalls,
       Object[] writerValues,
@@ -56,7 +56,7 @@
   }
 
   @Test
-  public void test_resolving()
+  public void testResolving()
     throws IOException {
     Schema writerSchema = Schema.parse(sJsWrtSchm);
     byte[] bytes = TestValidatingIO.make(writerSchema, sWrtCls,
@@ -70,8 +70,8 @@
   @Parameterized.Parameters
   public static Collection<Object[]> data3() {
     Collection<Object[]> ret = Arrays.asList(
-    		TestValidatingIO.convertTo2dArray(TestResolvingIO.encodings,
-    				TestResolvingIO.skipLevels,
+                TestValidatingIO.convertTo2dArray(TestResolvingIO.encodings,
+                                TestResolvingIO.skipLevels,
         dataForResolvingTests()));
     return ret;
   }
@@ -104,13 +104,13 @@
           new Object[] { 10, 101 } },
         { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
             + "{\"name\": \"g1\", " +
-            		"\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
+                        "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
                 + "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
             + "{\"name\": \"g2\", \"type\": \"long\"}]}", "IL",
           new Object[] { 10, 11L },
           "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
             + "{\"name\": \"g1\", " +
-            		"\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
+                        "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
                 + "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101},"
                 + "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
           + "{\"name\": \"g2\", \"type\": \"long\"}]}}", "IIL",

Propchange: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java?rev=825270&r1=825269&r2=825270&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java Wed Oct 14 20:46:55 2009
@@ -31,7 +31,12 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Random;
 
 @RunWith(Parameterized.class)
 public class TestValidatingIO {
@@ -46,13 +51,13 @@
   private String sJsSch;
   private String sCl;
 
-  public TestValidatingIO (Encoding _enc, int _skip, String _js, String _cls) {
-    this.eEnc = _enc;
-    this.iSkipL = _skip;
-    this.sJsSch = _js;
-    this.sCl = _cls;
+  public TestValidatingIO (Encoding enc, int skip, String js, String cls) {
+    this.eEnc = enc;
+    this.iSkipL = skip;
+    this.sJsSch = js;
+    this.sCl = cls;
   }
-  private static int COUNT = 1;
+  private static final int COUNT = 1;
   
   @Test
   public void testMain() throws IOException {
@@ -213,7 +218,7 @@
   public static Object[] randomValues(String calls) {
     Random r = new Random();
     InputScanner cs = new InputScanner(calls.toCharArray());
-    Vector<Object> result = new Vector<Object>();
+    List<Object> result = new ArrayList<Object>();
     while (! cs.isDone()) {
       char c = cs.cur();
       cs.next();


