avro-commits mailing list archives

From: b...@apache.org
Subject: [27/43] avro git commit: AVRO-1828: Add EditorConfig file and cleanup of whitespace violations
Date: Sat, 14 May 2016 23:44:00 GMT
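The subject line refers to the EditorConfig file that AVRO-1828 adds alongside the whitespace cleanup shown in the diff below. As context only, a minimal .editorconfig covering the rules this cleanup enforces (space indentation, no trailing whitespace, final newline) might look like the sketch below; the exact settings in the committed file may differ:

    # illustrative sketch, not the committed file
    root = true

    [*]
    charset = utf-8
    end_of_line = lf
    insert_final_newline = true
    trim_trailing_whitespace = true

    [*.java]
    indent_style = space
    indent_size = 2
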
http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java
index 71baa41..ee68c48 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java
@@ -106,10 +106,10 @@ public class AvroKeyValueRecordWriter<K, V> extends RecordWriter<K, V> implement
   public AvroKeyValueRecordWriter(AvroDatumConverter<K, ?> keyConverter,
       AvroDatumConverter<V, ?> valueConverter, GenericData dataModel,
       CodecFactory compressionCodec, OutputStream outputStream) throws IOException {
-    this(keyConverter, valueConverter, dataModel, compressionCodec, outputStream, 
+    this(keyConverter, valueConverter, dataModel, compressionCodec, outputStream,
         DataFileConstants.DEFAULT_SYNC_INTERVAL);
   }
-  
+
   /**
    * Gets the writer schema for the key/value pair generic record.
    *
@@ -137,5 +137,5 @@ public class AvroKeyValueRecordWriter<K, V> extends RecordWriter<K, V> implement
   @Override
   public long sync() throws IOException {
     return mAvroFileWriter.sync();
-  }  
+  }
 }

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
index 9db8c68..3f8d7e0 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
@@ -41,10 +41,10 @@ import org.apache.hadoop.mapreduce.TaskInputOutputContext;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
- * The AvroMultipleOutputs class simplifies writing Avro output data 
+ * The AvroMultipleOutputs class simplifies writing Avro output data
  * to multiple outputs
- * 
- * <p> 
+ *
+ * <p>
  * Case one: writing to additional outputs other than the job default output.
  *
  * Each additional output, or named output, may be configured with its own
@@ -53,14 +53,14 @@ import org.apache.hadoop.util.ReflectionUtils;
  * <p>
  * Case two: to write data to different files provided by user
  * </p>
- * 
+ *
  * <p>
- * AvroMultipleOutputs supports counters, by default they are disabled. The 
- * counters group is the {@link AvroMultipleOutputs} class name. The names of the 
- * counters are the same as the output name. These count the number of records 
- * written to each output name. 
+ * AvroMultipleOutputs supports counters, by default they are disabled. The
+ * counters group is the {@link AvroMultipleOutputs} class name. The names of the
+ * counters are the same as the output name. These count the number of records
+ * written to each output name.
  * </p>
- * 
+ *
  * Usage pattern for job submission:
  * <pre>
  *
@@ -72,18 +72,18 @@ import org.apache.hadoop.util.ReflectionUtils;
  * job.setMapperClass(MyAvroMapper.class);
  * job.setReducerClass(MyAvroReducer.class);
  * ...
- *  
+ *
  * Schema schema;
  * ...
  * // Defines additional single output 'avro1' for the job
  * AvroMultipleOutputs.addNamedOutput(job, "avro1", AvroKeyValueOutputFormat.class,
- * keyschema, valueSchema);  // valueSchema can be set to null if there only Key to be written 
+ * keyschema, valueSchema);  // valueSchema can be set to null if there only Key to be written
                                    to file in the RecordWriter
  *
  * // Defines additional output 'avro2' with different schema for the job
  * AvroMultipleOutputs.addNamedOutput(job, "avro2",
  *   AvroKeyOutputFormat.class,
- *   schema,null); 
+ *   schema,null);
  * ...
  *
  * job.waitForCompletion(true);
@@ -92,7 +92,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  * <p>
  * Usage in Reducer:
  * <pre>
- * 
+ *
  * public class MyAvroReducer extends
  *   Reducer&lt;K, V, T, NullWritable&gt; {
  * private MultipleOutputs amos;
@@ -126,18 +126,18 @@ public class AvroMultipleOutputs{
 
   private static final String MULTIPLE_OUTPUTS = "avro.mapreduce.multipleoutputs";
 
-  private static final String MO_PREFIX = 
+  private static final String MO_PREFIX =
     "avro.mapreduce.multipleoutputs.namedOutput.";
 
   private static final String FORMAT = ".format";
-  private static final String COUNTERS_ENABLED = 
+  private static final String COUNTERS_ENABLED =
     "avro.mapreduce.multipleoutputs.counters";
 
   /**
    * Counters group used by the counters of MultipleOutputs.
    */
   private static final String COUNTERS_GROUP = AvroMultipleOutputs.class.getName();
-  
+
   /**
    * Cache for the taskContexts
    */
@@ -181,7 +181,7 @@ public class AvroMultipleOutputs{
       throw new IllegalArgumentException("output name cannot be 'part'");
     }
   }
-  
+
   /**
    * Checks if a named output name is valid.
    *
@@ -270,7 +270,7 @@ public class AvroMultipleOutputs{
 
   /**
    * Enables or disables counters for the named outputs.
-   * 
+   *
    * The counters group is the {@link AvroMultipleOutputs} class name.
    * The names of the counters are the same as the named outputs. These
    * counters count the number records written to each output name.
@@ -287,7 +287,7 @@ public class AvroMultipleOutputs{
    * Returns if the counters for the named outputs are enabled or not.
    * By default these counters are disabled.
    *
-   * @param job    the job 
+   * @param job    the job
    * @return TRUE if the counters are enabled, FALSE if they are disabled.
    */
   public static boolean getCountersEnabled(JobContext job) {
@@ -295,7 +295,7 @@ public class AvroMultipleOutputs{
   }
 
   /**
-   * Wraps RecordWriter to increment counters. 
+   * Wraps RecordWriter to increment counters.
    */
   @SuppressWarnings("unchecked")
   private static class RecordWriterWithCounter extends RecordWriter {
@@ -311,13 +311,13 @@ public class AvroMultipleOutputs{
     }
 
     @SuppressWarnings({"unchecked"})
-    public void write(Object key, Object value) 
+    public void write(Object key, Object value)
         throws IOException, InterruptedException {
       context.getCounter(COUNTERS_GROUP, counterName).increment(1);
       writer.write(key, value);
     }
 
-    public void close(TaskAttemptContext context) 
+    public void close(TaskAttemptContext context)
         throws IOException, InterruptedException {
       writer.close(context);
     }
@@ -329,7 +329,7 @@ public class AvroMultipleOutputs{
   private Set<String> namedOutputs;
   private Map<String, RecordWriter<?, ?>> recordWriters;
   private boolean countersEnabled;
-  
+
   /**
    * Creates and initializes multiple outputs support,
    * it should be instantiated in the Mapper/Reducer setup method.
@@ -350,7 +350,7 @@ public class AvroMultipleOutputs{
    *
    * Output path is a unique file generated for the namedOutput.
    * For example, {namedOutput}-(m|r)-{part-number}
-   * 
+   *
    * @param namedOutput the named output name
    * @param key         the key , value is NullWritable
    */
@@ -367,7 +367,7 @@ public class AvroMultipleOutputs{
    *
    * Output path is a unique file generated for the namedOutput.
    * For example, {namedOutput}-(m|r)-{part-number}
-   * 
+   *
    * @param namedOutput the named output name
    * @param key         the key
    * @param value       the value
@@ -380,7 +380,7 @@ public class AvroMultipleOutputs{
 
   /**
    * Write key and value to baseOutputPath using the namedOutput.
-   * 
+   *
    * @param namedOutput    the named output name
    * @param key            the key
    * @param value          the value
@@ -402,26 +402,26 @@ public class AvroMultipleOutputs{
 
   /**
    * Write key value to an output file name.
-   * 
-   * Gets the record writer from job's output format.  
+   *
+   * Gets the record writer from job's output format.
    * Job's output format should be a FileOutputFormat.
-   * 
+   *
    * @param key       the key
    * @param value     the value
    * @param baseOutputPath base-output path to write the record to.
    * Note: Framework will generate unique filename for the baseOutputPath
    */
-  public void write(Object key, Object value, String baseOutputPath) 
+  public void write(Object key, Object value, String baseOutputPath)
       throws IOException, InterruptedException {
         write(key, value, null, null, baseOutputPath);
   }
-  
+
   /**
    * Write key value to an output file name.
-   * 
+   *
    * Gets the record writer from job's output format. Job's output format should
    * be a FileOutputFormat.
-   * 
+   *
    * @param key   the key
    * @param value the value
    * @param keySchema   keySchema to use
@@ -441,13 +441,13 @@ public class AvroMultipleOutputs{
   }
 
   /**
-   * 
+   *
    * Gets the record writer from job's output format. Job's output format should
-   * be a FileOutputFormat.If the record writer implements Syncable then returns 
+   * be a FileOutputFormat.If the record writer implements Syncable then returns
    * the current position as a value that may be passed to DataFileReader.seek(long)
-   * otherwise returns -1. 
+   * otherwise returns -1.
    * Forces the end of the current block, emitting a synchronization marker.
-   * 
+   *
    * @param namedOutput   the namedOutput
    * @param baseOutputPath base-output path to write the record to. Note: Framework will
    *          generate unique filename for the baseOutputPath
@@ -472,12 +472,12 @@ public class AvroMultipleOutputs{
   // MultithreadedMapper.
   @SuppressWarnings("unchecked")
   private synchronized RecordWriter getRecordWriter(
-      TaskAttemptContext taskContext, String baseFileName) 
+      TaskAttemptContext taskContext, String baseFileName)
       throws IOException, InterruptedException {
-    
+
     // look for record-writer in the cache
     RecordWriter writer = recordWriters.get(baseFileName);
-    
+
     // If not in cache, create a new one
     if (writer == null) {
       // get the record writer from context output format
@@ -490,13 +490,13 @@ public class AvroMultipleOutputs{
       } catch (ClassNotFoundException e) {
         throw new IOException(e);
       }
- 
-      // if counters are enabled, wrap the writer with context 
-      // to increment counters 
+
+      // if counters are enabled, wrap the writer with context
+      // to increment counters
       if (countersEnabled) {
         writer = new RecordWriterWithCounter(writer, baseFileName, context);
       }
-      
+
       // add the record-writer to the cache
       recordWriters.put(baseFileName, writer);
     }
@@ -521,7 +521,7 @@ public class AvroMultipleOutputs{
 
   }
 
-   // Create a taskAttemptContext for the named output with 
+   // Create a taskAttemptContext for the named output with
    // output format and output key/value types put in the context
   @SuppressWarnings("deprecation")
   private TaskAttemptContext getContext(String nameOutput) throws IOException {
@@ -547,13 +547,13 @@ public class AvroMultipleOutputs{
     setSchema(job, keySchema, valSchema);
     taskContext = createTaskAttemptContext(
       job.getConfiguration(), context.getTaskAttemptID());
-    
+
     taskContexts.put(nameOutput, taskContext);
-    
+
     return taskContext;
   }
-  
-  private TaskAttemptContext createTaskAttemptContext(Configuration conf, 
+
+  private TaskAttemptContext createTaskAttemptContext(Configuration conf,
       TaskAttemptID taskId) {
     // Use reflection since the context types changed incompatibly between 1.0
     // and 2.0.
@@ -566,7 +566,7 @@ public class AvroMultipleOutputs{
       throw new IllegalStateException(e);
     }
   }
-  
+
   private Class<?> getTaskAttemptContextClass() {
     try {
       return Class.forName(
@@ -580,14 +580,14 @@ public class AvroMultipleOutputs{
       }
     }
   }
-  
+
   /**
    * Closes all the opened outputs.
-   * 
+   *
    * This should be called from cleanup method of map/reduce task.
    * If overridden subclasses must invoke <code>super.close()</code> at the
    * end of their <code>close()</code>
-   * 
+   *
    */
   @SuppressWarnings("unchecked")
   public void close() throws IOException, InterruptedException {
@@ -597,4 +597,4 @@ public class AvroMultipleOutputs{
   }
 }
 
- 
+

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java
index 5f77190..c702c9b 100644
--- a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
  * @param <V> The type of value to write.
  */
 public abstract class AvroOutputFormatBase<K, V> extends FileOutputFormat<K, V> {
-  
+
   /**
    * Gets the configured compression codec from the task context.
    *
@@ -52,7 +52,7 @@ public abstract class AvroOutputFormatBase<K, V> extends FileOutputFormat<K, V>
       int xzLevel = context.getConfiguration().getInt(
               org.apache.avro.mapred.AvroOutputFormat.XZ_LEVEL_KEY,
               CodecFactory.DEFAULT_XZ_LEVEL);
-      
+
       String outputCodec = context.getConfiguration()
         .get(AvroJob.CONF_OUTPUT_CODEC);
 
@@ -72,7 +72,7 @@ public abstract class AvroOutputFormatBase<K, V> extends FileOutputFormat<K, V>
         } else {
           return CodecFactory.fromString(outputCodec);
         }
-      
+
       }
 
     // No compression.

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
index af340d8..9132866 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
@@ -23,35 +23,35 @@ import org.junit.Test;
 import static org.junit.Assert.assertTrue;
 
 public class TestHadoopCodecFactory {
-  
+
   @Test
   public void testHadoopCodecFactoryDeflate(){
     CodecFactory hadoopDeflateCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.DeflateCodec");
     CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
     assertTrue(hadoopDeflateCodec.getClass().equals(avroDeflateCodec.getClass()));
   }
-  
+
   @Test
   public void testHadoopCodecFactorySnappy(){
     CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.SnappyCodec");
     CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy");
     assertTrue(hadoopSnappyCodec.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testHadoopCodecFactoryBZip2(){
     CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.BZip2Codec");
     CodecFactory avroSnappyCodec = CodecFactory.fromString("bzip2");
     assertTrue(hadoopSnappyCodec.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testHadoopCodecFactoryGZip(){
     CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.GZipCodec");
     CodecFactory avroSnappyCodec = CodecFactory.fromString("deflate");
     assertTrue(hadoopSnappyCodec.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testHadoopCodecFactoryFail(){
     CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.FooCodec");

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
index 81d35ff..9961814 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
@@ -34,26 +34,26 @@ import org.junit.Before;
 import org.junit.Test;
 
 public class TestAvroInputFormat {
-  
+
   private static final String TEST_DIR = System.getProperty("test.dir", ".") +
       File.separator + TestAvroInputFormat.class.getName();
   private JobConf conf;
   private FileSystem fs;
   private Path inputDir;
-  
+
   @Before
   public void setUp() throws Exception {
     conf = new JobConf();
     fs = FileSystem.getLocal(conf);
     inputDir = new Path(TEST_DIR);
   }
-  
-  
+
+
   @After
   public void tearDown() throws Exception {
     fs.delete(inputDir, true);
   }
-  
+
   @SuppressWarnings("rawtypes")
   @Test
   public void testIgnoreFilesWithoutExtension() throws Exception {
@@ -62,15 +62,15 @@ public class TestAvroInputFormat {
     Path textFile = new Path(inputDir, "someotherfile.txt");
     fs.create(avroFile).close();
     fs.create(textFile).close();
-    
+
     FileInputFormat.setInputPaths(conf, inputDir);
 
-    
+
     AvroInputFormat inputFormat = new AvroInputFormat();
     FileStatus[] statuses = inputFormat.listStatus(conf);
     Assert.assertEquals(1, statuses.length);
     Assert.assertEquals("somefile.avro", statuses[0].getPath().getName());
-    
+
     conf.setBoolean(AvroInputFormat.IGNORE_FILES_WITHOUT_EXTENSION_KEY, false);
     statuses = inputFormat.listStatus(conf);
     Assert.assertEquals(2, statuses.length);

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
index e520c87..98205ba 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
@@ -64,22 +64,22 @@ public class TestAvroMultipleOutputs {
         amos.getCollector("myavro2",reporter)
           .collect(new Pair<Utf8,Long>(new Utf8(tok),1L).toString());
       }
-        
+
     }
     public void close() throws IOException {
       amos.close();
     }
 
   }
-  
+
   public static class ReduceImpl
     extends AvroReducer<Utf8, Long, Pair<Utf8, Long> > {
     private AvroMultipleOutputs amos;
-    
+
     public void configure(JobConf Job)
     {
         amos=new AvroMultipleOutputs(Job);
-    }    
+    }
 
     @Override
     public void reduce(Utf8 word, Iterable<Long> counts,
@@ -99,7 +99,7 @@ public class TestAvroMultipleOutputs {
     {
       amos.close();
     }
-  }    
+  }
 
   @Test public void runTestsInOrder() throws Exception {
     testJob();
@@ -110,70 +110,70 @@ public class TestAvroMultipleOutputs {
     testJob_noreducer();
     testProjection_noreducer();
   }
-  
+
   @SuppressWarnings("deprecation")
   public void testJob() throws Exception {
     JobConf job = new JobConf();
-    
+
 //    private static final String UTF8 = "UTF-8";
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
-    
+
     outputPath.getFileSystem(job).delete(outputPath);
     WordCountUtil.writeLinesFile();
-    
+
     job.setJobName("AvroMultipleOutputs");
-    
+
     AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
     AvroJob.setOutputSchema(job,
                             new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
-    
-    AvroJob.setMapperClass(job, MapImpl.class);        
+
+    AvroJob.setMapperClass(job, MapImpl.class);
     AvroJob.setReducerClass(job, ReduceImpl.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
     FileOutputFormat.setOutputPath(job, outputPath);
     FileOutputFormat.setCompressOutput(job, false);
     AvroMultipleOutputs.addNamedOutput(job,"myavro",AvroOutputFormat.class, new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
     AvroMultipleOutputs.addNamedOutput(job,"myavro1",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
-    AvroMultipleOutputs.addNamedOutput(job,"myavro2",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));   
+    AvroMultipleOutputs.addNamedOutput(job,"myavro2",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
     WordCountUtil.setMeta(job);
 
 
     JobClient.runJob(job);
-    
+
     WordCountUtil.validateCountsFile();
   }
-  
+
   @SuppressWarnings("deprecation")
   public void testProjection() throws Exception {
     JobConf job = new JobConf();
-    
+
     Integer defaultRank = new Integer(-1);
-    
-    String jsonSchema = 
+
+    String jsonSchema =
       "{\"type\":\"record\"," +
       "\"name\":\"org.apache.avro.mapred.Pair\","+
-      "\"fields\": [ " + 
+      "\"fields\": [ " +
         "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," +
-        "{\"name\":\"value\", \"type\":\"long\"}" + 
+        "{\"name\":\"value\", \"type\":\"long\"}" +
       "]}";
-    
+
     Schema readerSchema = Schema.parse(jsonSchema);
-    
+
     AvroJob.setInputSchema(job, readerSchema);
-    
+
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path inputPath = new Path(dir + "/out" + "/myavro-r-00000.avro");
     FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
     FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
 
-    
+
     AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
-    
+
     AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
     NullWritable ignore = NullWritable.get();
-    
+
     long sumOfCounts = 0;
     long numOfCounts = 0;
     while(recordReader.next(inputPair, ignore)) {
@@ -181,47 +181,47 @@ public class TestAvroMultipleOutputs {
       sumOfCounts += (Long) inputPair.datum().get(1);
       numOfCounts++;
     }
-    
+
     Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
-    
+
     long actualSumOfCounts = 0;
     for(Long count : WordCountUtil.COUNTS.values()) {
       actualSumOfCounts += count;
     }
-    
+
     Assert.assertEquals(sumOfCounts, actualSumOfCounts);
 
   }
-  
+
   @SuppressWarnings("deprecation")
   public void testProjection_newmethods() throws Exception {
     JobConf job = new JobConf();
-    
+
     Integer defaultRank = new Integer(-1);
-    
-    String jsonSchema = 
+
+    String jsonSchema =
       "{\"type\":\"record\"," +
       "\"name\":\"org.apache.avro.mapred.Pair\","+
-      "\"fields\": [ " + 
+      "\"fields\": [ " +
         "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," +
-        "{\"name\":\"value\", \"type\":\"long\"}" + 
+        "{\"name\":\"value\", \"type\":\"long\"}" +
       "]}";
-    
+
     Schema readerSchema = Schema.parse(jsonSchema);
-    
+
     AvroJob.setInputSchema(job, readerSchema);
-    
+
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path inputPath = new Path(dir + "/out" + "/testavrofile-r-00000.avro");
     FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
     FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
 
-    
+
     AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
-    
+
     AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
     NullWritable ignore = NullWritable.get();
-    
+
     long sumOfCounts = 0;
     long numOfCounts = 0;
     while(recordReader.next(inputPair, ignore)) {
@@ -229,18 +229,18 @@ public class TestAvroMultipleOutputs {
       sumOfCounts += (Long) inputPair.datum().get(1);
       numOfCounts++;
     }
-    
+
     Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
-    
+
     long actualSumOfCounts = 0;
     for(Long count : WordCountUtil.COUNTS.values()) {
       actualSumOfCounts += count;
     }
-    
+
     Assert.assertEquals(sumOfCounts, actualSumOfCounts);
 
   }
-  
+
 
   @SuppressWarnings("deprecation")
   // Test for a differnt schema output
@@ -269,7 +269,7 @@ public class TestAvroMultipleOutputs {
     }
     Assert.assertEquals(sumOfCounts, actualSumOfCounts);
   }
-  
+
   @SuppressWarnings("deprecation")
   // Test for a differnt schema output
   public void testProjection_newmethods_1() throws Exception {
@@ -323,7 +323,7 @@ public class TestAvroMultipleOutputs {
     AvroMultipleOutputs.addNamedOutput(job,"myavro2",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
     JobClient.runJob(job);
   }
-  
+
   public void testProjection_noreducer() throws Exception {
     JobConf job = new JobConf();
     long onel = 1;

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
index 6de4710..6f5c0a0 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
@@ -35,107 +35,107 @@ public class TestAvroOutputFormat extends TestCase {
     assertEquals(newSyncInterval, jobConf.getInt(
         AvroOutputFormat.SYNC_INTERVAL_KEY, -1));
   }
-  
+
   @Test
   public void testNoCodec() throws UnsupportedEncodingException {
-    
-    
+
+
     JobConf job = new JobConf();
     assertTrue(AvroOutputFormat.getCodecFactory(job) == null);
-    
+
     job = new JobConf();
     job.set("mapred.output.compress", "false");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.BZip2Codec");
     assertTrue(AvroOutputFormat.getCodecFactory(job) == null);
-    
+
     job = new JobConf();
     job.set("mapred.output.compress", "false");
     job.set(AvroJob.OUTPUT_CODEC, "bzip2");
     assertTrue(AvroOutputFormat.getCodecFactory(job) == null);
   }
-  
+
   @Test
   public void testBZip2CodecUsingHadoopClass() throws UnsupportedEncodingException {
     CodecFactory avroBZip2Codec = CodecFactory.fromString("bzip2");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.BZip2Codec");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));    
+    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));
   }
-  
+
   @Test
   public void testBZip2CodecUsingAvroCodec() throws UnsupportedEncodingException {
     CodecFactory avroBZip2Codec = CodecFactory.fromString("bzip2");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set(AvroJob.OUTPUT_CODEC, "bzip2");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));    
+    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));
   }
-  
+
   @Test
   public void testDeflateCodecUsingHadoopClass() throws UnsupportedEncodingException {
     CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.DeflateCodec");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));
   }
-  
+
   @Test
   public void testDeflateCodecUsingAvroCodec() throws UnsupportedEncodingException {
     CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set(AvroJob.OUTPUT_CODEC, "deflate");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));
   }
-  
+
   @Test
   public void testSnappyCodecUsingHadoopClass() throws UnsupportedEncodingException {
     CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testSnappyCodecUsingAvroCodec() throws UnsupportedEncodingException {
     CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set(AvroJob.OUTPUT_CODEC, "snappy");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));
   }
-  
+
   @Test
   public void testGZipCodecUsingHadoopClass() throws UnsupportedEncodingException {
     CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
-    
+
     JobConf job = new JobConf();
     job.set("mapred.output.compress", "true");
     job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GZipCodec");
     CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
     assertTrue(factory != null);
-    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));    
+    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));
   }
 
 

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
index 264251a..e4743cc 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.mapred.RecordWriter;
 import org.junit.Test;
 
 public class TestAvroTextOutputFormat {
-  
+
   private static final String UTF8 = "UTF-8";
 
   @Test
@@ -52,12 +52,12 @@ public class TestAvroTextOutputFormat {
     fileWriter.create(schema, file);
     RecordWriter<Object, Object> rw = new AvroTextOutputFormat<Object, Object>()
       .new AvroTextRecordWriter(fileWriter, "\t".getBytes(UTF8));
-    
+
     rw.write(null, null);
     rw.write(null, NullWritable.get());
     rw.write(NullWritable.get(), null);
     rw.write(NullWritable.get(), NullWritable.get());
-    
+
     rw.write("k1", null);
     rw.write("k2", NullWritable.get());
 
@@ -66,7 +66,7 @@ public class TestAvroTextOutputFormat {
 
     rw.write("k3", "v3");
     rw.write(new Text("k4"), new Text("v4"));
-    
+
     rw.close(null);
 
     DatumReader<ByteBuffer> reader = new GenericDatumReader<ByteBuffer>();
@@ -80,7 +80,7 @@ public class TestAvroTextOutputFormat {
     assertEquals("k4\tv4", asString(fileReader.next()));
     assertFalse("End", fileReader.hasNext());
   }
-  
+
   private String asString(ByteBuffer buf) throws UnsupportedEncodingException {
     byte[] b = new byte[buf.remaining()];
     buf.get(b);

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
index 7273bdd..17bc18d 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.mapred.JobConf;
 import org.junit.Test;
 
 public class TestAvroTextSort {
-  
+
   @Test
   /**
    * Run the identity job on a "bytes" Avro file using AvroAsTextInputFormat
@@ -37,19 +37,19 @@ public class TestAvroTextSort {
     JobConf job = new JobConf();
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
-    
+
     outputPath.getFileSystem(job).delete(outputPath);
     WordCountUtil.writeLinesBytesFile();
-    
+
     job.setInputFormat(AvroAsTextInputFormat.class);
     job.setOutputFormat(AvroTextOutputFormat.class);
     job.setOutputKeyClass(Text.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
     FileOutputFormat.setOutputPath(job, outputPath);
-    
+
     JobClient.runJob(job);
-    
+
     WordCountUtil.validateSortedFile();
   }
 

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
index 5dcbb6c..2c61598 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
@@ -59,7 +59,7 @@ public class TestGenericJob {
   private static Schema createSchema() {
     List<Field> fields = new ArrayList<Schema.Field>();
 
-      
+
     fields.add(new Field("Optional", createArraySchema(), "", new ArrayList<Object>()));
 
     Schema recordSchema =
@@ -73,7 +73,7 @@ public class TestGenericJob {
     for (int i = 0; i < 5; i++) {
       schemas.add(createInnerSchema("optional_field_" + i));
     }
-        
+
     Schema unionSchema = Schema.createUnion(schemas);
     return Schema.createArray(unionSchema);
   }
@@ -96,7 +96,7 @@ public class TestGenericJob {
     file.writeChars("aa bb cc\ndd ee ff\n");
     file.close();
   }
-    
+
   @After
     public void tearDown() throws IOException {
     FileUtil.fullyDelete(new File(dir));
@@ -106,9 +106,9 @@ public class TestGenericJob {
     extends MapReduceBase
     implements Mapper<LongWritable, Text,
                AvroWrapper<Pair<Long, GenericData.Record>>, NullWritable> {
-      
-    public void map(LongWritable key, Text value, 
-                    OutputCollector<AvroWrapper<Pair<Long,GenericData.Record>>,NullWritable> out, 
+
+    public void map(LongWritable key, Text value,
+                    OutputCollector<AvroWrapper<Pair<Long,GenericData.Record>>,NullWritable> out,
                     Reporter reporter) throws IOException {
       GenericData.Record optional_entry =
         new GenericData.Record(createInnerSchema("optional_field_1"));
@@ -124,7 +124,7 @@ public class TestGenericJob {
                   (new Pair<Long,GenericData.Record>(key.get(), container)),
                   NullWritable.get());
     }
-  }  
+  }
 
 
   @Test
@@ -132,10 +132,10 @@ public class TestGenericJob {
     JobConf job = new JobConf();
     Path outputPath = new Path(dir + "/out");
     outputPath.getFileSystem(job).delete(outputPath);
-        
+
     job.setInputFormat(TextInputFormat.class);
     FileInputFormat.setInputPaths(job, dir + "/in");
-        
+
     job.setMapperClass(AvroTestConverter.class);
     job.setNumReduceTasks(0);
 

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
index e8a63f1..1b6ab68 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
@@ -81,7 +81,7 @@ public class TestReflectJob {
                                                new Count(1L)));
     }
   }
-  
+
   public static class ReduceImpl
     extends AvroReducer<Text, Count, WordCount> {
     @Override
@@ -93,7 +93,7 @@ public class TestReflectJob {
         sum += count.count;
       collector.collect(new WordCount(word.text, sum));
     }
-  }    
+  }
 
   @Test
   @SuppressWarnings("deprecation")
@@ -107,25 +107,25 @@ public class TestReflectJob {
     inputPath.getFileSystem(job).delete(inputPath);
 
     writeLinesFile(new File(dir+"/in"));
-    
+
     job.setJobName("reflect");
-    
+
     AvroJob.setInputSchema(job, ReflectData.get().getSchema(Text.class));
     AvroJob.setMapOutputSchema
       (job, new Pair(new Text(""), new Count(0L)).getSchema());
     AvroJob.setOutputSchema(job, ReflectData.get().getSchema(WordCount.class));
-    
-    AvroJob.setMapperClass(job, MapImpl.class);        
+
+    AvroJob.setMapperClass(job, MapImpl.class);
     //AvroJob.setCombinerClass(job, ReduceImpl.class);
     AvroJob.setReducerClass(job, ReduceImpl.class);
-    
+
     FileInputFormat.setInputPaths(job, inputPath);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     AvroJob.setReflect(job);                      // use reflection
 
     JobClient.runJob(job);
-    
+
     validateCountsFile(new File(new File(dir, "out"), "part-00000.avro"));
   }
 
@@ -139,7 +139,7 @@ public class TestReflectJob {
       out.append(new Text(line));
     out.close();
   }
-  
+
   private void validateCountsFile(File file) throws Exception {
     DatumReader<WordCount> reader = new ReflectDatumReader<WordCount>();
     InputStream in = new BufferedInputStream(new FileInputStream(file));

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
index 1a3c966..9d3bda2 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
@@ -104,7 +104,7 @@ public class TestSequenceFileReader {
     Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
 
     output.getFileSystem(job).delete(output);
-    
+
     // configure input for Avro from sequence file
     AvroJob.setInputSequenceFile(job);
     FileInputFormat.setInputPaths(job, FILE.toURI().toString());
@@ -116,7 +116,7 @@ public class TestSequenceFileReader {
     // configure output for avro
     AvroJob.setOutputSchema(job, SCHEMA);
     FileOutputFormat.setOutputPath(job, output);
-    
+
     JobClient.runJob(job);
 
     checkFile(new DataFileReader<Pair<Long,CharSequence>>
@@ -127,9 +127,9 @@ public class TestSequenceFileReader {
   private static class NonAvroMapper
     extends MapReduceBase
     implements Mapper<LongWritable,Text,AvroKey<Long>,AvroValue<Utf8>> {
-    
-    public void map(LongWritable key, Text value, 
-                  OutputCollector<AvroKey<Long>,AvroValue<Utf8>> out, 
+
+    public void map(LongWritable key, Text value,
+                  OutputCollector<AvroKey<Long>,AvroValue<Utf8>> out,
                   Reporter reporter) throws IOException {
       out.collect(new AvroKey<Long>(key.get()),
                   new AvroValue<Utf8>(new Utf8(value.toString())));
@@ -142,7 +142,7 @@ public class TestSequenceFileReader {
     Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
 
     output.getFileSystem(job).delete(output);
-    
+
     // configure input for non-Avro sequence file
     job.setInputFormat(SequenceFileInputFormat.class);
     FileInputFormat.setInputPaths(job, FILE.toURI().toString());
@@ -166,9 +166,9 @@ public class TestSequenceFileReader {
   private static class NonAvroOnlyMapper
     extends MapReduceBase
     implements Mapper<LongWritable,Text,AvroWrapper<Pair<Long,Utf8>>,NullWritable> {
-    
-    public void map(LongWritable key, Text value, 
-                    OutputCollector<AvroWrapper<Pair<Long,Utf8>>,NullWritable> out, 
+
+    public void map(LongWritable key, Text value,
+                    OutputCollector<AvroWrapper<Pair<Long,Utf8>>,NullWritable> out,
                     Reporter reporter) throws IOException {
       out.collect(new AvroWrapper<Pair<Long,Utf8>>(new Pair<Long,Utf8>(key.get(), new Utf8(value.toString()))),
                   NullWritable.get());
@@ -181,7 +181,7 @@ public class TestSequenceFileReader {
     Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
 
     output.getFileSystem(job).delete(output);
-    
+
 
     // configure input for non-Avro sequence file
     job.setInputFormat(SequenceFileInputFormat.class);
@@ -205,9 +205,9 @@ public class TestSequenceFileReader {
   private static class NonAvroReducer
     extends MapReduceBase
     implements Reducer<AvroKey<Long>,AvroValue<Utf8>,LongWritable,Text> {
-    
+
     public void reduce(AvroKey<Long> key, Iterator<AvroValue<Utf8>> values,
-                       OutputCollector<LongWritable, Text> out, 
+                       OutputCollector<LongWritable, Text> out,
                        Reporter reporter) throws IOException {
       while (values.hasNext()) {
         AvroValue<Utf8> value = values.next();
@@ -223,7 +223,7 @@ public class TestSequenceFileReader {
     Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
 
     output.getFileSystem(job).delete(output);
-    
+
     // configure input for Avro from sequence file
     AvroJob.setInputSequenceFile(job);
     AvroJob.setInputSchema(job, SCHEMA);

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
index c32c403..49ac89f 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
@@ -68,18 +68,18 @@ public class TestWeather {
     String inDir = System.getProperty("share.dir","../../../share")+"/test/data";
     Path input = new Path(inDir+"/weather.avro");
     Path output = new Path(System.getProperty("test.dir","target/test")+"/weather-ident");
-    
+
     output.getFileSystem(job).delete(output);
-    
+
     job.setJobName("identity map weather");
-    
+
     AvroJob.setInputSchema(job, Weather.SCHEMA$);
     AvroJob.setOutputSchema(job, Weather.SCHEMA$);
 
     FileInputFormat.setInputPaths(job, input);
     FileOutputFormat.setOutputPath(job, output);
     FileOutputFormat.setCompressOutput(job, true);
-    
+
     job.setNumReduceTasks(0);                     // map-only
 
     JobClient.runJob(job);
@@ -145,24 +145,24 @@ public class TestWeather {
     String inDir = System.getProperty("share.dir","../../../share")+"/test/data";
     Path input = new Path(inDir+"/weather.avro");
     Path output = new Path(System.getProperty("test.dir","target/test")+"/weather-sort");
-    
+
     output.getFileSystem(job).delete(output);
-    
+
     job.setJobName("sort weather");
-    
+
     AvroJob.setInputSchema(job, Weather.SCHEMA$);
     AvroJob.setMapOutputSchema
       (job, Pair.getPairSchema(Weather.SCHEMA$, Schema.create(Type.NULL)));
     AvroJob.setOutputSchema(job, Weather.SCHEMA$);
-    
-    AvroJob.setMapperClass(job, SortMapper.class);        
+
+    AvroJob.setMapperClass(job, SortMapper.class);
     AvroJob.setReducerClass(job, SortReducer.class);
 
     FileInputFormat.setInputPaths(job, input);
     FileOutputFormat.setOutputPath(job, output);
     FileOutputFormat.setCompressOutput(job, true);
     AvroJob.setOutputCodec(job, SNAPPY_CODEC);
-    
+
     JobClient.runJob(job);
 
     // check output is correct

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
index 4e729dc..a0b61cf 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
@@ -49,7 +49,7 @@ public class TestWordCount {
         collector.collect(new Pair<Utf8,Long>(new Utf8(tokens.nextToken()),1L));
     }
   }
-  
+
   public static class ReduceImpl
     extends AvroReducer<Utf8, Long, Pair<Utf8, Long> > {
     @Override
@@ -61,7 +61,7 @@ public class TestWordCount {
         sum += count;
       collector.collect(new Pair<Utf8,Long>(word, sum));
     }
-  }    
+  }
 
   @Test public void runTestsInOrder() throws Exception {
     testJob();
@@ -73,59 +73,59 @@ public class TestWordCount {
     JobConf job = new JobConf();
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
-    
+
     outputPath.getFileSystem(job).delete(outputPath);
     WordCountUtil.writeLinesFile();
-    
+
     job.setJobName("wordcount");
-    
+
     AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
     AvroJob.setOutputSchema(job,
                             new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
-    
-    AvroJob.setMapperClass(job, MapImpl.class);        
+
+    AvroJob.setMapperClass(job, MapImpl.class);
     AvroJob.setCombinerClass(job, ReduceImpl.class);
     AvroJob.setReducerClass(job, ReduceImpl.class);
-    
+
     FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
     FileOutputFormat.setOutputPath(job, outputPath);
     FileOutputFormat.setCompressOutput(job, true);
-    
+
     WordCountUtil.setMeta(job);
 
     JobClient.runJob(job);
-    
+
     WordCountUtil.validateCountsFile();
   }
-  
+
   @SuppressWarnings("deprecation")
   public void testProjection() throws Exception {
     JobConf job = new JobConf();
-    
+
     Integer defaultRank = new Integer(-1);
-    
-    String jsonSchema = 
+
+    String jsonSchema =
       "{\"type\":\"record\"," +
       "\"name\":\"org.apache.avro.mapred.Pair\","+
-      "\"fields\": [ " + 
+      "\"fields\": [ " +
         "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," +
-        "{\"name\":\"value\", \"type\":\"long\"}" + 
+        "{\"name\":\"value\", \"type\":\"long\"}" +
       "]}";
-    
+
     Schema readerSchema = Schema.parse(jsonSchema);
-    
+
     AvroJob.setInputSchema(job, readerSchema);
-    
+
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path inputPath = new Path(dir + "/out" + "/part-00000" + AvroOutputFormat.EXT);
     FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
     FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
-    
+
     AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
-    
+
     AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
     NullWritable ignore = NullWritable.get();
-    
+
     long sumOfCounts = 0;
     long numOfCounts = 0;
     while(recordReader.next(inputPair, ignore)) {
@@ -133,14 +133,14 @@ public class TestWordCount {
       sumOfCounts += (Long) inputPair.datum().get(1);
       numOfCounts++;
     }
-    
+
     Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
-    
+
     long actualSumOfCounts = 0;
     for(Long count : WordCountUtil.COUNTS.values()) {
       actualSumOfCounts += count;
     }
-    
+
     Assert.assertEquals(sumOfCounts, actualSumOfCounts);
   }
 

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
index af60f90..8d88bd3 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
@@ -104,7 +104,7 @@ public class WordCountUtil {
       out.append(ByteBuffer.wrap(line.getBytes("UTF-8")));
     out.close();
   }
-  
+
   public static void writeLinesTextFile() throws IOException {
     FileUtil.fullyDelete(DIR);
     LINES_FILE.getParentFile().mkdirs();
@@ -130,7 +130,7 @@ public class WordCountUtil {
     in.close();
     assertEquals(COUNTS.size(), numWords);
   }
-  
+
   public static void validateSortedFile() throws Exception {
     DatumReader<ByteBuffer> reader = new GenericDatumReader<ByteBuffer>();
     InputStream in = new BufferedInputStream(
@@ -150,12 +150,12 @@ public class WordCountUtil {
     }
     assertFalse(lines.hasNext());
   }
-  
+
   // metadata tests
   private static final String STRING_KEY = "string-key";
   private static final String LONG_KEY = "long-key";
   private static final String BYTES_KEY = "bytes-key";
-  
+
   private static final String STRING_META_VALUE = "value";
   private static final long LONG_META_VALUE = 666;
   private static final byte[] BYTES_META_VALUE

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
index b551ef9..f3eb638 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
@@ -77,7 +77,7 @@ public abstract class TetherTask<IN,MID,OUT> {
     private Buffer buffer = new Buffer();
     private BinaryEncoder encoder = new EncoderFactory()
         .configureBlockSize(512).binaryEncoder(buffer, null);
-    
+
     private Collector(Schema schema) {
       this.writer = new SpecificDatumWriter<T>(schema);
     }
@@ -89,7 +89,7 @@ public abstract class TetherTask<IN,MID,OUT> {
       encoder.flush();
       outputClient.output(buffer.data());
     }
-    
+
     /** Collect a pre-partitioned map output value. */
     public void collect(T record, int partition) throws IOException {
       buffer.reset();

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
index 48cae25..58df52c 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
@@ -31,7 +31,7 @@ import org.slf4j.LoggerFactory;
  * functions for word count. */
 public class WordCountTask
   extends TetherTask<Utf8,Pair<Utf8,Long>,Pair<Utf8,Long>> {
-  
+
   static final Logger LOG = LoggerFactory.getLogger(WordCountTask.class);
   @Override public void map(Utf8 text, Collector<Pair<Utf8,Long>> collector)
     throws IOException {
@@ -39,14 +39,14 @@ public class WordCountTask
     while (tokens.hasMoreTokens())
       collector.collect(new Pair<Utf8,Long>(new Utf8(tokens.nextToken()),1L));
   }
-  
+
   private long sum;
 
   @Override public void reduce(Pair<Utf8,Long> wc,
                                Collector<Pair<Utf8,Long>> c) {
     sum += wc.value();
   }
-    
+
   @Override public void reduceFlush(Pair<Utf8,Long> wc, Collector<Pair<Utf8,Long>> c)
     throws IOException {
     wc.value(sum);

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
index a867f02..a2779a0 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
@@ -82,7 +82,7 @@ public class TestAvroKeyRecordWriter {
 
     dataFileReader.close();
   }
-  
+
   @Test
   public void testSycnableWrite() throws IOException {
     Schema writerSchema = Schema.create(Schema.Type.INT);
@@ -105,20 +105,20 @@ public class TestAvroKeyRecordWriter {
     verify(context);
 
     // Verify that the file was written as expected.
-	Configuration conf = new Configuration();
-	conf.set("fs.default.name", "file:///");
-	Path avroFile = new Path("target/temp.avro");
-	DataFileReader<GenericData.Record> dataFileReader = new DataFileReader<GenericData.Record>(new FsInput(avroFile,
-			conf), new SpecificDatumReader<GenericData.Record>());
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    Path avroFile = new Path("target/temp.avro");
+    DataFileReader<GenericData.Record> dataFileReader = new DataFileReader<GenericData.Record>(new FsInput(avroFile,
+      conf), new SpecificDatumReader<GenericData.Record>());
 
     dataFileReader.seek(positionTwo);
     assertTrue(dataFileReader.hasNext());  // Record 2.
     assertEquals(2, dataFileReader.next());
 
-	dataFileReader.seek(positionOne);
+    dataFileReader.seek(positionOne);
     assertTrue(dataFileReader.hasNext());  // Record 1.
     assertEquals(1, dataFileReader.next());
-    
+
     dataFileReader.close();
-  }  
+  }
 }

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
index 1cd1ded..2de056f 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
@@ -168,7 +168,7 @@ public class TestAvroKeyValueRecordWriter {
     assertEquals("reflectionData", firstRecord.getKey().toString());
     assertEquals(record.attribute, firstRecord.getValue().attribute);
   }
- 
+
   @Test
   public void testSyncableWriteRecords() throws IOException {
     Job job = new Job();
@@ -202,14 +202,14 @@ public class TestAvroKeyValueRecordWriter {
 
     verify(context);
 
-	Configuration conf = new Configuration();
-	conf.set("fs.default.name", "file:///");
-	Path avroFile = new Path("target/temp.avro");
-	DataFileReader<GenericData.Record> avroFileReader = new DataFileReader<GenericData.Record>(new FsInput(avroFile,
-			conf), new SpecificDatumReader<GenericData.Record>());
-    
-	
-	avroFileReader.seek(pointTwo);
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    Path avroFile = new Path("target/temp.avro");
+    DataFileReader<GenericData.Record> avroFileReader = new DataFileReader<GenericData.Record>(new FsInput(avroFile,
+      conf), new SpecificDatumReader<GenericData.Record>());
+
+
+    avroFileReader.seek(pointTwo);
     // Verify that the second record was written;
     assertTrue(avroFileReader.hasNext());
     AvroKeyValue<CharSequence, TextStats> secondRecord
@@ -218,8 +218,8 @@ public class TestAvroKeyValueRecordWriter {
     assertEquals("banana", secondRecord.getKey().toString());
     assertEquals("banana", secondRecord.getValue().name.toString());
 
-    
-	avroFileReader.seek(pointOne);
+
+    avroFileReader.seek(pointOne);
     // Verify that the first record was written.
     assertTrue(avroFileReader.hasNext());
     AvroKeyValue<CharSequence, TextStats> firstRecord
@@ -231,5 +231,5 @@ public class TestAvroKeyValueRecordWriter {
 
     // That's all, folks.
     avroFileReader.close();
-  }  
+  }
 }

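The hunk above also passes through the AvroKeyValue accessors used to check the written key/value pairs. A small sketch of assembling such a generic key/value record outside the test; the string key schema and the helper name are assumptions, and AvroKeyValue comes from org.apache.avro.hadoop.io:

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.hadoop.io.AvroKeyValue;

public class KeyValueRecordSketch {
  /** Wraps a key and a value into the generic key/value record layout the writer emits. */
  public static GenericRecord build(CharSequence key, GenericRecord value, Schema valueSchema) {
    Schema kvSchema = AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING), valueSchema);
    AvroKeyValue<CharSequence, GenericRecord> kv =
        new AvroKeyValue<CharSequence, GenericRecord>(new GenericData.Record(kvSchema));
    kv.setKey(key);
    kv.setValue(value);
    return kv.get();   // the underlying generic record, ready to append to a data file
  }
}
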
http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
index 83cc8ee..13c7cc1 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
@@ -59,10 +59,10 @@ public class TestAvroMultipleOutputs {
       Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
           + "{\"name\":\"name\",\"type\":\"string\"}]}");
-  public static final Schema STATS_SCHEMA_2 = 
+  public static final Schema STATS_SCHEMA_2 =
       Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count1\",\"type\":\"int\"},"
-          + "{\"name\":\"name1\",\"type\":\"string\"}]}");  
+          + "{\"name\":\"name1\",\"type\":\"string\"}]}");
 
   private static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
     private IntWritable mOne;
@@ -122,17 +122,17 @@ public class TestAvroMultipleOutputs {
       record.put("name", new Utf8(line.toString()));
       record.put("count", new Integer(sum));
       mStats.datum(record);
-      context.write(mStats, NullWritable.get()); 
+      context.write(mStats, NullWritable.get());
       amos.write("myavro",mStats,NullWritable.get());
       record2.put("name1", new Utf8(line.toString()));
       record2.put("count1", new Integer(sum));
-      mStats.datum(record2); 
+      mStats.datum(record2);
       amos.write(mStats, NullWritable.get(), STATS_SCHEMA_2, null, "testnewwrite2");
       amos.write("myavro1",mStats);
       amos.write(mStats, NullWritable.get(), STATS_SCHEMA, null, "testnewwrite");
       amos.write(mStats, NullWritable.get(), "testwritenonschema");
     }
-   
+
     @Override
     protected void cleanup(Context context) throws IOException,InterruptedException
     {
@@ -202,9 +202,9 @@ public class TestAvroMultipleOutputs {
     job.setMapOutputValueClass(IntWritable.class);
 
     job.setReducerClass(GenericStatsReducer.class);
-    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);    
+    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);
     AvroMultipleOutputs.addNamedOutput(job,"myavro",AvroKeyOutputFormat.class,STATS_SCHEMA,null);
-    AvroMultipleOutputs.addNamedOutput(job,"myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2); 
+    AvroMultipleOutputs.addNamedOutput(job,"myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2);
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
@@ -244,7 +244,7 @@ public class TestAvroMultipleOutputs {
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-  
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -255,11 +255,11 @@ public class TestAvroMultipleOutputs {
        counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
     }
     reader.close();
-    
+
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-        
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite2-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -273,7 +273,7 @@ public class TestAvroMultipleOutputs {
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-    
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testwritenonschema-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -288,8 +288,8 @@ public class TestAvroMultipleOutputs {
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-    
-    
+
+
   }
 
   @Test
@@ -312,7 +312,7 @@ public class TestAvroMultipleOutputs {
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out-specific");
-    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath); 
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     Assert.assertTrue(job.waitForCompletion(true));

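The TestAvroMultipleOutputs hunks are whitespace-only, but they run through the named-output setup the test depends on. A rough driver-side sketch of that setup; the output directory is assumed, and the schemas are supplied by the caller rather than parsed inline as in the test:

import org.apache.avro.Schema;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyOutputFormat;
import org.apache.avro.mapreduce.AvroMultipleOutputs;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class NamedOutputsSketch {
  /** Configures the default Avro output plus two named outputs, mirroring the test. */
  public static void configure(Job job, Schema statsSchema, Schema statsSchema2) {
    AvroJob.setOutputKeySchema(job, statsSchema);
    AvroMultipleOutputs.addNamedOutput(job, "myavro", AvroKeyOutputFormat.class, statsSchema, null);
    AvroMultipleOutputs.addNamedOutput(job, "myavro1", AvroKeyOutputFormat.class, statsSchema2);
    job.setOutputFormatClass(AvroKeyOutputFormat.class);
    FileOutputFormat.setOutputPath(job, new Path("target/mapred/out"));  // assumed output dir
  }
}

Records then reach a named output from the reducer with calls like amos.write("myavro", key, NullWritable.get()), as the reducer code in the hunks above shows.
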
http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
----------------------------------------------------------------------
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
index 8298984..058f73b 100644
--- a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
@@ -56,10 +56,10 @@ public class TestAvroMultipleOutputsSyncable {
       Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
           + "{\"name\":\"name\",\"type\":\"string\"}]}");
-  public static final Schema STATS_SCHEMA_2 = 
+  public static final Schema STATS_SCHEMA_2 =
       Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
           + "\"fields\":[{\"name\":\"count1\",\"type\":\"int\"},"
-          + "{\"name\":\"name1\",\"type\":\"string\"}]}");  
+          + "{\"name\":\"name1\",\"type\":\"string\"}]}");
 
   private static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
     private IntWritable mOne;
@@ -124,14 +124,14 @@ public class TestAvroMultipleOutputsSyncable {
       amos.write("myavro",mStats,NullWritable.get());
       record2.put("name1", new Utf8(line.toString()));
       record2.put("count1", new Integer(sum));
-      mStats.datum(record2); 
+      mStats.datum(record2);
       amos.write(mStats, NullWritable.get(), STATS_SCHEMA_2, null, "testnewwrite2");
       amos.sync("myavro1","myavro1");
       amos.write("myavro1",mStats);
       amos.write(mStats, NullWritable.get(), STATS_SCHEMA, null, "testnewwrite");
       amos.write(mStats, NullWritable.get(), "testwritenonschema");
     }
-   
+
     @Override
     protected void cleanup(Context context) throws IOException,InterruptedException
     {
@@ -202,9 +202,9 @@ public class TestAvroMultipleOutputsSyncable {
     job.setMapOutputValueClass(IntWritable.class);
 
     job.setReducerClass(GenericStatsReducer.class);
-    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);    
+    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);
     AvroMultipleOutputs.addNamedOutput(job,"myavro",AvroKeyOutputFormat.class,STATS_SCHEMA,null);
-    AvroMultipleOutputs.addNamedOutput(job,"myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2); 
+    AvroMultipleOutputs.addNamedOutput(job,"myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2);
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out");
@@ -244,7 +244,7 @@ public class TestAvroMultipleOutputsSyncable {
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-  
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -255,11 +255,11 @@ public class TestAvroMultipleOutputsSyncable {
        counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
     }
     reader.close();
-    
+
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-        
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite2-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -273,7 +273,7 @@ public class TestAvroMultipleOutputsSyncable {
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-    
+
     outputFiles = fileSystem.globStatus(outputPath.suffix("/testwritenonschema-r-00000.avro"));
     Assert.assertEquals(1, outputFiles.length);
     reader = new DataFileReader<GenericData.Record>(
@@ -288,8 +288,8 @@ public class TestAvroMultipleOutputsSyncable {
     Assert.assertEquals(3, counts.get("apple").intValue());
     Assert.assertEquals(2, counts.get("banana").intValue());
     Assert.assertEquals(1, counts.get("carrot").intValue());
-    
-    
+
+
   }
 
   @Test
@@ -312,7 +312,7 @@ public class TestAvroMultipleOutputsSyncable {
     job.setOutputFormatClass(AvroKeyOutputFormat.class);
     String dir = System.getProperty("test.dir", ".") + "/mapred";
     Path outputPath = new Path(dir + "/out-specific");
-    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath); 
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath);
     FileOutputFormat.setOutputPath(job, outputPath);
 
     Assert.assertTrue(job.waitForCompletion(true));

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/pom.xml
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/pom.xml b/lang/java/maven-plugin/pom.xml
index 971e5ef..2003ff2 100644
--- a/lang/java/maven-plugin/pom.xml
+++ b/lang/java/maven-plugin/pom.xml
@@ -40,7 +40,7 @@
   <properties>
     <pluginTestingVersion>1.3</pluginTestingVersion>
   </properties>
-  
+
   <build>
     <plugins>
       <plugin>

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
index a5d8b31..83db1c0 100644
--- a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
@@ -76,15 +76,15 @@ public abstract class AbstractAvroMojo extends AbstractMojo {
    * A list of files or directories that should be compiled first thus making
    * them importable by subsequently compiled schemas. Note that imported files
    * should not reference each other.
-   * @parameter 
+   * @parameter
    */
   protected String[] imports;
-  
+
   /**
    * A set of Ant-like exclusion patterns used to prevent certain files from
    * being processed. By default, this set is empty such that no files are
    * excluded.
-   * 
+   *
    * @parameter
    */
   protected String[] excludes = new String[0];
@@ -93,7 +93,7 @@ public abstract class AbstractAvroMojo extends AbstractMojo {
    * A set of Ant-like exclusion patterns used to prevent certain files from
    * being processed. By default, this set is empty such that no files are
    * excluded.
-   * 
+   *
    * @parameter
    */
   protected String[] testExcludes = new String[0];
@@ -124,7 +124,7 @@ public abstract class AbstractAvroMojo extends AbstractMojo {
 
   /**
    * The current Maven project.
-   * 
+   *
    * @parameter default-value="${project}"
    * @readonly
    * @required
@@ -164,11 +164,11 @@ public abstract class AbstractAvroMojo extends AbstractMojo {
           sourceDirectory.getAbsolutePath(), excludes, getIncludes());
       compileFiles(includedFiles, sourceDirectory, outputDirectory);
     }
-    
+
     if (hasImports || hasSourceDir) {
       project.addCompileSourceRoot(outputDirectory.getAbsolutePath());
     }
-    
+
     if (hasTestDir) {
       String[] includedFiles = getIncludedFiles(
           testSourceDirectory.getAbsolutePath(), testExcludes,
@@ -184,7 +184,7 @@ public abstract class AbstractAvroMojo extends AbstractMojo {
     FileSet fs = new FileSet();
     fs.setDirectory(absPath);
     fs.setFollowSymlinks(false);
-    
+
     //exclude imports directory since it has already been compiled.
     if (imports != null) {
       String importExclude = null;

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
index 0e9ef9c..48f9050 100644
--- a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
@@ -35,7 +35,7 @@ import org.apache.maven.artifact.DependencyResolutionRequiredException;
 
 /**
  * Generate Java classes and interfaces from AvroIDL files (.avdl)
- * 
+ *
  * @goal idl-protocol
  * @requiresDependencyResolution runtime
  * @phase generate-sources
@@ -46,16 +46,16 @@ public class IDLProtocolMojo extends AbstractAvroMojo {
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avdl</code> is used to select IDL files.
-   * 
+   *
    * @parameter
    */
   private String[] includes = new String[] { "**/*.avdl" };
-  
+
   /**
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avdl</code> is used to select IDL files.
-   * 
+   *
    * @parameter
    */
   private String[] testIncludes = new String[] { "**/*.avdl" };

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
index 461559b..d49ec8c 100644
--- a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
@@ -28,7 +28,7 @@ import org.apache.avro.compiler.specific.SpecificCompiler;
 
 /**
  * Generate Java classes and interfaces from Avro protocol files (.avpr)
- * 
+ *
  * @goal protocol
  * @phase generate-sources
  * @threadSafe
@@ -38,7 +38,7 @@ public class ProtocolMojo extends AbstractAvroMojo {
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avpr</code> is used to select grammar files.
-   * 
+   *
    * @parameter
    */
   private String[] includes = new String[] { "**/*.avpr" };
@@ -47,11 +47,11 @@ public class ProtocolMojo extends AbstractAvroMojo {
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avpr</code> is used to select grammar files.
-   * 
+   *
    * @parameter
    */
   private String[] testIncludes = new String[] { "**/*.avpr" };
-  
+
   @Override
   protected void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException {
     File src = new File(sourceDirectory, filename);

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
index 7a7eaf9..6fc3d8d 100644
--- a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
@@ -28,32 +28,32 @@ import org.apache.avro.compiler.specific.SpecificCompiler;
 
 /**
  * Generate Java classes from Avro schema files (.avsc)
- * 
+ *
  * @goal schema
  * @phase generate-sources
  * @threadSafe
  */
 public class SchemaMojo extends AbstractAvroMojo {
   /**
-   * A parser used to parse all schema files. Using a common parser will 
+   * A parser used to parse all schema files. Using a common parser will
    * facilitate the import of external schemas.
    */
    private Schema.Parser schemaParser = new Schema.Parser();
-  
+
    /**
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avsc</code> is used to select grammar files.
-   * 
+   *
    * @parameter
    */
   private String[] includes = new String[] { "**/*.avsc" };
-  
+
   /**
    * A set of Ant-like inclusion patterns used to select files from the source
    * directory for processing. By default, the pattern
    * <code>**&#47;*.avsc</code> is used to select grammar files.
-   * 
+   *
    * @parameter
    */
   private String[] testIncludes = new String[] { "**/*.avsc" };
@@ -63,7 +63,7 @@ public class SchemaMojo extends AbstractAvroMojo {
     File src = new File(sourceDirectory, filename);
     Schema schema;
 
-    // This is necessary to maintain backward-compatibility. If there are  
+    // This is necessary to maintain backward-compatibility. If there are
     // no imported files then isolate the schemas from each other, otherwise
     // allow them to share a single schema so resuse and sharing of schema
     // is possible.
@@ -72,7 +72,7 @@ public class SchemaMojo extends AbstractAvroMojo {
     } else {
       schema = schemaParser.parse(src);
     }
-    
+
     SpecificCompiler compiler = new SpecificCompiler(schema);
     compiler.setTemplateDir(templateDirectory);
     compiler.setStringType(StringType.valueOf(stringType));

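The comment cleaned up above notes that SchemaMojo shares a single Schema.Parser across imports so that later schema files can reference named types parsed earlier. A small standalone illustration of that behavior; the .avsc paths and type names are made up:

import java.io.File;
import java.io.IOException;

import org.apache.avro.Schema;

public class SharedParserSketch {
  public static void main(String[] args) throws IOException {
    Schema.Parser parser = new Schema.Parser();
    // Parse the imported schema first so its named types are registered with the parser...
    Schema address = parser.parse(new File("src/main/avro/imports/Address.avsc"));
    // ...then a schema that refers to "Address" by name resolves against the same parser.
    Schema person = parser.parse(new File("src/main/avro/Person.avsc"));
    System.out.println(person.toString(true));
  }
}

With separate parsers (the no-imports branch of doCompile above), the second parse would fail on the unresolved "Address" reference.
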
http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
index e6f2091..2348781 100644
--- a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
@@ -41,7 +41,7 @@ public class AbstractAvroMojoTest extends AbstractMojoTestCase {
 
   /**
    * Assert the existence files in the given given directory.
-   * 
+   *
    * @param directory the directory being checked
    * @param files the files whose existence is being checked.
    */

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java
index 6442bc8..9316872 100644
--- a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java
@@ -21,7 +21,7 @@ import java.io.File;
 
 /**
  * Test the IDL Protocol Mojo.
- * 
+ *
  * @author saden
  */
 public class TestIDLProtocolMojo extends AbstractAvroMojoTest {

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
index 95ebbc6..342c155 100644
--- a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
@@ -21,7 +21,7 @@ import java.io.File;
 
 /**
  * Test the Protocol Mojo.
- * 
+ *
  * @author saden
  */
 public class TestProtocolMojo extends AbstractAvroMojoTest {

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
index 8a47be0..3e7fe74 100644
--- a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
@@ -21,7 +21,7 @@ import java.io.File;
 
 /**
  * Test the Schema Mojo.
- * 
+ *
  * @author saden
  */
 public class TestSchemaMojo extends AbstractAvroMojoTest {

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml b/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml
index ec6b9d8..69c504b 100644
--- a/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml
+++ b/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml
@@ -28,7 +28,7 @@
   <packaging>jar</packaging>
 
   <name>testproject</name>
- 
+
   <build>
     <plugins>
       <plugin>
@@ -59,7 +59,7 @@
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
       <version>1.9.10</version>
-    </dependency> 
+    </dependency>
   </dependencies>
-    
+
 </project>

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml b/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml
index 341bb1a..b484e3d 100644
--- a/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml
+++ b/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml
@@ -28,7 +28,7 @@
   <packaging>jar</packaging>
 
   <name>testproject</name>
- 
+
   <build>
     <plugins>
       <plugin>
@@ -59,7 +59,7 @@
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
       <version>1.9.10</version>
-    </dependency> 
+    </dependency>
   </dependencies>
-    
+
 </project>

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml
----------------------------------------------------------------------
diff --git a/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml b/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml
index 0faef29..cc000df 100644
--- a/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml
+++ b/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml
@@ -18,12 +18,12 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  
+
   <artifactId>avro-maven-plugin-test</artifactId>
   <packaging>jar</packaging>
 
   <name>testproject</name>
- 
+
   <build>
     <plugins>
       <plugin>
@@ -58,7 +58,7 @@
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
       <version>1.9.10</version>
-    </dependency> 
+    </dependency>
   </dependencies>
-    
+
 </project>

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/pom.xml
----------------------------------------------------------------------
diff --git a/lang/java/pom.xml b/lang/java/pom.xml
index da6e6e2..dd6007b 100644
--- a/lang/java/pom.xml
+++ b/lang/java/pom.xml
@@ -37,7 +37,7 @@
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 
     <!-- version properties for dependencies -->
-    
+
     <!--
       To build the avro-mapred module against Hadoop 1 specify
       -Dhadoop.version=1 or leave unspecified to build against Hadoop 2
@@ -387,7 +387,7 @@
         <plugins>
           <plugin>
             <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-surefire-plugin</artifactId> 
+            <artifactId>maven-surefire-plugin</artifactId>
             <configuration>
               <systemPropertyVariables>
                 <test.dir>${project.basedir}/target/</test.dir>

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
----------------------------------------------------------------------
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
index dabe15a..fa8239b 100644
--- a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
@@ -58,7 +58,7 @@ public class ProtobufData extends GenericData {
   private static final ProtobufData INSTANCE = new ProtobufData();
 
   protected ProtobufData() {}
-  
+
   /** Return the singleton instance. */
   public static ProtobufData get() { return INSTANCE; }
 
@@ -108,7 +108,7 @@ public class ProtobufData extends GenericData {
     default:
       return m.getField(f);
     }
-  }    
+  }
 
   private final Map<Descriptor,FieldDescriptor[]> fieldCache =
     new ConcurrentHashMap<Descriptor,FieldDescriptor[]>();
@@ -203,7 +203,7 @@ public class ProtobufData extends GenericData {
                             false);
 
       seen.put(descriptor, result);
-        
+
       List<Field> fields = new ArrayList<Field>();
       for (FieldDescriptor f : descriptor.getFields())
         fields.add(new Field(f.getName(), getSchema(f), null, getDefault(f)));
@@ -342,7 +342,7 @@ public class ProtobufData extends GenericData {
     default:
       throw new RuntimeException("Unexpected type: "+f.getType());
     }
-    
+
   }
 
 }

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
----------------------------------------------------------------------
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
index 0b9bf8a..3ae0ee9 100644
--- a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
@@ -58,7 +58,7 @@ public class ProtobufDatumReader<T> extends GenericDatumReader<T> {
   }
 
   @Override
-  protected Object readRecord(Object old, Schema expected, 
+  protected Object readRecord(Object old, Schema expected,
                               ResolvingDecoder in) throws IOException {
     Message.Builder b = (Message.Builder)super.readRecord(old, expected, in);
     return b.build();                             // build instance
@@ -78,7 +78,7 @@ public class ProtobufDatumReader<T> extends GenericDatumReader<T> {
   @Override
   protected Object readBytes(Object old, Decoder in) throws IOException {
     return ByteString.copyFrom(((ByteBuffer)super.readBytes(old, in)).array());
-  }    
+  }
 
 }
 

http://git-wip-us.apache.org/repos/asf/avro/blob/ade55151/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java
----------------------------------------------------------------------
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java
index 0af30ad..58fab3b 100644
--- a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java
@@ -36,19 +36,19 @@ public class ProtobufDatumWriter<T> extends GenericDatumWriter<T> {
   public ProtobufDatumWriter(Class<T> c) {
     super(ProtobufData.get().getSchema(c), ProtobufData.get());
   }
-  
+
   public ProtobufDatumWriter(Schema schema) {
     super(schema, ProtobufData.get());
   }
-  
+
   protected ProtobufDatumWriter(Schema root, ProtobufData protobufData) {
     super(root, protobufData);
   }
-  
+
   protected ProtobufDatumWriter(ProtobufData protobufData) {
     super(protobufData);
   }
-  
+
   @Override
   protected void writeEnum(Schema schema, Object datum, Encoder out)
     throws IOException {
@@ -61,7 +61,7 @@ public class ProtobufDatumWriter<T> extends GenericDatumWriter<T> {
 
   @Override
   protected void writeBytes(Object datum, Encoder out) throws IOException {
-    ByteString bytes = (ByteString)datum; 
+    ByteString bytes = (ByteString)datum;
     out.writeBytes(bytes.toByteArray(), 0, bytes.size());
   }
 

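ProtobufDatumWriter, touched above, writes protobuf messages through Avro's generic machinery, converting ByteString values to Avro bytes on the way out. A minimal sketch of using it to write protobuf messages into an Avro container file; this generic helper is an illustration, not code from this commit:

import java.io.File;
import java.io.IOException;

import com.google.protobuf.Message;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.protobuf.ProtobufData;
import org.apache.avro.protobuf.ProtobufDatumWriter;

public class ProtobufWriteSketch {
  /** Writes protobuf messages of type T into an Avro container file. */
  public static <T extends Message> void write(Class<T> type, Iterable<T> messages, File out)
      throws IOException {
    Schema schema = ProtobufData.get().getSchema(type);     // schema induced from the proto descriptor
    ProtobufDatumWriter<T> datumWriter = new ProtobufDatumWriter<T>(type);
    DataFileWriter<T> fileWriter = new DataFileWriter<T>(datumWriter);
    fileWriter.create(schema, out);
    for (T message : messages) {
      fileWriter.append(message);
    }
    fileWriter.close();
  }
}
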
