Return-Path: X-Original-To: apmail-avro-commits-archive@www.apache.org Delivered-To: apmail-avro-commits-archive@www.apache.org Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by minotaur.apache.org (Postfix) with SMTP id 983ACC15C for ; Fri, 13 Apr 2012 19:03:40 +0000 (UTC) Received: (qmail 51988 invoked by uid 500); 13 Apr 2012 19:03:40 -0000 Delivered-To: apmail-avro-commits-archive@avro.apache.org Received: (qmail 51918 invoked by uid 500); 13 Apr 2012 19:03:40 -0000 Mailing-List: contact commits-help@avro.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: dev@avro.apache.org Delivered-To: mailing list commits@avro.apache.org Received: (qmail 51908 invoked by uid 99); 13 Apr 2012 19:03:39 -0000 Received: from athena.apache.org (HELO athena.apache.org) (140.211.11.136) by apache.org (qpsmtpd/0.29) with ESMTP; Fri, 13 Apr 2012 19:03:39 +0000 X-ASF-Spam-Status: No, hits=-2000.0 required=5.0 tests=ALL_TRUSTED X-Spam-Check-By: apache.org Received: from [140.211.11.4] (HELO eris.apache.org) (140.211.11.4) by apache.org (qpsmtpd/0.29) with ESMTP; Fri, 13 Apr 2012 19:03:37 +0000 Received: from eris.apache.org (localhost [127.0.0.1]) by eris.apache.org (Postfix) with ESMTP id 4C4A62388A67 for ; Fri, 13 Apr 2012 19:03:17 +0000 (UTC) Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: svn commit: r1325903 [4/4] - in /avro/trunk: ./ lang/java/ lang/java/mapred/ lang/java/mapred/src/main/java/org/apache/avro/hadoop/ lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/ lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/ la... 
Date: Fri, 13 Apr 2012 19:03:14 -0000 To: commits@avro.apache.org From: cutting@apache.org X-Mailer: svnmailer-1.0.8-patched Message-Id: <20120413190317.4C4A62388A67@eris.apache.org> X-Virus-Checked: Checked by ClamAV on apache.org Added: avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java?rev=1325903&view=auto ============================================================================== --- avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java (added) +++ avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java Fri Apr 13 19:03:12 2012 @@ -0,0 +1,133 @@ +/** + * Licensed to Odiago, Inc. under one or more contributor license + * agreements. See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. Odiago, Inc. + * licenses this file to you under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing + * permissions and limitations under the License. 
package org.apache.avro.mapreduce;

import static org.junit.Assert.*;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.hadoop.io.AvroKeyValue;
import org.apache.avro.io.DatumReader;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * End-to-end test of a word-count MapReduce job whose output is written with
 * {@link AvroKeyValueOutputFormat}: the reducer emits (Text, IntWritable) pairs
 * and the output format wraps them into Avro "key/value" generic records.
 */
public class TestKeyValueWordCount {
  /** Per-test scratch directory; deleted automatically after each test. */
  @Rule
  public TemporaryFolder mTempDir = new TemporaryFolder();

  /** Emits (line, 1) for every input line; the reusable IntWritable avoids per-record allocation. */
  public static class LineCountMapper
      extends Mapper<LongWritable, Text, Text, IntWritable> {
    private IntWritable mOne;

    @Override
    protected void setup(Context context) {
      mOne = new IntWritable(1);
    }

    @Override
    protected void map(LongWritable fileByteOffset, Text line, Context context)
        throws IOException, InterruptedException {
      context.write(line, mOne);
    }
  }

  /**
   * Sums the counts for each word.
   *
   * <p>NOTE(review): this nested class shadows Hadoop's own
   * {@code org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer}; the import of that
   * class was removed here since the nested class is the one actually used.
   */
  public static class IntSumReducer
      extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    protected void reduce(Text word, Iterable<IntWritable> counts, Context context)
        throws IOException, InterruptedException {
      int sum = 0;
      for (IntWritable count : counts) {
        sum += count.get();
      }
      context.write(word, new IntWritable(sum));
    }
  }

  /**
   * Runs the word-count job over the bundled test input file and verifies the
   * resulting Avro container file holds the expected (word, count) records in
   * sorted key order: apple=3, banana=2, carrot=1.
   */
  @Test
  public void testKeyValueMapReduce()
      throws ClassNotFoundException, IOException, InterruptedException, URISyntaxException {
    // Configure a word count job over our test input file.
    Job job = new Job();
    FileInputFormat.setInputPaths(job, new Path(getClass()
        .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
        .toURI().toString()));
    job.setInputFormatClass(TextInputFormat.class);

    job.setMapperClass(LineCountMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);

    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setOutputFormatClass(AvroKeyValueOutputFormat.class);
    Path outputPath = new Path(mTempDir.getRoot().getPath() + "/out-wordcount");
    FileOutputFormat.setOutputPath(job, outputPath);

    // Run the job.
    assertTrue(job.waitForCompletion(true));

    // Verify that the Avro container file generated had the right KeyValuePair
    // generic records. The reader is closed in a finally block so a failed
    // assertion does not leak the open file handle.
    File avroFile = new File(outputPath.toString(), "part-r-00000.avro");
    DatumReader<GenericRecord> datumReader = new SpecificDatumReader<GenericRecord>(
        AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT)));
    DataFileReader<GenericRecord> avroFileReader
        = new DataFileReader<GenericRecord>(avroFile, datumReader);
    try {
      assertTrue(avroFileReader.hasNext());
      AvroKeyValue<CharSequence, Integer> appleRecord
          = new AvroKeyValue<CharSequence, Integer>(avroFileReader.next());
      assertNotNull(appleRecord.get());
      assertEquals("apple", appleRecord.getKey().toString());
      assertEquals(3, appleRecord.getValue().intValue());

      assertTrue(avroFileReader.hasNext());
      AvroKeyValue<CharSequence, Integer> bananaRecord
          = new AvroKeyValue<CharSequence, Integer>(avroFileReader.next());
      assertNotNull(bananaRecord.get());
      assertEquals("banana", bananaRecord.getKey().toString());
      assertEquals(2, bananaRecord.getValue().intValue());

      assertTrue(avroFileReader.hasNext());
      AvroKeyValue<CharSequence, Integer> carrotRecord
          = new AvroKeyValue<CharSequence, Integer>(avroFileReader.next());
      assertEquals("carrot", carrotRecord.getKey().toString());
      assertEquals(1, carrotRecord.getValue().intValue());

      assertFalse(avroFileReader.hasNext());
    } finally {
      avroFileReader.close();
    }
  }
}
} +} Propchange: avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java ------------------------------------------------------------------------------ svn:eol-style = native Added: avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java?rev=1325903&view=auto ============================================================================== --- avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java (added) +++ avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java Fri Apr 13 19:03:12 2012 @@ -0,0 +1,320 @@ +/** + * Licensed to Odiago, Inc. under one or more contributor license + * agreements. See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. Odiago, Inc. + * licenses this file to you under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing + * permissions and limitations under the License. 
package org.apache.avro.mapreduce;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.FsInput;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.util.Utf8;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * End-to-end tests of the Avro MapReduce input/output formats using word-count
 * jobs: Avro generic-record output, specific-record output, Avro input, and
 * Avro-keyed intermediate (map) output.
 */
public class TestWordCount {
  /** Per-test scratch directory; deleted automatically after each test. */
  @Rule
  public TemporaryFolder tmpFolder = new TemporaryFolder();

  /** Record schema for word-count results: {"count": int, "name": string}. */
  public static final Schema STATS_SCHEMA =
      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
          + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
          + "{\"name\":\"name\",\"type\":\"string\"}]}");

  /** Emits (line, 1) for every input line; the reusable IntWritable avoids per-record allocation. */
  private static class LineCountMapper
      extends Mapper<LongWritable, Text, Text, IntWritable> {
    private IntWritable mOne;

    @Override
    protected void setup(Context context) {
      mOne = new IntWritable(1);
    }

    @Override
    protected void map(LongWritable fileByteOffset, Text line, Context context)
        throws IOException, InterruptedException {
      context.write(line, mOne);
    }
  }

  /** Unpacks TextStats input records into (name, count) pairs, reusing Writable instances. */
  private static class StatCountMapper
      extends Mapper<AvroKey<TextStats>, NullWritable, Text, IntWritable> {
    private IntWritable mCount;
    private Text mText;

    @Override
    protected void setup(Context context) {
      mCount = new IntWritable(0);
      mText = new Text("");
    }

    @Override
    protected void map(AvroKey<TextStats> record, NullWritable ignore, Context context)
        throws IOException, InterruptedException {
      mCount.set(record.datum().count);
      mText.set(record.datum().name.toString());
      context.write(mText, mCount);
    }
  }

  /** Sums per-word counts into a generic record conforming to {@link #STATS_SCHEMA}. */
  private static class GenericStatsReducer
      extends Reducer<Text, IntWritable, AvroKey<GenericData.Record>, NullWritable> {
    private AvroKey<GenericData.Record> mStats;

    @Override
    protected void setup(Context context) {
      mStats = new AvroKey<GenericData.Record>(null);
    }

    @Override
    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
        throws IOException, InterruptedException {
      GenericData.Record record = new GenericData.Record(STATS_SCHEMA);
      int sum = 0;
      for (IntWritable count : counts) {
        sum += count.get();
      }
      record.put("name", new Utf8(line.toString()));
      record.put("count", sum);
      mStats.datum(record);
      context.write(mStats, NullWritable.get());
    }
  }

  /** Sums per-word counts into a generated TextStats specific record. */
  private static class SpecificStatsReducer
      extends Reducer<Text, IntWritable, AvroKey<TextStats>, NullWritable> {
    private AvroKey<TextStats> mStats;

    @Override
    protected void setup(Context context) {
      mStats = new AvroKey<TextStats>(null);
    }

    @Override
    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
        throws IOException, InterruptedException {
      TextStats record = new TextStats();
      record.count = 0;
      for (IntWritable count : counts) {
        record.count += count.get();
      }
      record.name = line.toString();
      mStats.datum(record);
      context.write(mStats, NullWritable.get());
    }
  }

  /** Identity mapper over Avro keys; sorting happens in the shuffle. */
  private static class SortMapper
      extends Mapper<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
    @Override
    protected void map(AvroKey<TextStats> key, NullWritable value, Context context)
        throws IOException, InterruptedException {
      context.write(key, value);
    }
  }

  /** Identity reducer: writes each distinct Avro key once. */
  private static class SortReducer
      extends Reducer<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
    @Override
    protected void reduce(AvroKey<TextStats> key, Iterable<NullWritable> ignore, Context context)
        throws IOException, InterruptedException {
      context.write(key, NullWritable.get());
    }
  }

  /**
   * Reads the single TextStats Avro output file of a completed job into a
   * name-to-count map. Asserts exactly one part file exists and always closes
   * the reader, even when iteration fails.
   */
  private Map<String, Integer> readSpecificCounts(Job job, Path outputPath) throws IOException {
    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
    Assert.assertEquals(1, outputFiles.length);
    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
        new SpecificDatumReader<TextStats>());
    Map<String, Integer> counts = new HashMap<String, Integer>();
    try {
      for (TextStats record : reader) {
        counts.put(record.name.toString(), record.count);
      }
    } finally {
      reader.close();
    }
    return counts;
  }

  /** Asserts the canonical word counts for the bundled test input. */
  private static void assertExpectedCounts(Map<String, Integer> counts) {
    Assert.assertEquals(3, counts.get("apple").intValue());
    Assert.assertEquals(2, counts.get("banana").intValue());
    Assert.assertEquals(1, counts.get("carrot").intValue());
  }

  /** Text input, generic-record Avro output. */
  @Test
  public void testAvroGenericOutput() throws Exception {
    Job job = new Job();

    FileInputFormat.setInputPaths(job, new Path(getClass()
        .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
        .toURI().toString()));
    job.setInputFormatClass(TextInputFormat.class);

    job.setMapperClass(LineCountMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);

    job.setReducerClass(GenericStatsReducer.class);
    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);

    job.setOutputFormatClass(AvroKeyOutputFormat.class);
    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-generic");
    FileOutputFormat.setOutputPath(job, outputPath);

    Assert.assertTrue(job.waitForCompletion(true));

    // Check that the results from the MapReduce were as expected. Generic
    // records need their own reader, so this test does not use readSpecificCounts.
    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
    Assert.assertEquals(1, outputFiles.length);
    DataFileReader<GenericData.Record> reader = new DataFileReader<GenericData.Record>(
        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA));
    Map<String, Integer> counts = new HashMap<String, Integer>();
    try {
      for (GenericData.Record record : reader) {
        counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
      }
    } finally {
      reader.close();
    }

    assertExpectedCounts(counts);
  }

  /** Text input, specific-record (TextStats) Avro output. */
  @Test
  public void testAvroSpecificOutput() throws Exception {
    Job job = new Job();

    FileInputFormat.setInputPaths(job, new Path(getClass()
        .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
        .toURI().toString()));
    job.setInputFormatClass(TextInputFormat.class);

    job.setMapperClass(LineCountMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);

    job.setReducerClass(SpecificStatsReducer.class);
    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);

    job.setOutputFormatClass(AvroKeyOutputFormat.class);
    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific");
    FileOutputFormat.setOutputPath(job, outputPath);

    Assert.assertTrue(job.waitForCompletion(true));

    // Check that the results from the MapReduce were as expected.
    assertExpectedCounts(readSpecificCounts(job, outputPath));
  }

  /** Avro (TextStats) input, specific-record Avro output. */
  @Test
  public void testAvroInput() throws Exception {
    Job job = new Job();

    FileInputFormat.setInputPaths(job, new Path(getClass()
        .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
        .toURI().toString()));
    job.setInputFormatClass(AvroKeyInputFormat.class);
    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);

    job.setMapperClass(StatCountMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);

    job.setReducerClass(SpecificStatsReducer.class);
    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);

    job.setOutputFormatClass(AvroKeyOutputFormat.class);
    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific-input");
    FileOutputFormat.setOutputPath(job, outputPath);

    Assert.assertTrue(job.waitForCompletion(true));

    // Check that the results from the MapReduce were as expected.
    assertExpectedCounts(readSpecificCounts(job, outputPath));
  }

  /** Avro input, Avro-keyed map output, identity sort job. */
  @Test
  public void testAvroMapOutput() throws Exception {
    Job job = new Job();

    FileInputFormat.setInputPaths(job, new Path(getClass()
        .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
        .toURI().toString()));
    job.setInputFormatClass(AvroKeyInputFormat.class);
    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);

    job.setMapperClass(SortMapper.class);
    AvroJob.setMapOutputKeySchema(job, TextStats.SCHEMA$);
    job.setMapOutputValueClass(NullWritable.class);

    job.setReducerClass(SortReducer.class);
    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);

    job.setOutputFormatClass(AvroKeyOutputFormat.class);
    // Distinct directory name; the original reused "/out-specific-input"
    // (copy-paste from testAvroInput), which was misleading even though
    // TemporaryFolder gives each test a fresh root.
    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-map-output");
    FileOutputFormat.setOutputPath(job, outputPath);

    Assert.assertTrue(job.waitForCompletion(true));

    // Check that the results from the MapReduce were as expected.
    assertExpectedCounts(readSpecificCounts(job, outputPath));
  }
}
+log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.target=System.err +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c: %m%n + Propchange: avro/trunk/lang/java/mapred/src/test/resources/log4j.properties ------------------------------------------------------------------------------ svn:eol-style = native Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/._SUCCESS.crc URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/._SUCCESS.crc?rev=1325903&view=auto ============================================================================== Binary file - no diff available. Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/._SUCCESS.crc ------------------------------------------------------------------------------ svn:mime-type = application/octet-stream Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/.part-r-00000.avro.crc URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/.part-r-00000.avro.crc?rev=1325903&view=auto ============================================================================== Binary file - no diff available. 
Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/.part-r-00000.avro.crc ------------------------------------------------------------------------------ svn:mime-type = application/octet-stream Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/_SUCCESS URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/_SUCCESS?rev=1325903&view=auto ============================================================================== (empty) Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/_SUCCESS ------------------------------------------------------------------------------ svn:executable = * Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro?rev=1325903&view=auto ============================================================================== Binary file - no diff available. 
Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro ------------------------------------------------------------------------------ svn:executable = * Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro ------------------------------------------------------------------------------ svn:mime-type = application/octet-stream Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt?rev=1325903&view=auto ============================================================================== --- avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt (added) +++ avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt Fri Apr 13 19:03:12 2012 @@ -0,0 +1,6 @@ +apple +banana +banana +carrot +apple +apple Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt ------------------------------------------------------------------------------ svn:eol-style = native Modified: avro/trunk/lang/java/pom.xml URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/pom.xml?rev=1325903&r1=1325902&r2=1325903&view=diff ============================================================================== --- avro/trunk/lang/java/pom.xml (original) +++ avro/trunk/lang/java/pom.xml Fri Apr 13 19:03:12 2012 @@ -53,6 +53,8 @@ 2.0.10 1.8.2 2.6 + 3.0 + 1.1 2.8 @@ -148,6 +150,7 @@ maven-checkstyle-plugin ${checkstyle-plugin.version} + true checkstyle.xml