avro-commits mailing list archives

From cutt...@apache.org
Subject svn commit: r1325903 [4/4] - in /avro/trunk: ./ lang/java/ lang/java/mapred/ lang/java/mapred/src/main/java/org/apache/avro/hadoop/ lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/ lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/ la...
Date Fri, 13 Apr 2012 19:03:14 GMT
Added: avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java
URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java?rev=1325903&view=auto
==============================================================================
--- avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java (added)
+++ avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java Fri Apr 13 19:03:12 2012
@@ -0,0 +1,133 @@
+/**
+ * Licensed to Odiago, Inc. under one or more contributor license
+ * agreements.  See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.  Odiago, Inc.
+ * licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestKeyValueWordCount {
+  @Rule
+  public TemporaryFolder mTempDir = new TemporaryFolder();
+
+  public static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
+    private IntWritable mOne;
+
+    @Override
+    protected void setup(Context context) {
+      mOne = new IntWritable(1);
+    }
+
+    @Override
+    protected void map(LongWritable fileByteOffset, Text line, Context context)
+        throws IOException, InterruptedException {
+      context.write(line, mOne);
+    }
+  }
+
+  public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
+    @Override
+    protected void reduce(Text word, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      int sum = 0;
+      for (IntWritable count : counts) {
+        sum += count.get();
+      }
+      context.write(word, new IntWritable(sum));
+    }
+  }
+
+  @Test
+  public void testKeyValueMapReduce()
+      throws ClassNotFoundException, IOException, InterruptedException, URISyntaxException {
+    // Configure a word count job over our test input file.
+    Job job = new Job();
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(IntSumReducer.class);
+    job.setOutputKeyClass(Text.class);
+    job.setOutputValueClass(IntWritable.class);
+
+    job.setOutputFormatClass(AvroKeyValueOutputFormat.class);
+    Path outputPath = new Path(mTempDir.getRoot().getPath() + "/out-wordcount");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    // Run the job.
+    assertTrue(job.waitForCompletion(true));
+
+    // Verify that the Avro container file generated had the right KeyValuePair generic records.
+    File avroFile = new File(outputPath.toString(), "part-r-00000.avro");
+    DatumReader<GenericRecord> datumReader = new SpecificDatumReader<GenericRecord>(
+        AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT)));
+    DataFileReader<GenericRecord> avroFileReader
+        = new DataFileReader<GenericRecord>(avroFile, datumReader);
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, Integer> appleRecord
+        = new AvroKeyValue<CharSequence, Integer>(avroFileReader.next());
+    assertNotNull(appleRecord.get());
+    assertEquals("apple", appleRecord.getKey().toString());
+    assertEquals(3, appleRecord.getValue().intValue());
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, Integer> bananaRecord
+        = new AvroKeyValue<CharSequence, Integer>(avroFileReader.next());
+    assertNotNull(bananaRecord.get());
+    assertEquals("banana", bananaRecord.getKey().toString());
+    assertEquals(2, bananaRecord.getValue().intValue());
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, Integer> carrotRecord
+        = new AvroKeyValue<CharSequence, Integer>(avroFileReader.next());
+    assertEquals("carrot", carrotRecord.getKey().toString());
+    assertEquals(1, carrotRecord.getValue().intValue());
+
+    assertFalse(avroFileReader.hasNext());
+    avroFileReader.close();
+  }
+}
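
Note on the read path exercised above: AvroKeyValue.getSchema builds the generic
key/value pair schema (here a string key and an int value), and the AvroKeyValue
wrapper gives typed access to each GenericRecord's key and value fields. A minimal
standalone sketch of the same read path outside of a MapReduce job, assuming a
hypothetical local path out-wordcount/part-r-00000.avro in place of the test's
TemporaryFolder output:

import java.io.File;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.hadoop.io.AvroKeyValue;
import org.apache.avro.io.DatumReader;
import org.apache.avro.specific.SpecificDatumReader;

public class ReadWordCountOutput {
  public static void main(String[] args) throws IOException {
    // Hypothetical location of an AvroKeyValueOutputFormat part file.
    File avroFile = new File("out-wordcount/part-r-00000.avro");

    // Same pair schema the test builds: string key, int value.
    Schema schema = AvroKeyValue.getSchema(
        Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT));
    DatumReader<GenericRecord> datumReader = new SpecificDatumReader<GenericRecord>(schema);
    DataFileReader<GenericRecord> reader =
        new DataFileReader<GenericRecord>(avroFile, datumReader);
    try {
      while (reader.hasNext()) {
        // Wrap each generic record for typed access to its key and value.
        AvroKeyValue<CharSequence, Integer> pair =
            new AvroKeyValue<CharSequence, Integer>(reader.next());
        System.out.println(pair.getKey() + "\t" + pair.getValue());
      }
    } finally {
      reader.close();
    }
  }
}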

Propchange: avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java
URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java?rev=1325903&view=auto
==============================================================================
--- avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java (added)
+++ avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java Fri Apr 13 19:03:12 2012
@@ -0,0 +1,320 @@
+/**
+ * Licensed to Odiago, Inc. under one or more contributor license
+ * agreements.  See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.  Odiago, Inc.
+ * licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.FsInput;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.util.Utf8;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestWordCount {
+  @Rule
+  public TemporaryFolder tmpFolder = new TemporaryFolder();
+  public static final Schema STATS_SCHEMA =
+      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+          + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
+          + "{\"name\":\"name\",\"type\":\"string\"}]}");
+
+  private static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
+    private IntWritable mOne;
+
+    @Override
+    protected void setup(Context context) {
+      mOne = new IntWritable(1);
+    }
+
+    @Override
+    protected void map(LongWritable fileByteOffset, Text line, Context context)
+        throws IOException, InterruptedException {
+      context.write(line, mOne);
+    }
+  }
+
+  private static class StatCountMapper
+      extends Mapper<AvroKey<TextStats>, NullWritable, Text, IntWritable> {
+    private IntWritable mCount;
+    private Text mText;
+
+    @Override
+    protected void setup(Context context) {
+      mCount = new IntWritable(0);
+      mText = new Text("");
+    }
+
+    @Override
+    protected void map(AvroKey<TextStats> record, NullWritable ignore, Context context)
+        throws IOException, InterruptedException {
+      mCount.set(record.datum().count);
+      mText.set(record.datum().name.toString());
+      context.write(mText, mCount);
+    }
+  }
+
+  private static class GenericStatsReducer
+      extends Reducer<Text, IntWritable, AvroKey<GenericData.Record>, NullWritable> {
+    private AvroKey<GenericData.Record> mStats;
+
+    @Override
+    protected void setup(Context context) {
+      mStats = new AvroKey<GenericData.Record>(null);
+    }
+
+    @Override
+    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      GenericData.Record record = new GenericData.Record(STATS_SCHEMA);
+      int sum = 0;
+      for (IntWritable count : counts) {
+        sum += count.get();
+      }
+      record.put("name", new Utf8(line.toString()));
+      record.put("count", new Integer(sum));
+      mStats.datum(record);
+      context.write(mStats, NullWritable.get());
+    }
+  }
+
+  private static class SpecificStatsReducer
+      extends Reducer<Text, IntWritable, AvroKey<TextStats>, NullWritable> {
+    private AvroKey<TextStats> mStats;
+
+    @Override
+    protected void setup(Context context) {
+      mStats = new AvroKey<TextStats>(null);
+    }
+
+    @Override
+    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      TextStats record = new TextStats();
+      record.count = 0;
+      for (IntWritable count : counts) {
+        record.count += count.get();
+      }
+      record.name = line.toString();
+      mStats.datum(record);
+      context.write(mStats, NullWritable.get());
+    }
+  }
+
+  private static class SortMapper
+      extends Mapper<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
+    @Override
+    protected void map(AvroKey<TextStats> key, NullWritable value, Context context)
+        throws IOException, InterruptedException {
+      context.write(key, value);
+    }
+  }
+
+  private static class SortReducer
+      extends Reducer<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
+    @Override
+    protected void reduce(AvroKey<TextStats> key, Iterable<NullWritable> ignore, Context context)
+        throws IOException, InterruptedException {
+      context.write(key, NullWritable.get());
+    }
+  }
+
+  @Test
+  public void testAvroGenericOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(GenericStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-generic");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<GenericData.Record> reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA));
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroSpecificOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(SpecificStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroInput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
+            .toURI().toString()));
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setMapperClass(StatCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(SpecificStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific-input");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroMapOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
+            .toURI().toString()));
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setMapperClass(SortMapper.class);
+    AvroJob.setMapOutputKeySchema(job, TextStats.SCHEMA$);
+    job.setMapOutputValueClass(NullWritable.class);
+
+    job.setReducerClass(SortReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific-input");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+}
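
Side note: STATS_SCHEMA above is built with the older static Schema.parse. The same
schema can also be constructed with the Schema.Parser API (available since Avro 1.5);
a small equivalent sketch, shown for illustration only:

import org.apache.avro.Schema;

public class StatsSchema {
  // Same record schema as TestWordCount.STATS_SCHEMA, parsed via Schema.Parser.
  public static final Schema STATS_SCHEMA = new Schema.Parser().parse(
      "{\"name\":\"stats\",\"type\":\"record\","
          + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
          + "{\"name\":\"name\",\"type\":\"string\"}]}");
}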

Propchange: avro/trunk/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: avro/trunk/lang/java/mapred/src/test/resources/log4j.properties
URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/log4j.properties?rev=1325903&view=auto
==============================================================================
--- avro/trunk/lang/java/mapred/src/test/resources/log4j.properties (added)
+++ avro/trunk/lang/java/mapred/src/test/resources/log4j.properties Fri Apr 13 19:03:12 2012
@@ -0,0 +1,8 @@
+log4j.rootLogger=DEBUG,console
+
+# Define the console appender.
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c: %m%n
+

Propchange: avro/trunk/lang/java/mapred/src/test/resources/log4j.properties
------------------------------------------------------------------------------
    svn:eol-style = native

Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/._SUCCESS.crc
URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/._SUCCESS.crc?rev=1325903&view=auto
==============================================================================
Binary file - no diff available.

Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/._SUCCESS.crc
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/.part-r-00000.avro.crc
URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/.part-r-00000.avro.crc?rev=1325903&view=auto
==============================================================================
Binary file - no diff available.

Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/.part-r-00000.avro.crc
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/_SUCCESS
URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/_SUCCESS?rev=1325903&view=auto
==============================================================================
    (empty)

Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/_SUCCESS
------------------------------------------------------------------------------
    svn:executable = *

Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro
URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro?rev=1325903&view=auto
==============================================================================
Binary file - no diff available.

Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro
------------------------------------------------------------------------------
    svn:executable = *

Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream
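
The part-r-00000.avro fixture is a binary Avro container of TextStats records, so no
diff can be shown. The commit does not say how it was generated; as an illustration
only, here is a sketch that writes an equivalent file with the standard DataFileWriter
and SpecificDatumWriter APIs and the generated TextStats test class (assumed to live in
the org.apache.avro.mapreduce test package), assuming one record per word with the
counts asserted by TestWordCount:

import java.io.File;
import java.io.IOException;

import org.apache.avro.file.DataFileWriter;
import org.apache.avro.mapreduce.TextStats;
import org.apache.avro.specific.SpecificDatumWriter;

public class WriteTextStatsFixture {
  public static void main(String[] args) throws IOException {
    DataFileWriter<TextStats> writer = new DataFileWriter<TextStats>(
        new SpecificDatumWriter<TextStats>(TextStats.class));
    // Hypothetical output location; the checked-in fixture lives under src/test/resources.
    writer.create(TextStats.SCHEMA$, new File("part-r-00000.avro"));

    // One TextStats record per word; counts match the assertions in TestWordCount.
    String[] words = { "apple", "banana", "carrot" };
    int[] counts = { 3, 2, 1 };
    for (int i = 0; i < words.length; i++) {
      TextStats stats = new TextStats();
      stats.name = words[i];
      stats.count = counts[i];
      writer.append(stats);
    }
    writer.close();
  }
}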

Added: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt
URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt?rev=1325903&view=auto
==============================================================================
--- avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt (added)
+++ avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt Fri Apr 13 19:03:12 2012
@@ -0,0 +1,6 @@
+apple
+banana
+banana
+carrot
+apple
+apple

Propchange: avro/trunk/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: avro/trunk/lang/java/pom.xml
URL: http://svn.apache.org/viewvc/avro/trunk/lang/java/pom.xml?rev=1325903&r1=1325902&r2=1325903&view=diff
==============================================================================
--- avro/trunk/lang/java/pom.xml (original)
+++ avro/trunk/lang/java/pom.xml Fri Apr 13 19:03:12 2012
@@ -53,6 +53,8 @@
     <maven.version>2.0.10</maven.version>
     <ant.version>1.8.2</ant.version>
     <commons-lang.version>2.6</commons-lang.version>
+    <easymock.version>3.0</easymock.version>
+    <hamcrest.version>1.1</hamcrest.version>
 
     <!-- version properties for plugins -->
     <checkstyle-plugin.version>2.8</checkstyle-plugin.version>
@@ -148,6 +150,7 @@
           <artifactId>maven-checkstyle-plugin</artifactId>
           <version>${checkstyle-plugin.version}</version>
           <configuration>
+            <consoleOutput>true</consoleOutput>
             <configLocation>checkstyle.xml</configLocation>
           </configuration>
           <!-- Runs by default in the verify phase  (mvn verify or later in the build
cycle)


