hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tomwh...@apache.org
Subject svn commit: r632073 [2/2] - in /hadoop/core/trunk: ./ conf/ src/contrib/data_join/src/examples/org/apache/hadoop/contrib/utils/join/ src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/ src/contrib/streaming/src/java/org/apache/hadoop/s...
Date Thu, 28 Feb 2008 17:47:02 GMT
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/Partitioner.java Thu Feb 28 09:46:49
2008
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.mapred;
 
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-
 /** 
  * Partitions the key space.
  * 
@@ -33,9 +30,7 @@
  * 
  * @see Reducer
  */
-public interface Partitioner<K2 extends WritableComparable,
-                             V2 extends Writable>
-  extends JobConfigurable {
+public interface Partitioner<K2, V2> extends JobConfigurable {
   
   /** 
   * Get the partition number for a given key (hence record) given the total 

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/RecordReader.java Thu Feb 28 09:46:49
2008
@@ -21,9 +21,6 @@
 import java.io.IOException;
 import java.io.DataInput;
 
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-
 /**
  * <code>RecordReader</code> reads &lt;key, value&gt; pairs from an 
  * {@link InputSplit}.
@@ -37,16 +34,13 @@
  * @see InputSplit
  * @see InputFormat
  */
-public interface RecordReader<K extends WritableComparable,
-                              V extends Writable> {
+public interface RecordReader<K, V> {
   /** 
    * Reads the next key/value pair from the input for processing.
    *
    * @param key the key to read data into
    * @param value the value to read data into
    * @return true iff a key/value was read, false if at EOF
-   *
-   * @see Writable#readFields(DataInput)
    */      
   boolean next(K key, V value) throws IOException;
   

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/RecordWriter.java Thu Feb 28 09:46:49
2008
@@ -19,11 +19,8 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.io.DataOutput;
 
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.Writable;
 
 /**
  * <code>RecordWriter</code> writes the output &lt;key, value&gt; pairs

@@ -34,15 +31,13 @@
  * 
  * @see OutputFormat
  */
-public interface RecordWriter<K extends WritableComparable,
-                              V extends Writable> {
+public interface RecordWriter<K, V> {
   /** 
    * Writes a key/value pair.
    *
    * @param key the key to write.
    * @param value the value to write.
    * @throws IOException
-   * @see Writable#write(DataOutput)
    */      
   void write(K key, V value) throws IOException;
 

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java Thu Feb 28 09:46:49
2008
@@ -49,13 +49,15 @@
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.InputBuffer;
 import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
@@ -140,30 +142,32 @@
   /** Iterates values while keys match in sorted input. */
   static class ValuesIterator implements Iterator {
     private SequenceFile.Sorter.RawKeyValueIterator in; //input iterator
-    private WritableComparable key;               // current key
-    private Writable value;                       // current value
+    private Object key;               // current key
+    private Object value;                       // current value
     private boolean hasNext;                      // more w/ this key
     private boolean more;                         // more in file
-    private WritableComparator comparator;
-    private Class keyClass;
-    private Class valClass;
-    private Configuration conf;
+    private RawComparator comparator;
     private DataOutputBuffer valOut = new DataOutputBuffer();
-    private DataInputBuffer valIn = new DataInputBuffer();
-    private DataInputBuffer keyIn = new DataInputBuffer();
+    private InputBuffer valIn = new InputBuffer();
+    private InputBuffer keyIn = new InputBuffer();
     protected Reporter reporter;
+    private Deserializer keyDeserializer;
+    private Deserializer valDeserializer;
 
+    @SuppressWarnings("unchecked")
     public ValuesIterator (SequenceFile.Sorter.RawKeyValueIterator in, 
-                           WritableComparator comparator, Class keyClass,
+                           RawComparator comparator, Class keyClass,
                            Class valClass, Configuration conf, 
                            Reporter reporter)
       throws IOException {
       this.in = in;
-      this.conf = conf;
       this.comparator = comparator;
-      this.keyClass = keyClass;
-      this.valClass = valClass;
       this.reporter = reporter;
+      SerializationFactory serializationFactory = new SerializationFactory(conf);
+      this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
+      this.keyDeserializer.open(keyIn);
+      this.valDeserializer = serializationFactory.getDeserializer(valClass);
+      this.valDeserializer.open(valIn);
       getNext();
     }
 
@@ -196,25 +200,20 @@
     public boolean more() { return more; }
 
     /** The current key. */
-    public WritableComparable getKey() { return key; }
+    public Object getKey() { return key; }
 
+    @SuppressWarnings("unchecked")
     private void getNext() throws IOException {
-      Writable lastKey = key;                     // save previous key
-      try {
-        key = (WritableComparable)ReflectionUtils.newInstance(keyClass, this.conf);
-        value = (Writable)ReflectionUtils.newInstance(valClass, this.conf);
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
+      Object lastKey = key;                     // save previous key
       more = in.next();
       if (more) {
         //de-serialize the raw key/value
         keyIn.reset(in.getKey().getData(), in.getKey().getLength());
-        key.readFields(keyIn);
+        key = keyDeserializer.deserialize(null); // force new object
         valOut.reset();
         (in.getValue()).writeUncompressedBytes(valOut);
         valIn.reset(valOut.getData(), valOut.getLength());
-        value.readFields(valIn);
+        value = valDeserializer.deserialize(null); // force new object
 
         if (lastKey == null) {
           hasNext = true;
@@ -228,7 +227,7 @@
   }
   private class ReduceValuesIterator extends ValuesIterator {
     public ReduceValuesIterator (SequenceFile.Sorter.RawKeyValueIterator in,
-                                 WritableComparator comparator, Class keyClass,
+                                 RawComparator comparator, Class keyClass,
                                  Class valClass,
                                  Configuration conf, Reporter reporter)
       throws IOException {
@@ -293,7 +292,8 @@
     
     // sort the input file
     SequenceFile.Sorter sorter = new SequenceFile.Sorter(lfs, 
-        job.getOutputKeyComparator(), job.getMapOutputValueClass(), job);
+        job.getOutputKeyComparator(), job.getMapOutputKeyClass(),
+        job.getMapOutputValueClass(), job);
     sorter.setProgressable(reporter);
     rIter = sorter.merge(mapFiles, tempDir, 
         !conf.getKeepFailedTaskFiles()); // sort
@@ -310,7 +310,7 @@
     
     OutputCollector collector = new OutputCollector() {
         @SuppressWarnings("unchecked")
-        public void collect(WritableComparable key, Writable value)
+        public void collect(Object key, Object value)
           throws IOException {
           out.write(key, value);
           reduceOutputCounter.increment(1);
@@ -887,7 +887,7 @@
       //create an instance of the sorter
       sorter =
         new SequenceFile.Sorter(inMemFileSys, conf.getOutputKeyComparator(), 
-                                conf.getMapOutputValueClass(), conf);
+            conf.getMapOutputKeyClass(), conf.getMapOutputValueClass(), conf);
       sorter.setProgressable(getReporter(umbilical));
       
       // hosts -> next contact time

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/Reducer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/Reducer.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/Reducer.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/Reducer.java Thu Feb 28 09:46:49 2008
@@ -24,8 +24,6 @@
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.Closeable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 
 /** 
  * Reduces a set of intermediate values which share a key to a smaller set of
@@ -89,12 +87,12 @@
  *   <h4 id="Reduce">Reduce</h4>
  *   
  *   <p>In this phase the 
- *   {@link #reduce(WritableComparable, Iterator, OutputCollector, Reporter)}
+ *   {@link #reduce(Object, Iterator, OutputCollector, Reporter)}
  *   method is called for each <code>&lt;key, (list of values)></code>
pair in
  *   the grouped inputs.</p>
  *   <p>The output of the reduce task is typically written to the 
  *   {@link FileSystem} via 
- *   {@link OutputCollector#collect(WritableComparable, Writable)}.</p>
+ *   {@link OutputCollector#collect(Object, Object)}.</p>
  *   </li>
  * </ol>
  * 
@@ -163,9 +161,7 @@
  * @see Reporter
  * @see MapReduceBase
  */
-public interface Reducer<K2 extends WritableComparable, V2 extends Writable,
-                         K3 extends WritableComparable, V3 extends Writable>
-    extends JobConfigurable, Closeable {
+public interface Reducer<K2, V2, K3, V3> extends JobConfigurable, Closeable {
   
   /** 
    * <i>Reduces</i> values for a given key.  
@@ -177,7 +173,7 @@
    * </p>
    *   
    * <p>Output pairs are collected with calls to  
-   * {@link OutputCollector#collect(WritableComparable,Writable)}.</p>
+   * {@link OutputCollector#collect(Object,Object)}.</p>
    *
    * <p>Applications can use the {@link Reporter} provided to report progress 
    * or just indicate that they are alive. In scenarios where the application 

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/SequenceFileOutputFormat.java Thu
Feb 28 09:46:49 2008
@@ -65,7 +65,7 @@
                                 codec,
                                 progress);
 
-    return new RecordWriter() {
+    return new RecordWriter<WritableComparable, Writable>() {
 
         public void write(WritableComparable key, Writable value)
           throws IOException {

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/TextOutputFormat.java Thu Feb 28 09:46:49
2008
@@ -28,19 +28,14 @@
 
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.util.*;
 
 /** An {@link OutputFormat} that writes plain text files. */
-public class TextOutputFormat<K extends WritableComparable,
-                              V extends Writable>
-  extends OutputFormatBase<K, V> {
+public class TextOutputFormat<K, V> extends OutputFormatBase<K, V> {
 
-  protected static class LineRecordWriter<K extends WritableComparable,
-                                          V extends Writable>
+  protected static class LineRecordWriter<K, V>
     implements RecordWriter<K, V> {
     private static final String utf8 = "UTF-8";
     private static final byte[] tab;

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java Thu
Feb 28 09:46:49 2008
@@ -25,8 +25,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
@@ -63,8 +61,7 @@
  * the key is never ignored.
  * 
  */
-public class FieldSelectionMapReduce<K extends WritableComparable,
-                                     V extends Writable>
+public class FieldSelectionMapReduce<K, V>
     implements Mapper<K, V, Text, Text>, Reducer<Text, Text, Text, Text> {
 
   private String mapOutputKeyValueSpec;

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/HashPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/HashPartitioner.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/HashPartitioner.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/HashPartitioner.java Thu Feb 28
09:46:49 2008
@@ -21,13 +21,8 @@
 import org.apache.hadoop.mapred.Partitioner;
 import org.apache.hadoop.mapred.JobConf;
 
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.Writable;
-
 /** Partition keys by their {@link Object#hashCode()}. */
-public class HashPartitioner<K2 extends WritableComparable,
-                             V2 extends Writable>
-    implements Partitioner<K2, V2> {
+public class HashPartitioner<K2, V2> implements Partitioner<K2, V2> {
 
   public void configure(JobConf job) {}
 

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java Thu Feb 28
09:46:49 2008
@@ -25,11 +25,8 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.MapReduceBase;
 
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-
 /** Implements the identity function, mapping inputs directly to outputs. */
-public class IdentityMapper<K extends WritableComparable, V extends Writable>
+public class IdentityMapper<K, V>
     extends MapReduceBase implements Mapper<K, V, K, V> {
 
   /** The identify function.  Input key/value pair is written directly to

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityReducer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityReducer.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityReducer.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityReducer.java Thu Feb 28
09:46:49 2008
@@ -27,11 +27,8 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.MapReduceBase;
 
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-
 /** Performs no reduction, writing all input values directly to the output. */
-public class IdentityReducer<K extends WritableComparable, V extends Writable>
+public class IdentityReducer<K, V>
     extends MapReduceBase implements Reducer<K, V, K, V> {
 
   /** Writes all keys and values directly to output. */

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/InverseMapper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/InverseMapper.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/InverseMapper.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/InverseMapper.java Thu Feb 28
09:46:49 2008
@@ -20,16 +20,13 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 
-
 /** A {@link Mapper} that swaps keys and values. */
-public class InverseMapper<K extends WritableComparable,
-                           V extends WritableComparable>
+public class InverseMapper<K, V>
     extends MapReduceBase implements Mapper<K, V, V, K> {
 
   /** The inverse function.  Input keys and values are swapped.*/

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/KeyFieldBasedPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/KeyFieldBasedPartitioner.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/KeyFieldBasedPartitioner.java
(original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/KeyFieldBasedPartitioner.java
Thu Feb 28 09:46:49 2008
@@ -18,14 +18,10 @@
 
 package org.apache.hadoop.mapred.lib;
 
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Partitioner;
 
-public class KeyFieldBasedPartitioner<K2 extends WritableComparable,
-                                      V2 extends Writable>
-    implements Partitioner<K2, V2> {
+public class KeyFieldBasedPartitioner<K2, V2> implements Partitioner<K2, V2>
{
 
   private int numOfPartitionFields;
 

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/LongSumReducer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/LongSumReducer.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/LongSumReducer.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/LongSumReducer.java Thu Feb 28
09:46:49 2008
@@ -26,12 +26,10 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.MapReduceBase;
 
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.LongWritable;
 
 /** A {@link Reducer} that sums long values. */
-public class LongSumReducer<K extends WritableComparable>
-    extends MapReduceBase
+public class LongSumReducer<K> extends MapReduceBase
     implements Reducer<K, LongWritable, K, LongWritable> {
 
   public void reduce(K key, Iterator<LongWritable> values,

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/MultithreadedMapRunner.java Thu
Feb 28 09:46:49 2008
@@ -50,10 +50,7 @@
  * <b>mapred.map.multithreadedrunner.threads</b> property).
  * <p>
  */
-public class MultithreadedMapRunner<K1 extends WritableComparable,
-                                    V1 extends Writable,
-                                    K2 extends WritableComparable,
-                                    V2 extends Writable>
+public class MultithreadedMapRunner<K1, V1, K2, V2>
     implements MapRunnable<K1, V1, K2, V2> {
 
   private static final Log LOG =

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/NullOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/NullOutputFormat.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/NullOutputFormat.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/NullOutputFormat.java Thu Feb
28 09:46:49 2008
@@ -19,8 +19,6 @@
 package org.apache.hadoop.mapred.lib;
 
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapred.RecordWriter;
@@ -30,9 +28,7 @@
 /**
  * Consume all outputs and put them in /dev/null. 
  */
-public class NullOutputFormat<K extends WritableComparable,
-                              V extends Writable>
-  implements OutputFormat<K, V> {
+public class NullOutputFormat<K, V> implements OutputFormat<K, V> {
   
   public RecordWriter<K, V> getRecordWriter(FileSystem ignored, JobConf job, 
                                       String name, Progressable progress) {

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java Thu Feb 28 09:46:49
2008
@@ -24,7 +24,6 @@
 
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.Mapper;
@@ -33,8 +32,7 @@
 
 
 /** A {@link Mapper} that extracts text matching a regular expression. */
-public class RegexMapper<K extends WritableComparable>
-    extends MapReduceBase
+public class RegexMapper<K> extends MapReduceBase
     implements Mapper<K, Text, Text, LongWritable> {
 
   private Pattern pattern;

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/TokenCountMapper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/TokenCountMapper.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/TokenCountMapper.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/lib/TokenCountMapper.java Thu Feb
28 09:46:49 2008
@@ -23,7 +23,6 @@
 
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
@@ -32,8 +31,7 @@
 
 /** A {@link Mapper} that maps text values into <token,freq> pairs.  Uses
  * {@link StringTokenizer} to break text into tokens. */
-public class TokenCountMapper<K extends WritableComparable>
-    extends MapReduceBase
+public class TokenCountMapper<K> extends MapReduceBase
     implements Mapper<K, Text, Text, LongWritable> {
 
   public void map(K key, Text value,

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/mapred/package.html
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/mapred/package.html?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/mapred/package.html (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/mapred/package.html Thu Feb 28 09:46:49 2008
@@ -81,9 +81,9 @@
 </pre>
 
 <p>Applications typically implement 
-{@link org.apache.hadoop.mapred.Mapper#map(WritableComparable, Writable, OutputCollector,
Reporter)} 
+{@link org.apache.hadoop.mapred.Mapper#map(Object, Object, OutputCollector, Reporter)} 
 and
-{@link org.apache.hadoop.mapred.Reducer#reduce(WritableComparable, Iterator, OutputCollector,
Reporter)} 
+{@link org.apache.hadoop.mapred.Reducer#reduce(Object, Iterator, OutputCollector, Reporter)}

 methods.  The application-writer also specifies various facets of the job such
 as input and output locations, the <tt>Partitioner</tt>, <tt>InputFormat</tt>

 &amp; <tt>OutputFormat</tt> implementations to be used etc. as 
@@ -94,7 +94,7 @@
 <p>The framework spawns one map task per 
 {@link org.apache.hadoop.mapred.InputSplit} generated by the 
 {@link org.apache.hadoop.mapred.InputFormat} of the job and calls 
-{@link org.apache.hadoop.mapred.Mapper#map(WritableComparable, Writable, OutputCollector,
Reporter)} 
+{@link org.apache.hadoop.mapred.Mapper#map(Object, Object, OutputCollector, Reporter)} 
 with each &lt;key, value&gt; pair read by the 
 {@link org.apache.hadoop.mapred.RecordReader} from the <tt>InputSplit</tt> for

 the task. The intermediate outputs of the maps are then grouped by <tt>key</tt>s
@@ -104,7 +104,7 @@
 
 <p>The reduce tasks fetch the sorted intermediate outputs of the maps, via http, 
 merge the &lt;key, value&gt; pairs and call 
-{@link org.apache.hadoop.mapred.Reducer#reduce(WritableComparable, Iterator, OutputCollector,
Reporter)} 
+{@link org.apache.hadoop.mapred.Reducer#reduce(Object, Iterator, OutputCollector, Reporter)}

 for each &lt;key, list of values&gt; pair. The output of the reduce tasks' is 
 stored on the <tt>FileSystem</tt> by the 
 {@link org.apache.hadoop.mapred.RecordWriter} provided by the
@@ -117,7 +117,7 @@
   // <i>map: Search for the pattern specified by 'grep.mapper.regex' &amp;</i>
   //      <i>'grep.mapper.regex.group'</i>
 
-  class GrepMapper&lt;K extends WritableComparable, Text&gt; 
+  class GrepMapper&lt;K, Text&gt; 
   extends MapReduceBase  implements Mapper&lt;K, Text, Text, LongWritable&gt; {
 
     private Pattern pattern;
@@ -142,7 +142,7 @@
 
   // <i>reduce: Count the number of occurrences of the pattern</i>
 
-  class GrepReducer&lt;K extends WritableComparable&gt; extends MapReduceBase
+  class GrepReducer&lt;K&gt; extends MapReduceBase
   implements Reducer&lt;K, LongWritable, K, LongWritable&gt; {
 
     public void reduce(K key, Iterator&lt;LongWritable&gt; values,

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles.java Thu Feb 28 09:46:49 2008
@@ -818,7 +818,7 @@
     SequenceFile.Reader in = null;
     try {
       SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
-        new Text.Comparator(), Text.class, conf);
+        new Text.Comparator(), Text.class, Text.class, conf);
       sorter.sort(file, sorted);
       in = new SequenceFile.Reader(fs, sorted, conf);
 

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java Thu Feb 28 09:46:49
2008
@@ -319,7 +319,9 @@
   }
 
 
-  public static class SeekMapper extends Configured implements Mapper {
+  public static class SeekMapper<K> extends Configured
+    implements Mapper<WritableComparable, LongWritable, K, LongWritable> {
+    
     private Random random = new Random();
     private byte[] check  = new byte[BUFFER_SIZE];
     private FileSystem fs;
@@ -342,11 +344,12 @@
       fastCheck = job.getBoolean("fs.test.fastCheck", false);
     }
 
-    public void map(WritableComparable key, Writable value,
-                    OutputCollector collector, Reporter reporter)
+    public void map(WritableComparable key, LongWritable value,
+                    OutputCollector<K, LongWritable> collector,
+                    Reporter reporter)
       throws IOException {
-      String name = ((UTF8)key).toString();
-      long size = ((LongWritable)value).get();
+      String name = key.toString();
+      long size = value.get();
       long seed = Long.parseLong(name);
 
       if (size == 0) return;

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/io/FileBench.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/io/FileBench.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/io/FileBench.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/io/FileBench.java Thu Feb 28 09:46:49 2008
@@ -142,8 +142,8 @@
     RecordReader rr = inf.getRecordReader(
         new FileSplit(pin, 0, in.getLen(), conf), conf, Reporter.NULL);
     try {
-      WritableComparable key = rr.createKey();
-      Writable val = rr.createValue();
+      Object key = rr.createKey();
+      Object val = rr.createValue();
       Date start = new Date();
       while (rr.next(key, val));
       Date end = new Date();

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java Thu Feb 28 09:46:49
2008
@@ -312,7 +312,8 @@
                                                int megabytes, int factor) {
     SequenceFile.Sorter sorter = 
       fast
-      ? new SequenceFile.Sorter(fs, new RandomDatum.Comparator(), RandomDatum.class, conf)
+      ? new SequenceFile.Sorter(fs, new RandomDatum.Comparator(),
+                                RandomDatum.class, RandomDatum.class, conf)
       : new SequenceFile.Sorter(fs, RandomDatum.class, RandomDatum.class, conf);
     sorter.setMemory(megabytes * 1024*1024);
     sorter.setFactor(factor);

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRLocalFS.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRLocalFS.java?rev=632073&r1=632072&r2=632073&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRLocalFS.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRLocalFS.java Thu Feb 28
09:46:49 2008
@@ -255,15 +255,14 @@
   }
 
   static class MyOutputFormat implements OutputFormat {
-    static class MyRecordWriter implements RecordWriter {
+    static class MyRecordWriter implements RecordWriter<Object, Object> {
       private DataOutputStream out;
       
       public MyRecordWriter(Path outputFile, JobConf job) throws IOException {
         out = outputFile.getFileSystem(job).create(outputFile);
       }
       
-      public void write(WritableComparable key, 
-                        Writable value) throws IOException {
+      public void write(Object key, Object value) throws IOException {
         out.writeBytes(key.toString() + "\t" + value.toString() + "\n");
       }
 



Mime
View raw message