hadoop-mapreduce-commits mailing list archives

From: acmur...@apache.org
Subject: svn commit: r946526 [5/6] - in /hadoop/mapreduce/trunk: ./ src/java/org/apache/hadoop/filecache/ src/java/org/apache/hadoop/mapred/ src/java/org/apache/hadoop/mapred/jobcontrol/ src/java/org/apache/hadoop/mapred/join/ src/java/org/apache/hadoop/mapred/...
Date: Thu, 20 May 2010 05:03:29 GMT
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionMapper.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionMapper.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionMapper.java Thu May 20 05:03:20 2010
@@ -24,6 +24,8 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Mapper;
@@ -54,6 +56,8 @@ import org.apache.hadoop.mapreduce.lib.i
  * Here is an example: "4,3,0,1:6,5,1-3,7-". It specifies to use fields
  * 4,3,0 and 1 for keys, and use fields 6,5,1,2,3,7 and above for values.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class FieldSelectionMapper<K, V>
     extends Mapper<K, V, Text, Text> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionReducer.java Thu May 20 05:03:20 2010
@@ -24,6 +24,8 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Reducer;
@@ -53,6 +55,8 @@ import org.apache.hadoop.mapreduce.Reduc
  * Here is an example: "4,3,0,1:6,5,1-3,7-". It specifies to use fields
  * 4,3,0 and 1 for keys, and use fields 6,5,1,2,3,7 and above for values.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class FieldSelectionReducer<K, V>
     extends Reducer<Text, Text, Text, Text> {
 

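The spec format described in the two Javadoc fragments above splits on ':' into a key-field list and a value-field list, where "1-3" is a closed range and "7-" means field 7 and everything after it. Below is a minimal standalone sketch of that expansion, assuming a record with a known field count; the helper is a hypothetical illustration of the spec syntax, not the FieldSelectionHelper class Hadoop actually uses.

  import java.util.ArrayList;
  import java.util.List;

  public class FieldSpecDemo {
    // Expand one half of a spec such as "6,5,1-3,7-" into concrete field indices.
    static List<Integer> expand(String spec, int numFields) {
      List<Integer> fields = new ArrayList<Integer>();
      for (String part : spec.split(",")) {
        if (part.endsWith("-")) {                       // open-ended range, e.g. "7-"
          int from = Integer.parseInt(part.substring(0, part.length() - 1));
          for (int i = from; i < numFields; i++) fields.add(i);
        } else if (part.contains("-")) {                // closed range, e.g. "1-3"
          String[] r = part.split("-");
          for (int i = Integer.parseInt(r[0]); i <= Integer.parseInt(r[1]); i++) fields.add(i);
        } else {
          fields.add(Integer.parseInt(part));           // single field index
        }
      }
      return fields;
    }

    public static void main(String[] args) {
      String[] keyAndValue = "4,3,0,1:6,5,1-3,7-".split(":");
      System.out.println("key fields   = " + expand(keyAndValue[0], 10));  // [4, 3, 0, 1]
      System.out.println("value fields = " + expand(keyAndValue[1], 10));  // [6, 5, 1, 2, 3, 7, 8, 9]
    }
  }
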
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java Thu May 20 05:03:20 2010
@@ -28,6 +28,8 @@ import java.util.Set;
 import java.util.Iterator;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -66,6 +68,8 @@ import org.apache.hadoop.net.NetworkTopo
  * 
  * @see CombineFileSplit
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class CombineFileInputFormat<K, V>
   extends FileInputFormat<K, V> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileRecordReader.java Thu May 20 05:03:20 2010
@@ -24,6 +24,8 @@ import java.lang.reflect.*;
 import org.apache.hadoop.fs.FileSystem;
 
 import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -34,7 +36,8 @@ import org.apache.hadoop.conf.Configurat
  * these data chunks from different files.
  * @see CombineFileSplit
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class CombineFileRecordReader<K, V> extends RecordReader<K, V> {
 
   static final Class [] constructorSignature = new Class [] 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java Thu May 20 05:03:20 2010
@@ -22,6 +22,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -41,6 +43,8 @@ import org.apache.hadoop.mapreduce.Recor
  * @see FileSplit
  * @see CombineFileInputFormat 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class CombineFileSplit extends InputSplit implements Writable {
 
   private Path[] paths;

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingInputFormat.java Thu May 20 05:03:20 2010
@@ -26,6 +26,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -43,6 +45,8 @@ import org.apache.hadoop.util.Reflection
  * 
  * @see MultipleInputs#addInputPath(Job, Path, Class, Class)
  */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
 public class DelegatingInputFormat<K, V> extends InputFormat<K, V> {
 
   @SuppressWarnings("unchecked")

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingMapper.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingMapper.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingMapper.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -30,6 +32,8 @@ import org.apache.hadoop.util.Reflection
  * 
  * @see MultipleInputs#addInputPath(Job, Path, Class, Class)
  */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
 public class DelegatingMapper<K1, V1, K2, V2> extends Mapper<K1, V1, K2, V2> {
 
   private Mapper<K1, V1, K2, V2> mapper;

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/DelegatingRecordReader.java Thu May 20 05:03:20 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.RecordReader;
@@ -29,6 +31,8 @@ import org.apache.hadoop.util.Reflection
  * This is a delegating RecordReader, which delegates the functionality to the
  * underlying record reader in {@link TaggedInputSplit}  
  */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
 public class DelegatingRecordReader<K, V> extends RecordReader<K, V> {
   RecordReader<K, V> originalRR;
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java Thu May 20 05:03:20 2010
@@ -24,6 +24,8 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -49,6 +51,8 @@ import org.apache.hadoop.util.StringUtil
  * {@link #isSplitable(JobContext, Path)} method to ensure input-files are
  * not split-up and are processed as a whole by {@link Mapper}s.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class FileInputFormat<K, V> extends InputFormat<K, V> {
   public static final String COUNTER_GROUP = 
                                 "FileInputFormatCounters";

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/FileSplit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/FileSplit.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/FileSplit.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/FileSplit.java Thu May 20 05:03:20 2010
@@ -25,6 +25,8 @@ import java.io.DataOutput;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -32,6 +34,8 @@ import org.apache.hadoop.io.Writable;
 /** A section of an input file.  Returned by {@link
  * InputFormat#getSplits(JobContext)} and passed to
  * {@link InputFormat#createRecordReader(InputSplit,TaskAttemptContext)}. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class FileSplit extends InputSplit implements Writable {
   private Path file;
   private long start;

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java Thu May 20 05:03:20 2010
@@ -21,11 +21,16 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Iterator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * This class wraps a list of problems with the input, so that the user
  * can get a list of problems together instead of finding and fixing them one 
  * by one.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class InvalidInputException extends IOException {
   private static final long serialVersionUID = -380668190578456802L;
   private List<IOException> problems;

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueLineRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueLineRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueLineRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueLineRecordReader.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputSplit;
@@ -32,6 +34,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * under the attribute name mapreduce.input.keyvaluelinerecordreader.key.value.separator. The default
  * separator is the tab character ('\t').
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class KeyValueLineRecordReader extends RecordReader<Text, Text> {
   public static final String KEY_VALUE_SEPERATOR = 
     "mapreduce.input.keyvaluelinerecordreader.key.value.separator";

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueTextInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueTextInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/KeyValueTextInputFormat.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.CompressionCodec;
@@ -37,6 +39,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * Each line is divided into key and value parts by a separator byte. If no
  * such a byte exists, the key will be the entire line and value will be empty.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class KeyValueTextInputFormat extends FileInputFormat<Text, Text> {
 
   @Override

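The separator property quoted in the KeyValueLineRecordReader hunk above ("mapreduce.input.keyvaluelinerecordreader.key.value.separator", default tab) is read from the job configuration, so a driver using KeyValueTextInputFormat can override it before submission. A hedged sketch; the paths and job name are placeholders.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
  import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;

  public class KeyValueDriver {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      // Property name taken verbatim from KeyValueLineRecordReader above; the default is '\t'.
      conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", ",");
      Job job = new Job(conf, "kv-example");
      job.setInputFormatClass(KeyValueTextInputFormat.class);
      FileInputFormat.addInputPath(job, new Path(args[0]));
      // ... mapper, reducer and output setup omitted ...
      System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
  }
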
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/LineRecordReader.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -46,6 +48,8 @@ import org.apache.hadoop.fs.Seekable;
 /**
  * Treats keys as offset in file and value as line. 
  */
+@InterfaceAudience.LimitedPrivate({"MapReduce", "Pig"})
+@InterfaceStability.Evolving
 public class LineRecordReader extends RecordReader<LongWritable, Text> {
   private static final Log LOG = LogFactory.getLog(LineRecordReader.class);
   public static final String MAX_LINE_LENGTH = 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/MultipleInputs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/MultipleInputs.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/MultipleInputs.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/MultipleInputs.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -33,6 +35,8 @@ import org.apache.hadoop.util.Reflection
  * This class supports MapReduce jobs that have multiple input paths with
  * a different {@link InputFormat} and {@link Mapper} for each path 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class MultipleInputs {
   public static final String DIR_FORMATS = 
     "mapreduce.input.multipleinputs.dir.formats";

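The MultipleInputs Javadoc above points at addInputPath(Job, Path, Class, Class), which registers one InputFormat/Mapper pair per input path. A hedged driver sketch with two identity stubs standing in for real application mappers:

  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.LongWritable;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hadoop.mapreduce.Mapper;
  import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
  import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
  import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

  public class MultipleInputsDriver {
    // Stand-ins for real application mappers; a real job would emit a common (Text, Text) pair from both.
    static class TextSideMapper extends Mapper<LongWritable, Text, Text, Text> { /* map() omitted */ }
    static class SequenceSideMapper extends Mapper<Text, Text, Text, Text> { /* map() omitted */ }

    public static void main(String[] args) throws Exception {
      Job job = new Job();
      // One InputFormat/Mapper pair per path, per the
      // MultipleInputs#addInputPath(Job, Path, Class, Class) signature referenced above.
      MultipleInputs.addInputPath(job, new Path(args[0]),
          TextInputFormat.class, TextSideMapper.class);
      MultipleInputs.addInputPath(job, new Path(args[1]),
          SequenceFileInputFormat.class, SequenceSideMapper.class);
      // ... reducer and output setup omitted ...
    }
  }
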
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/NLineInputFormat.java Thu May 20 05:03:20 2010
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -54,7 +56,8 @@ import org.apache.hadoop.util.LineReader
  * i.e. (k,v) is (LongWritable, Text).
  * The location hints will span the whole mapred cluster.
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class NLineInputFormat extends FileInputFormat<LongWritable, Text> { 
   public static final String LINES_PER_MAP = 
     "mapreduce.input.lineinputformat.linespermap";

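NLineInputFormat above reads its split size from the LINES_PER_MAP property shown in the hunk ("mapreduce.input.lineinputformat.linespermap"). A hedged fragment that caps each map task at 100 input lines:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hadoop.mapreduce.lib.input.NLineInputFormat;

  public class NLineDriver {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      // Property name taken from NLineInputFormat.LINES_PER_MAP above;
      // each split, and therefore each map task, receives at most 100 input lines.
      conf.setInt("mapreduce.input.lineinputformat.linespermap", 100);
      Job job = new Job(conf, "nline-example");
      job.setInputFormatClass(NLineInputFormat.class);
      // ... input path, mapper and output setup omitted ...
    }
  }
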
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsBinaryInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsBinaryInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsBinaryInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsBinaryInputFormat.java Thu May 20 05:03:20 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -33,6 +35,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * InputFormat reading keys, values from SequenceFiles in binary (raw)
  * format.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SequenceFileAsBinaryInputFormat
     extends SequenceFileInputFormat<BytesWritable,BytesWritable> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextInputFormat.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.RecordReader;
@@ -30,6 +32,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * SequenceFileAsTextRecordReader which converts the input keys and values
  * to their String forms by calling toString() method. 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SequenceFileAsTextInputFormat
   extends SequenceFileInputFormat<Text, Text> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileAsTextRecordReader.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -32,6 +34,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * calling toString() method. This class to SequenceFileAsTextInputFormat
  * class is as LineRecordReader class to TextInputFormat class.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SequenceFileAsTextRecordReader
   extends RecordReader<Text, Text> {
   

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFilter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFilter.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFilter.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFilter.java Thu May 20 05:03:20 2010
@@ -28,6 +28,8 @@ import java.util.regex.PatternSyntaxExce
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BytesWritable;
@@ -42,6 +44,8 @@ import org.apache.hadoop.util.Reflection
  * A class that allows a map/red job to work on a sample of sequence files.
  * The sample is decided by the filter class set by the job.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SequenceFileInputFilter<K, V>
     extends SequenceFileInputFormat<K, V> {
   public static final Log LOG = LogFactory.getLog(FileInputFormat.class);

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileInputFormat.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -34,6 +36,8 @@ import org.apache.hadoop.mapreduce.Recor
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
 /** An {@link InputFormat} for {@link SequenceFile}s. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SequenceFileInputFormat<K, V> extends FileInputFormat<K, V> {
 
   @Override

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileRecordReader.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,6 +34,8 @@ import org.apache.hadoop.mapreduce.Recor
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
 /** An {@link RecordReader} for {@link SequenceFile}s. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SequenceFileRecordReader<K, V> extends RecordReader<K, V> {
   private SequenceFile.Reader in;
   private long start;

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java Thu May 20 05:03:20 2010
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.mapreduce.lib.input;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -33,6 +35,8 @@ import org.apache.hadoop.mapreduce.TaskA
 /** An {@link InputFormat} for plain text files.  Files are broken into lines.
  * Either linefeed or carriage-return are used to signal end of line.  Keys are
  * the position in the file, and values are the line of text.. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class TextInputFormat extends FileInputFormat<LongWritable, Text> {
 
   @Override

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java Thu May 20 05:03:20 2010
@@ -23,6 +23,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -42,7 +44,8 @@ import org.apache.hadoop.util.StringUtil
  *  can get into SUCCESS or FAILED state, depending 
  *  the status of the job execution.
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class ControlledJob {
 
   // A job will be in one of the following states

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/JobControl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/JobControl.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/JobControl.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/jobcontrol/JobControl.java Thu May 20 05:03:20 2010
@@ -25,6 +25,8 @@ import java.util.Hashtable;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob.State;
 
 /** 
@@ -44,6 +46,8 @@ import org.apache.hadoop.mapreduce.lib.j
  *  for stopping the thread.
  *  
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class JobControl implements Runnable {
 
   // The thread can be in one of the following state

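ControlledJob and JobControl above chain dependent jobs; JobControl implements Runnable, so it is normally driven from its own thread until everything finishes. A hedged sketch only: the ControlledJob constructor and the addJob/allFinished/stop calls are assumptions where this diff does not show them.

  import java.util.Arrays;

  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
  import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;

  public class ChainDriver {
    public static void main(String[] args) throws Exception {
      Job first = new Job();                 // per-job configuration omitted
      Job second = new Job();

      ControlledJob cFirst = new ControlledJob(first, null);
      ControlledJob cSecond = new ControlledJob(second, Arrays.asList(cFirst)); // waits on cFirst

      JobControl control = new JobControl("chain-example");
      control.addJob(cFirst);
      control.addJob(cSecond);

      new Thread(control).start();           // JobControl implements Runnable, as shown above
      while (!control.allFinished()) {
        Thread.sleep(1000);
      }
      control.stop();
    }
  }
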
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ArrayListBackedIterator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ArrayListBackedIterator.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ArrayListBackedIterator.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ArrayListBackedIterator.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
@@ -32,6 +34,8 @@ import org.apache.hadoop.util.Reflection
  * added to it, replaying them as requested.
  * Prefer {@link StreamBackedIterator}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ArrayListBackedIterator<X extends Writable>
     implements ResetableIterator<X> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableInputFormat.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -30,6 +32,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * Refinement of InputFormat requiring implementors to provide
  * ComposableRecordReader instead of RecordReader.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class ComposableInputFormat<K extends WritableComparable<?>,
                                             V extends Writable>
     extends InputFormat<K,V> {

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ComposableRecordReader.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.RecordReader;
@@ -27,6 +29,8 @@ import org.apache.hadoop.mapreduce.Recor
 /**
  * Additional operations required of a RecordReader to participate in a join.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class ComposableRecordReader<K extends WritableComparable<?>,
                                              V extends Writable>
     extends RecordReader<K,V>

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputFormat.java Thu May 20 05:03:20 2010
@@ -25,6 +25,8 @@ import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.WritableComparable;
@@ -49,6 +51,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * @see MultiFilterRecordReader
  */
 @SuppressWarnings("unchecked")
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class CompositeInputFormat<K extends WritableComparable>
     extends InputFormat<K, TupleWritable> {
 

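CompositeInputFormat above evaluates a join expression over several sorted, identically partitioned inputs and hands each map call a TupleWritable per key. A hedged driver sketch for an inner join; the compose() helper and the "mapreduce.join.expr" property are assumptions carried over from the older mapred join package.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
  import org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat;

  public class JoinDriver {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      // compose() builds an expression along the lines of
      //   inner(tbl(...SequenceFileInputFormat, "/data/a"), tbl(...SequenceFileInputFormat, "/data/b"));
      // both compose() and the "mapreduce.join.expr" key are assumptions here.
      String expr = CompositeInputFormat.compose("inner",
          SequenceFileInputFormat.class, new Path(args[0]), new Path(args[1]));
      conf.set("mapreduce.join.expr", expr);
      Job job = new Job(conf, "join-example");
      job.setInputFormatClass(CompositeInputFormat.class);
      // Each map call then receives (key, TupleWritable) with one slot per joined source.
    }
  }
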
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputSplit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputSplit.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputSplit.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeInputSplit.java Thu May 20 05:03:20 2010
@@ -25,6 +25,8 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.HashSet;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -37,6 +39,8 @@ import org.apache.hadoop.util.Reflection
  * This InputSplit contains a set of child InputSplits. Any InputSplit inserted
  * into this collection must have a public default constructor.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class CompositeInputSplit extends InputSplit implements Writable {
 
   private int fill = 0;

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/CompositeRecordReader.java Thu May 20 05:03:20 2010
@@ -23,6 +23,8 @@ import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.PriorityQueue;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
@@ -38,6 +40,8 @@ import org.apache.hadoop.util.Reflection
  * A RecordReader that can effect joins of RecordReaders sharing a common key
  * type and partitioning.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class CompositeRecordReader<
     K extends WritableComparable<?>, // key type
     V extends Writable,  // accepts RecordReader<K,V> as children

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/InnerJoinRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/InnerJoinRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/InnerJoinRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/InnerJoinRecordReader.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
@@ -27,6 +29,8 @@ import org.apache.hadoop.io.WritableComp
 /**
  * Full inner join.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class InnerJoinRecordReader<K extends WritableComparable<?>>
     extends JoinRecordReader<K> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/JoinRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/JoinRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/JoinRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/JoinRecordReader.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 import java.util.PriorityQueue;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -30,6 +32,8 @@ import org.apache.hadoop.util.Reflection
 /**
  * Base class for Composite joins returning Tuples of arbitrary Writables.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class JoinRecordReader<K extends WritableComparable<?>>
     extends CompositeRecordReader<K,Writable,TupleWritable> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/MultiFilterRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/MultiFilterRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/MultiFilterRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/MultiFilterRecordReader.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 import java.util.PriorityQueue;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -33,6 +35,8 @@ import org.apache.hadoop.util.Reflection
  * Base class for Composite join returning values derived from multiple
  * sources, but generally not tuples.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class MultiFilterRecordReader<K extends WritableComparable<?>,
                                               V extends Writable>
     extends CompositeRecordReader<K,V,V> {

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/OuterJoinRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/OuterJoinRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/OuterJoinRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/OuterJoinRecordReader.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
@@ -27,6 +29,8 @@ import org.apache.hadoop.io.WritableComp
 /**
  * Full outer join.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class OuterJoinRecordReader<K extends WritableComparable<?>>
     extends JoinRecordReader<K> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/OverrideRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/OverrideRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/OverrideRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/OverrideRecordReader.java Thu May 20 05:03:20 2010
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.PriorityQueue;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
@@ -35,6 +37,8 @@ import org.apache.hadoop.util.Reflection
  * from S3 over S2, and values from S2 over S1 for all keys
  * emitted from all sources.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class OverrideRecordReader<K extends WritableComparable<?>,
                                   V extends Writable>
     extends MultiFilterRecordReader<K,V> {

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/Parser.java Thu May 20 05:03:20 2010
@@ -32,6 +32,8 @@ import java.util.ListIterator;
 import java.util.Map;
 import java.util.Stack;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -66,13 +68,19 @@ import org.apache.hadoop.util.Reflection
  * {@link CompositeRecordReader#combine}) and include a property to map its
  * value to an identifier in the parser.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class Parser {
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public enum TType { CIF, IDENT, COMMA, LPAREN, RPAREN, QUOT, NUM, }
 
   /**
    * Tagged-union type for tokens from the join expression.
    * @see Parser.TType
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static class Token {
 
     private TType type;
@@ -96,6 +104,8 @@ public class Parser {
     }
   }
 
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static class NumToken extends Token {
     private double num;
     public NumToken(double num) {
@@ -105,6 +115,8 @@ public class Parser {
     public double getNum() { return num; }
   }
 
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static class NodeToken extends Token {
     private Node node;
     NodeToken(Node node) {
@@ -116,6 +128,8 @@ public class Parser {
     }
   }
 
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static class StrToken extends Token {
     private String str;
     public StrToken(TType type, String str) {
@@ -175,6 +189,8 @@ public class Parser {
   }
 
 @SuppressWarnings("unchecked")
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract static class Node extends ComposableInputFormat {
     /**
      * Return the node type registered for the particular identifier.

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ResetableIterator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ResetableIterator.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ResetableIterator.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/ResetableIterator.java Thu May 20 05:03:20 2010
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 
 /**
@@ -26,6 +28,8 @@ import org.apache.hadoop.io.Writable;
  * added to it directly.
  * Note that this does not extend {@link java.util.Iterator}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public interface ResetableIterator<T extends Writable> {
 
   public static class EMPTY<U extends Writable>

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/StreamBackedIterator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/StreamBackedIterator.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/StreamBackedIterator.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/StreamBackedIterator.java Thu May 20 05:03:20 2010
@@ -23,12 +23,16 @@ import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
 
 /**
  * This class provides an implementation of ResetableIterator. This
  * implementation uses a byte array to store elements added to it.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class StreamBackedIterator<X extends Writable>
     implements ResetableIterator<X> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java Thu May 20 05:03:20 2010
@@ -25,6 +25,8 @@ import java.util.BitSet;
 import java.util.Iterator;
 import java.util.NoSuchElementException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -42,6 +44,8 @@ import org.apache.hadoop.io.WritableUtil
  *
  * @see org.apache.hadoop.io.Writable
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class TupleWritable implements Writable, Iterable<Writable> {
 
   protected BitSet written;

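TupleWritable above implements Iterable<Writable>, with one element per joined source, so a mapper consuming composite-join output can simply walk the tuple. A hedged sketch; the Text key type is an assumption about how the join is keyed.

  import java.io.IOException;

  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.io.Writable;
  import org.apache.hadoop.mapreduce.Mapper;
  import org.apache.hadoop.mapreduce.lib.join.TupleWritable;

  public class JoinedRecordMapper extends Mapper<Text, TupleWritable, Text, Text> {
    @Override
    protected void map(Text key, TupleWritable tuple, Context context)
        throws IOException, InterruptedException {
      StringBuilder joined = new StringBuilder();
      // TupleWritable implements Iterable<Writable>; one element per joined source.
      for (Writable value : tuple) {
        if (joined.length() > 0) {
          joined.append('\t');
        }
        joined.append(value.toString());
      }
      context.write(key, new Text(joined.toString()));
    }
  }
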
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/join/WrappedRecordReader.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
@@ -37,6 +39,8 @@ import org.apache.hadoop.util.Reflection
  * provided RecordReader and keeps a store of values matching a key when
  * this source is participating in a join.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class WrappedRecordReader<K extends WritableComparable<?>,
     U extends Writable> extends ComposableRecordReader<K,U> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/InverseMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/InverseMapper.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/InverseMapper.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/InverseMapper.java Thu May 20 05:03:20 2010
@@ -20,9 +20,13 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.Mapper;
 
 /** A {@link Mapper} that swaps keys and values. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class InverseMapper<K, V> extends Mapper<K,V,V,K> {
 
   /** The inverse function.  Input keys and values are swapped.*/

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/MultithreadedMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/MultithreadedMapper.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/MultithreadedMapper.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/MultithreadedMapper.java Thu May 20 05:03:20 2010
@@ -19,6 +19,8 @@
 package org.apache.hadoop.mapreduce.lib.map;
 
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.InputSplit;
@@ -54,6 +56,8 @@ import java.util.List;
  * value is 10 threads.
  * <p>
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class MultithreadedMapper<K1, V1, K2, V2> 
   extends Mapper<K1, V1, K2, V2> {
 

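The MultithreadedMapper Javadoc above mentions a default pool of 10 threads per map task. A hedged configuration sketch; the setMapperClass() and setNumberOfThreads() helpers are assumptions, since this diff only shows the class declaration.

  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper;
  import org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper;

  public class MultithreadedDriver {
    public static void main(String[] args) throws Exception {
      Job job = new Job();
      job.setMapperClass(MultithreadedMapper.class);
      // Run the thread-safe TokenCounterMapper on 8 threads per task
      // instead of the default of 10 mentioned in the Javadoc above.
      MultithreadedMapper.setMapperClass(job, TokenCounterMapper.class);
      MultithreadedMapper.setNumberOfThreads(job, 8);
      job.setMapOutputKeyClass(Text.class);
      job.setMapOutputValueClass(IntWritable.class);
    }
  }
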
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/RegexMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/RegexMapper.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/RegexMapper.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/RegexMapper.java Thu May 20 05:03:20 2010
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -29,6 +31,8 @@ import org.apache.hadoop.mapreduce.Mappe
 
 
 /** A {@link Mapper} that extracts text matching a regular expression. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class RegexMapper<K> extends Mapper<K, Text, Text, LongWritable> {
 
   public static String PATTERN = "mapreduce.mapper.regex";

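For context: a sketch of a grep-style counting job that pairs RegexMapper with LongSumReducer (also annotated later in this commit), assuming an already-configured Job named job; the regex is illustrative.

  job.getConfiguration().set(RegexMapper.PATTERN, "ERROR.*"); // regex applied to each input value
  job.setMapperClass(RegexMapper.class);                      // emits (matched text, 1)
  job.setCombinerClass(LongSumReducer.class);
  job.setReducerClass(LongSumReducer.class);                  // sums the per-match counts
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(LongWritable.class);
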
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/TokenCounterMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/TokenCounterMapper.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/TokenCounterMapper.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/TokenCounterMapper.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 import java.util.StringTokenizer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Mapper;
@@ -28,6 +30,8 @@ import org.apache.hadoop.mapreduce.Mappe
 /**
  * Tokenize the input values and emit each word with a count of 1.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class TokenCounterMapper extends Mapper<Object, Text, Text, IntWritable>{
     
   private final static IntWritable one = new IntWritable(1);

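For context: the classic word-count wiring, built entirely from classes annotated in this commit and assuming an already-configured Job named job with text input.

  job.setMapperClass(TokenCounterMapper.class); // emits (word, 1) per token
  job.setCombinerClass(IntSumReducer.class);    // local aggregation of counts
  job.setReducerClass(IntSumReducer.class);     // final (word, total) sums
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
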
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/WrappedMapper.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/WrappedMapper.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/WrappedMapper.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/map/WrappedMapper.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 import java.net.URI;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.Path;
@@ -41,6 +43,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * A {@link Mapper} which wraps a given one to allow custom 
  * {@link Mapper.Context} implementations.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class WrappedMapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> 
     extends Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT> {
   
@@ -54,6 +58,7 @@ public class WrappedMapper<KEYIN, VALUEI
     return new Context(mapContext);
   }
   
+  @InterfaceStability.Evolving
   public class Context 
       extends Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java Thu May 20 05:03:20 2010
@@ -23,6 +23,8 @@ import java.net.URI;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -38,6 +40,8 @@ import org.apache.hadoop.util.StringUtil
 /** An {@link OutputCommitter} that commits files specified 
  * in job output directory i.e. ${mapreduce.output.fileoutputformat.outputdir}. 
  **/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class FileOutputCommitter extends OutputCommitter {
 
   private static final Log LOG = LogFactory.getLog(FileOutputCommitter.class);

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 import java.text.NumberFormat;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -38,6 +40,8 @@ import org.apache.hadoop.mapreduce.TaskI
 import org.apache.hadoop.mapreduce.security.TokenCache;
 
 /** A base class for {@link OutputFormat}s that read from {@link FileSystem}s.*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class FileOutputFormat<K, V> extends OutputFormat<K, V> {
 
   /** Construct output file names so that, when an output directory listing is

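For context: typical use of the static helpers on this abstract base class, via an assumed existing Job named job; the output path and the gzip codec choice are illustrative.

  FileOutputFormat.setOutputPath(job, new Path("/user/example/out")); // must not exist yet
  FileOutputFormat.setCompressOutput(job, true);                      // compress the job output
  FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);    // org.apache.hadoop.io.compress.GzipCodec
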
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FilterOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FilterOutputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FilterOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/FilterOutputFormat.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
@@ -29,6 +31,8 @@ import org.apache.hadoop.mapreduce.TaskA
 /**
  * FilterOutputFormat is a convenience class that wraps OutputFormat. 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class FilterOutputFormat <K,V> extends OutputFormat<K, V> {
 
   protected OutputFormat<K,V> baseOut;

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/LazyOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/LazyOutputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/LazyOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/LazyOutputFormat.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -32,6 +34,8 @@ import org.apache.hadoop.util.Reflection
 /**
  * A Convenience class that creates output lazily.  
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class LazyOutputFormat <K,V> extends FilterOutputFormat<K, V> {
   public static String OUTPUT_FORMAT = 
     "mapreduce.output.lazyoutputformat.outputformat";

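For context: a sketch of wrapping a real output format with LazyOutputFormat so that tasks which emit no records do not leave empty part files, assuming an existing Job named job; the path is a placeholder.

  LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class); // the format that actually writes records
  FileOutputFormat.setOutputPath(job, new Path("/user/example/lazy-out"));
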
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/MapFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/MapFileOutputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/MapFileOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/MapFileOutputFormat.java Thu May 20 05:03:20 2010
@@ -35,12 +35,16 @@ import org.apache.hadoop.mapreduce.Parti
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
 /** 
  * An {@link org.apache.hadoop.mapreduce.OutputFormat} that writes 
  * {@link MapFile}s.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class MapFileOutputFormat 
     extends FileOutputFormat<WritableComparable<?>, Writable> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/MultipleOutputs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/MultipleOutputs.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/MultipleOutputs.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/MultipleOutputs.java Thu May 20 05:03:20 2010
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.mapreduce.lib.output;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -108,6 +110,8 @@ import java.util.*;
  * }
  * </pre>
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class MultipleOutputs<KEYOUT, VALUEOUT> {
 
   private static final String MULTIPLE_OUTPUTS = "mapreduce.multipleoutputs";

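For context: a sketch of the usual two-step MultipleOutputs pattern: register a named output in the driver, then write to it from a task. The "errors" name, the CountReducer class and the key/value types are illustrative, not part of this commit.

  // Driver side: declare an extra named output alongside the job's default output.
  MultipleOutputs.addNamedOutput(job, "errors",
      TextOutputFormat.class, Text.class, LongWritable.class);

  // Task side: write to the named output and close it in cleanup().
  public static class CountReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
    private MultipleOutputs<Text, LongWritable> mos;

    protected void setup(Context context) {
      mos = new MultipleOutputs<Text, LongWritable>(context);
    }

    protected void reduce(Text key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
      long total = 0;
      for (LongWritable v : values) {
        total += v.get();
      }
      if (key.toString().startsWith("ERROR")) {
        mos.write("errors", key, new LongWritable(total)); // routed to the "errors" named output
      } else {
        context.write(key, new LongWritable(total));       // default job output
      }
    }

    protected void cleanup(Context context) throws IOException, InterruptedException {
      mos.close(); // flush and close all named outputs
    }
  }
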
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/NullOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/NullOutputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/NullOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/NullOutputFormat.java Thu May 20 05:03:20 2010
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.mapreduce.lib.output;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.OutputFormat;
@@ -27,6 +29,8 @@ import org.apache.hadoop.mapreduce.TaskA
 /**
  * Consume all outputs and put them in /dev/null. 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class NullOutputFormat<K, V> extends OutputFormat<K, V> {
   
   @Override

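For context: the typical one-liner, assuming an existing Job named job whose useful work happens through side effects (for example writes to an external store) rather than through framework output.

  job.setOutputFormatClass(NullOutputFormat.class); // all collected output is discarded
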
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileAsBinaryOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileAsBinaryOutputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileAsBinaryOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileAsBinaryOutputFormat.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 import java.io.DataOutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.BytesWritable;
@@ -36,6 +38,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * An {@link org.apache.hadoop.mapreduce.OutputFormat} that writes keys, 
  * values to {@link SequenceFile}s in binary(raw) format
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SequenceFileAsBinaryOutputFormat 
     extends SequenceFileOutputFormat <BytesWritable,BytesWritable> {
   public static String KEY_CLASS = "mapreduce.output.seqbinaryoutputformat.key.class"; 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileOutputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/SequenceFileOutputFormat.java Thu May 20 05:03:20 2010
@@ -33,9 +33,13 @@ import org.apache.hadoop.mapreduce.Outpu
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
 /** An {@link OutputFormat} that writes {@link SequenceFile}s. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SequenceFileOutputFormat <K,V> extends FileOutputFormat<K, V> {
 
   protected SequenceFile.Writer getSequenceWriter(TaskAttemptContext context,

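For context: a sketch of block-compressed SequenceFile output, assuming an existing Job named job; the path and the DefaultCodec choice are illustrative.

  job.setOutputFormatClass(SequenceFileOutputFormat.class);
  FileOutputFormat.setOutputPath(job, new Path("/user/example/seq-out"));
  SequenceFileOutputFormat.setOutputCompressionType(job, SequenceFile.CompressionType.BLOCK);
  FileOutputFormat.setOutputCompressorClass(job, DefaultCodec.class); // org.apache.hadoop.io.compress.DefaultCodec
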
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java Thu May 20 05:03:20 2010
@@ -22,6 +22,8 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,6 +39,8 @@ import org.apache.hadoop.mapreduce.TaskA
 import org.apache.hadoop.util.*;
 
 /** An {@link OutputFormat} that writes plain text files. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class TextOutputFormat<K, V> extends FileOutputFormat<K, V> {
   public static String SEPERATOR = "mapreduce.output.textoutputformat.separator";
   protected static class LineRecordWriter<K, V>

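For context: a fragment changing the key/value separator from the default tab to a comma via the SEPERATOR key shown above (the constant is spelled as in the source), assuming an existing Job named job; the path is a placeholder.

  job.getConfiguration().set(TextOutputFormat.SEPERATOR, ",");
  job.setOutputFormatClass(TextOutputFormat.class);
  FileOutputFormat.setOutputPath(job, new Path("/user/example/csv-out"));
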
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java Thu May 20 05:03:20 2010
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.mapreduce.lib.partition;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BinaryComparable;
@@ -64,6 +66,8 @@ import org.apache.hadoop.mapreduce.Parti
  *   <li>{@link #setRightOffset}</li>
  * </ul></p>
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class BinaryPartitioner<V> extends Partitioner<BinaryComparable, V> 
   implements Configurable {
 

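For context: a hedged fragment partitioning BytesWritable keys on a prefix of the raw bytes, assuming an existing Job named job and assuming, per the javadoc above, that the offset helpers take the job's Configuration; the offsets are illustrative.

  job.setPartitionerClass(BinaryPartitioner.class);
  BinaryPartitioner.setOffsets(job.getConfiguration(), 0, -5); // hash bytes 0 through fifth-from-last, ignoring the last four
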
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/HashPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/HashPartitioner.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/HashPartitioner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/HashPartitioner.java Thu May 20 05:03:20 2010
@@ -18,9 +18,13 @@
 
 package org.apache.hadoop.mapreduce.lib.partition;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.Partitioner;
 
 /** Partition keys by their {@link Object#hashCode()}. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HashPartitioner<K, V> extends Partitioner<K, V> {
 
   /** Use {@link Object#hashCode()} to partition. */

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java Thu May 20 05:03:20 2010
@@ -27,6 +27,8 @@ import java.util.Random;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -51,6 +53,8 @@ import org.apache.hadoop.util.ToolRunner
  * Utility for collecting samples and writing a partition file for
  * {@link TotalOrderPartitioner}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class InputSampler<K,V> extends Configured implements Tool  {
 
   private static final Log LOG = LogFactory.getLog(InputSampler.class);

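For context: a sketch of the total-order sort setup this utility exists for, assuming an existing Job named job whose input format is already configured; the partition-file path and the sampling parameters (frequency 0.1, 10000 samples, at most 10 splits) are illustrative.

  job.setPartitionerClass(TotalOrderPartitioner.class);
  TotalOrderPartitioner.setPartitionFile(job.getConfiguration(),
      new Path("/user/example/_partitions"));                   // split points written below, read by the partitioner
  InputSampler.writePartitionFile(job,
      new InputSampler.RandomSampler<Text, Text>(0.1, 10000, 10)); // freq, numSamples, maxSplitsSampled
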
Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedComparator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedComparator.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedComparator.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedComparator.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.WritableComparator;
@@ -46,7 +48,8 @@ import org.apache.hadoop.mapreduce.lib.p
  * We assume that the fields in the key are separated by 
  * {@link JobContext#MAP_OUTPUT_KEY_FIELD_SEPERATOR}.
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class KeyFieldBasedComparator<K, V> extends WritableComparator 
     implements Configurable {
   private KeyFieldHelper keyFieldHelper = new KeyFieldHelper();

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java Thu May 20 05:03:20 2010
@@ -23,6 +23,8 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.Job;
@@ -44,6 +46,8 @@ import org.apache.hadoop.mapreduce.lib.p
   *  (the end of the field).
   * 
   */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class KeyFieldBasedPartitioner<K2, V2> extends Partitioner<K2, V2> 
     implements Configurable {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/TotalOrderPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/TotalOrderPartitioner.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/TotalOrderPartitioner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/partition/TotalOrderPartitioner.java Thu May 20 05:03:20 2010
@@ -23,6 +23,8 @@ import java.lang.reflect.Array;
 import java.util.ArrayList;
 import java.util.Arrays;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,6 +42,8 @@ import org.apache.hadoop.util.Reflection
  * Partitioner effecting a total order by reading split points from
  * an externally generated source.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class TotalOrderPartitioner<K extends WritableComparable<?>,V>
     extends Partitioner<K,V> implements Configurable {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/IntSumReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/IntSumReducer.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/IntSumReducer.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/IntSumReducer.java Thu May 20 05:03:20 2010
@@ -20,9 +20,13 @@ package org.apache.hadoop.mapreduce.lib.
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.mapreduce.Reducer;
 
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class IntSumReducer<Key> extends Reducer<Key,IntWritable,
                                                 Key,IntWritable> {
   private IntWritable result = new IntWritable();

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/LongSumReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/LongSumReducer.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/LongSumReducer.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/LongSumReducer.java Thu May 20 05:03:20 2010
@@ -19,9 +19,14 @@
 package org.apache.hadoop.mapreduce.lib.reduce;
 
 import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapreduce.Reducer;
 
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class LongSumReducer<KEY> extends Reducer<KEY, LongWritable,
                                                  KEY,LongWritable> {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/WrappedReducer.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/WrappedReducer.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/WrappedReducer.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/lib/reduce/WrappedReducer.java Thu May 20 05:03:20 2010
@@ -21,6 +21,8 @@ package org.apache.hadoop.mapreduce.lib.
 import java.io.IOException;
 import java.net.URI;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.Path;
@@ -40,6 +42,8 @@ import org.apache.hadoop.mapreduce.TaskA
  * A {@link Reducer} which wraps a given one to allow for custom 
  * {@link Reducer.Context} implementations.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class WrappedReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> 
     extends Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> {
 
@@ -53,6 +57,7 @@ public class WrappedReducer<KEYIN, VALUE
     return new Context(reduceContext);
   }
   
+  @InterfaceStability.Evolving
   public class Context 
       extends Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context {
 

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/protocol/ClientProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/protocol/ClientProtocol.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/protocol/ClientProtocol.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/protocol/ClientProtocol.java Thu May 20 05:03:20 2010
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.prot
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSelector;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.VersionedProtocol;
@@ -50,6 +52,8 @@ import org.apache.hadoop.security.token.
 @KerberosInfo(
     serverPrincipal = JTConfig.JT_USER_NAME)
 @TokenInfo(DelegationTokenSelector.class)
+@InterfaceAudience.Private
+@InterfaceStability.Stable
 public interface ClientProtocol extends VersionedProtocol {
   /* 
    *Changing the versionID to 2L since the getTaskCompletionEvents method has

Added: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/protocol/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/protocol/package-info.java?rev=946526&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/protocol/package-info.java (added)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/protocol/package-info.java Thu May 20 05:03:20 2010
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Stable
+package org.apache.hadoop.mapreduce.protocol;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java Thu May 20 05:03:20 2010
@@ -28,6 +28,7 @@ import javax.servlet.http.HttpServletReq
 
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.apache.hadoop.record.Utils;
 
@@ -37,6 +38,7 @@ import org.apache.hadoop.record.Utils;
  *
  */
 @InterfaceAudience.Private
+@InterfaceStability.Unstable
 public class SecureShuffleUtils {
   public static final String HTTP_HEADER_URL_HASH = "UrlHash";
   public static final String HTTP_HEADER_REPLY_URL_HASH = "ReplyHash";

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/security/TokenCache.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/security/TokenCache.java?rev=946526&r1=946525&r2=946526&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/security/TokenCache.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapreduce/security/TokenCache.java Thu May 20 05:03:20 2010
@@ -51,6 +51,7 @@ import org.apache.hadoop.security.UserGr
  * The secrets can be stored just before submission of jobs and read during
  * the task execution.  
  */
+@InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class TokenCache {
   


