hbase-commits mailing list archives

From: st...@apache.org
Subject: svn commit: r1295710 [2/8] - in /hbase/trunk: bin/ src/main/java/org/apache/hadoop/hbase/ src/main/java/org/apache/hadoop/hbase/avro/ src/main/java/org/apache/hadoop/hbase/catalog/ src/main/java/org/apache/hadoop/hbase/client/ src/main/java/org/apache/...
Date: Thu, 01 Mar 2012 17:53:33 GMT
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,9 @@
  */
 package org.apache.hadoop.hbase;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 
 /**
  * Thrown if a region server is passed an unknown scanner id.
@@ -26,6 +29,8 @@ package org.apache.hadoop.hbase;
  * scanner lease on the serverside has expired OR the serverside is closing
  * down and has cancelled all leases.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class UnknownScannerException extends DoNotRetryIOException {
   private static final long serialVersionUID = 993179627856392526L;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java Thu Mar  1 17:53:03 2012
@@ -19,12 +19,15 @@ package org.apache.hadoop.hbase;
 
 import java.lang.annotation.*;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * A package attribute that captures the version of hbase that was compiled.
  * Copied down from hadoop.  All is same except name of interface.
  */
 @Retention(RetentionPolicy.RUNTIME)
 @Target(ElementType.PACKAGE)
+@InterfaceAudience.Private
 public @interface VersionAnnotation {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/YouAreDeadException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/YouAreDeadException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/YouAreDeadException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/YouAreDeadException.java Thu Mar  1 17:53:03 2012
@@ -21,12 +21,17 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * This exception is thrown by the master when a region server reports and is
  * already being processed as dead. This can happen when a region server loses
  * its session but has not yet realized it.
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class YouAreDeadException extends IOException {
   public YouAreDeadException(String message) {
     super(message);

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java Thu Mar  1 17:53:03 2012
@@ -21,9 +21,14 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Thrown if the client can't connect to zookeeper
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class ZooKeeperConnectionException extends IOException {
   private static final long serialVersionUID = 1L << 23 - 1L;
   /** default constructor */

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/avro/AvroServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/avro/AvroServer.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/avro/AvroServer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/avro/AvroServer.java Thu Mar  1 17:53:03 2012
@@ -30,6 +30,7 @@ import org.apache.avro.ipc.specific.Spec
 import org.apache.avro.util.Utf8;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -59,6 +60,7 @@ import org.apache.hadoop.hbase.util.Byte
 /**
  * Start an Avro server
  */
+@InterfaceAudience.Private
 public class AvroServer {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/avro/AvroUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/avro/AvroUtil.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/avro/AvroUtil.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/avro/AvroUtil.java Thu Mar  1 17:53:03 2012
@@ -27,6 +27,7 @@ import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericArray;
 import org.apache.avro.generic.GenericData;
 import org.apache.avro.util.Utf8;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HServerAddress;
@@ -59,6 +60,7 @@ import org.apache.hadoop.hbase.client.Sc
 import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.util.Bytes;
 
+@InterfaceAudience.Private
 public class AvroUtil {
 
   //

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java Thu Mar  1 17:53:03 2012
@@ -28,6 +28,7 @@ import java.util.concurrent.atomic.Atomi
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -57,6 +58,7 @@ import org.apache.hadoop.ipc.RemoteExcep
  * <p>Call {@link #start()} to start up operation.  Call {@link #stop()}} to
  * interrupt waits and close up shop.
  */
+@InterfaceAudience.Private
 public class CatalogTracker {
   // TODO: This class needs a rethink.  The original intent was that it would be
   // the one-stop-shop for root and meta locations and that it would get this

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java Thu Mar  1 17:53:03 2012
@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
@@ -40,6 +41,7 @@ import org.apache.hadoop.hbase.util.Writ
  * TODO: Put MetaReader and MetaEditor together; doesn't make sense having
  * them distinct.
  */
+@InterfaceAudience.Private
 public class MetaEditor {
   // TODO: Strip CatalogTracker from this class.  Its all over and in the end
   // its only used to get its Configuration so we can get associated

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaMigrationRemovingHTD.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaMigrationRemovingHTD.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaMigrationRemovingHTD.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaMigrationRemovingHTD.java Thu Mar  1 17:53:03 2012
@@ -24,6 +24,7 @@ import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java Thu Mar  1 17:53:03 2012
@@ -27,6 +27,7 @@ import java.util.TreeMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -46,6 +47,7 @@ import org.apache.hadoop.ipc.RemoteExcep
 /**
  * Reads region and assignment information from <code>.META.</code>.
  */
+@InterfaceAudience.Private
 public class MetaReader {
   // TODO: Strip CatalogTracker from this class.  Its all over and in the end
   // its only used to get its Configuration so we can get associated

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/RootLocationEditor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/RootLocationEditor.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/RootLocationEditor.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/catalog/RootLocationEditor.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.catalog;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
@@ -30,6 +31,7 @@ import org.apache.zookeeper.KeeperExcept
 /**
  * Makes changes to the location of <code>-ROOT-</code> in ZooKeeper.
  */
+@InterfaceAudience.Private
 public class RootLocationEditor {
   private static final Log LOG = LogFactory.getLog(RootLocationEditor.class);
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/AbstractClientScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/AbstractClientScanner.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/AbstractClientScanner.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/AbstractClientScanner.java Thu Mar  1 17:53:03 2012
@@ -20,9 +20,14 @@ package org.apache.hadoop.hbase.client;
 import java.io.IOException;
 import java.util.Iterator;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Helper class for custom client scanners.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class AbstractClientScanner implements ResultScanner {
 
   @Override

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Action.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Action.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Action.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Action.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
@@ -32,6 +34,8 @@ import org.apache.hadoop.io.Writable;
  * {@link HTable#batch} to associate the action with its region and maintain
  * the index from the original request. 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class Action<R> implements Writable, Comparable {
 
   private Row action;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Append.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Append.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Append.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Append.java Thu Mar  1 17:53:03 2012
@@ -25,6 +25,8 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
@@ -41,6 +43,8 @@ import org.apache.hadoop.io.Writable;
  * row to append to. At least one column to append must be specified using the
  * {@link #add(byte[], byte[], byte[])} method.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class Append extends Mutation implements Row {
   // TODO: refactor to derive from Put?
   private static final String RETURN_RESULTS = "_rr_";
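
The Javadoc above spells out the Append workflow: construct the object with the row to append to, then name at least one column via add(byte[], byte[], byte[]). A minimal sketch of how a client might drive it, assuming a hypothetical table "t1" with column family "cf" and assuming HTableInterface#append is available in this trunk revision:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Append;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AppendExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "t1");              // hypothetical table name
        try {
          Append append = new Append(Bytes.toBytes("row1"));
          // Append the bytes ",suffix" to whatever cf:q currently holds.
          append.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes(",suffix"));
          Result result = table.append(append);
          System.out.println(
              Bytes.toString(result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"))));
        } finally {
          table.close();
        }
      }
    }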

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Attributes.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Attributes.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Attributes.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Attributes.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,11 @@ package org.apache.hadoop.hbase.client;
 
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public interface Attributes {
   /**
    * Sets an attribute.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.util.LinkedList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HConstants;
@@ -40,6 +42,8 @@ import org.apache.hadoop.io.DataOutputBu
  * If there are multiple regions in a table, this scanner will iterate
  * through them all.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ClientScanner extends AbstractClientScanner {
     private final Log LOG = LogFactory.getLog(this.getClass());
     private Scan scan;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java Thu Mar  1 17:53:03 2012
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HConstants;
 
 
@@ -24,6 +26,8 @@ import org.apache.hadoop.hbase.HConstant
  * Utility used by client connections such as {@link HConnection} and
  * {@link ServerCallable}
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class ConnectionUtils {
   /**
    * Calculate pause time.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Delete.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Delete.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Delete.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Delete.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -64,6 +66,8 @@ import java.util.Map;
  * deleteFamily -- then you need to use the method overrides that take a
  * timestamp.  The constructor timestamp is not referenced.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class Delete extends Mutation
   implements Writable, Row, Comparable<Row> {
   private static final byte DELETE_VERSION = (byte)3;
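
The class comment above notes that deleteColumn/deleteFamily do not use the constructor timestamp; the timestamp is passed to the individual method overloads instead. A rough illustration of that, with hypothetical table, family, and qualifier names:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.util.Bytes;

    public class DeleteExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "t1");                    // hypothetical table name
        try {
          Delete delete = new Delete(Bytes.toBytes("row1"));
          // Delete all versions of cf:q1 with timestamp <= the supplied value.
          delete.deleteColumns(Bytes.toBytes("cf"), Bytes.toBytes("q1"), 1330621983000L);
          // Delete every cell in the "old" family, all versions.
          delete.deleteFamily(Bytes.toBytes("old"));
          table.delete(delete);
        } finally {
          table.close();
        }
      }
    }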

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Get.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Get.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Get.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Get.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -63,6 +65,8 @@ import java.util.TreeSet;
  * <p>
  * To add a filter, execute {@link #setFilter(Filter) setFilter}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class Get extends OperationWithAttributes
   implements Writable, Row, Comparable<Row> {
   private static final byte GET_VERSION = (byte)2;
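
The visible fragment of the Get Javadoc ends with the note that a server-side Filter is attached via setFilter(Filter). A sketch of a narrow point read that uses it, with made-up table, family, qualifier, and value names:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.filter.BinaryComparator;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.ValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class GetExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "t1");                   // hypothetical table name
        try {
          Get get = new Get(Bytes.toBytes("row1"));
          get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));   // restrict to cf:q
          get.setMaxVersions(3);                                    // return up to 3 versions
          // Server-side filter: only keep cells whose value equals "v1".
          get.setFilter(new ValueFilter(CompareOp.EQUAL,
              new BinaryComparator(Bytes.toBytes("v1"))));
          Result result = table.get(get);
          byte[] value = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"));
          System.out.println(value == null ? "not found" : Bytes.toString(value));
        } finally {
          table.close();
        }
      }
    }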

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java Thu Mar  1 17:53:03 2012
@@ -32,6 +32,8 @@ import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.ClusterStatus;
@@ -72,6 +74,8 @@ import org.apache.hadoop.util.StringUtil
  * <p>Currently HBaseAdmin instances are not expected to be long-lived.  For
  * example, an HBaseAdmin instance will not ride over a Master restart.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class HBaseAdmin implements Abortable, Closeable {
   private final Log LOG = LogFactory.getLog(this.getClass().getName());
 //  private final HConnection connection;
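
Given the note above that HBaseAdmin instances are not expected to be long-lived (they will not ride over a Master restart), typical usage is create, use, and close in a tight scope. A sketch with a made-up table name and column family:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class AdminExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);       // talks to the running Master
        try {
          if (!admin.tableExists("t1")) {              // hypothetical table name
            HTableDescriptor desc = new HTableDescriptor("t1");
            desc.addFamily(new HColumnDescriptor("cf"));
            admin.createTable(desc);
          }
        } finally {
          admin.close();                               // do not cache the instance long-term
        }
      }
    }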

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HConnection.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HConnection.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HConnection.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HConnection.java Thu Mar  1 17:53:03 2012
@@ -25,6 +25,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -57,6 +59,8 @@ import org.apache.hadoop.hbase.zookeeper
  *
  * @see HConnectionManager
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public interface HConnection extends Abortable, Closeable {
   /**
    * @return Configuration instance being used by this HConnection instance.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java Thu Mar  1 17:53:03 2012
@@ -47,6 +47,8 @@ import java.util.concurrent.atomic.Atomi
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -131,6 +133,8 @@ import org.apache.zookeeper.KeeperExcept
  * cleanup to the client.
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class HConnectionManager {
   // An LRU Map of HConnectionKey -> HConnection (TableServer).  All
   // access must be synchronized.  This map is not private because tests

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTable.java Thu Mar  1 17:53:03 2012
@@ -39,6 +39,8 @@ import java.util.concurrent.atomic.Atomi
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
@@ -99,6 +101,8 @@ import org.apache.hadoop.hbase.util.Writ
  * @see HConnection
  * @see HConnectionManager
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HTable implements HTableInterface {
   private static final Log LOG = LogFactory.getLog(HTable.class);
   private HConnection connection;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
 import java.io.IOException;
@@ -28,6 +30,8 @@ import java.io.IOException;
  *
  * @since 0.21.0
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HTableFactory implements HTableInterfaceFactory {
   @Override
   public HTableInterface createHTableInterface(Configuration config,

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
@@ -36,6 +38,8 @@ import java.util.Map;
  *
  * @since 0.21.0
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public interface HTableInterface extends Closeable {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableInterfaceFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableInterfaceFactory.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableInterfaceFactory.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableInterfaceFactory.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
 
@@ -29,6 +31,8 @@ import org.apache.hadoop.conf.Configurat
  *
  * @since 0.21.0
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public interface HTableInterfaceFactory {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java Thu Mar  1 17:53:03 2012
@@ -25,6 +25,8 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -56,6 +58,8 @@ import org.apache.hadoop.hbase.util.Pool
  * Pool will manage its own connections to the cluster. See
  * {@link HConnectionManager}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HTablePool implements Closeable {
   private final PoolMap<String, HTableInterface> tables;
   private final int maxSize;
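
The pool described above hands out table references keyed by table name and manages its own connections; a client checks a table out per operation and hands it back when done. A rough sketch, assuming that in this trunk revision closing a pooled table returns it to the pool (putTable was the older idiom) and using a hypothetical table "t1" with family "cf":

    import java.io.IOException;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.HTablePool;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PoolExample {
      private static final HTablePool POOL =
          new HTablePool(HBaseConfiguration.create(), 10);   // at most 10 cached refs per table

      public static byte[] fetch(String row) throws IOException {
        HTableInterface table = POOL.getTable("t1");          // hypothetical table name
        try {
          return table.get(new Get(Bytes.toBytes(row)))
              .getValue(Bytes.toBytes("cf"), Bytes.toBytes("q"));
        } finally {
          table.close();                                      // hands the table back to the pool
        }
      }
    }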

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java Thu Mar  1 17:53:03 2012
@@ -25,6 +25,9 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
@@ -35,6 +38,8 @@ import org.apache.hadoop.hbase.client.Ro
  * 
  *
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HTableUtil {
 
   private static final int INITIAL_LIST_SIZE = 250;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Increment.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Increment.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Increment.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Increment.java Thu Mar  1 17:53:03 2012
@@ -27,6 +27,8 @@ import java.util.NavigableMap;
 import java.util.Set;
 import java.util.TreeMap;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
@@ -43,6 +45,8 @@ import org.apache.hadoop.io.Writable;
  * to increment.  At least one column to increment must be specified using the
  * {@link #addColumn(byte[], byte[], long)} method.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class Increment implements Row {
   private static final byte INCREMENT_VERSION = (byte)2;
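
Per the Javadoc above, an Increment names its row and then one or more columns to bump via addColumn(byte[], byte[], long), and the region server applies them for that row in one call. A small sketch with invented table, family, and qualifier names:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Increment;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    public class IncrementExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "counters");           // hypothetical table name
        try {
          Increment increment = new Increment(Bytes.toBytes("page#42"));
          increment.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("hits"), 1L);
          increment.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("bytes"), 512L);
          Result result = table.increment(increment);
          long hits = Bytes.toLong(
              result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("hits")));
          System.out.println("hits = " + hits);
        } finally {
          table.close();
        }
      }
    }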
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,9 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Specify Isolation levels in Scan operations.
  * <p>
@@ -29,6 +32,8 @@ package org.apache.hadoop.hbase.client;
  * should return data that is being modified by transactions that might
  * not have been committed yet.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public enum IsolationLevel {
 
   READ_COMMITTED(1),

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java Thu Mar  1 17:53:03 2012
@@ -28,6 +28,8 @@ import java.util.TreeMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -45,6 +47,8 @@ import org.apache.hadoop.hbase.util.Writ
  * Although public visibility, this is not a public-facing API and may evolve in
  * minor releases.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class MetaScanner {
   private static final Log LOG = LogFactory.getLog(MetaScanner.class);
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiAction.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiAction.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiAction.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiAction.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -36,6 +38,8 @@ import java.util.TreeMap;
  * Container for Actions (i.e. Get, Delete, or Put), which are grouped by
  * regionName. Intended to be used with HConnectionManager.processBatch()
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public final class MultiAction<R> implements Writable {
 
   // map of regions to lists of puts/gets/deletes for that region.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiPut.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiPut.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiPut.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiPut.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -42,6 +44,8 @@ import java.util.TreeSet;
  * @deprecated Use MultiAction instead
  * Data type class for putting multiple regions worth of puts in one RPC.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class MultiPut extends Operation implements Writable {
   public HServerAddress address; // client code ONLY
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,8 @@
 package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
@@ -42,6 +44,8 @@ import java.util.TreeMap;
 /**
  * A container for Result objects, grouped by regionName.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class MultiResponse implements Writable {
 
   // map of regionName to list of (Results paired to the original index for that

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Mutation.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Mutation.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Mutation.java Thu Mar  1 17:53:03 2012
@@ -27,10 +27,14 @@ import java.util.Map;
 import java.util.TreeMap;
 import java.util.UUID;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class Mutation extends OperationWithAttributes {
   // Attribute used in Mutations to indicate the originating cluster.
   private static final String CLUSTER_ID_ATTR = "_c.id_";

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java Thu Mar  1 17:53:03 2012
@@ -19,11 +19,15 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.RegionException;
 
 /**
  * Thrown when no region server can be found for a region
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class NoServerForRegionException extends RegionException {
   private static final long serialVersionUID = 1L << 11 - 1L;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Operation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Operation.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Operation.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Operation.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase.client;
 import java.io.IOException;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.codehaus.jackson.map.ObjectMapper;
 
 /**
@@ -29,6 +31,8 @@ import org.codehaus.jackson.map.ObjectMa
  * (e.g. Put, Get, Delete, Scan, Next, etc.)
  * Contains methods for exposure to logging and debugging tools.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class Operation {
   // TODO make this configurable
   private static final int DEFAULT_MAX_COLS = 5;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java Thu Mar  1 17:53:03 2012
@@ -27,10 +27,14 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.io.WritableUtils;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class OperationWithAttributes extends Operation implements Attributes {
   // a opaque blob of attributes
   private Map<String, byte[]> attributes;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Put.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Put.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Put.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Put.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.HeapSize;
@@ -43,6 +45,8 @@ import java.util.TreeMap;
  * for each column to be inserted, execute {@link #add(byte[], byte[], byte[]) add} or
  * {@link #add(byte[], byte[], long, byte[]) add} if setting the timestamp.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class Put extends Mutation
   implements HeapSize, Writable, Row, Comparable<Row> {
   private static final byte PUT_VERSION = (byte)2;
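
The Javadoc above gives the recipe: construct the Put with the target row, then call add once per column, using the overload with a long when supplying an explicit timestamp. In sketch form (table, family, and values are hypothetical):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PutExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "t1");                 // hypothetical table name
        try {
          Put put = new Put(Bytes.toBytes("row1"));
          // Server assigns the timestamp.
          put.add(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("v1"));
          // Explicit timestamp supplied by the client.
          put.add(Bytes.toBytes("cf"), Bytes.toBytes("q2"), 1330621983000L, Bytes.toBytes("v2"));
          table.put(put);
        } finally {
          table.close();
        }
      }
    }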

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java Thu Mar  1 17:53:03 2012
@@ -19,9 +19,13 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.RegionException;
 
 /** Thrown when a table can not be located */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class RegionOfflineException extends RegionException {
   private static final long serialVersionUID = 466008402L;
   /** default constructor */

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Result.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Result.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Result.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Result.java Thu Mar  1 17:53:03 2012
@@ -31,6 +31,8 @@ import java.util.Map;
 import java.util.NavigableMap;
 import java.util.TreeMap;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.SplitKeyValue;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -65,6 +67,8 @@ import org.apache.hadoop.io.Writable;
  * through {@link KeyValue#getRow()}, {@link KeyValue#getFamily()}, {@link KeyValue#getQualifier()},
  * {@link KeyValue#getTimestamp()}, and {@link KeyValue#getValue()}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class Result implements Writable, WritableWithSize {
   private static final byte RESULT_VERSION = (byte)1;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java Thu Mar  1 17:53:03 2012
@@ -22,10 +22,15 @@ package org.apache.hadoop.hbase.client;
 import java.io.Closeable;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Interface for client-side scanning.
  * Go to {@link HTable} to obtain instances.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public interface ResultScanner extends Closeable, Iterable<Result> {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java Thu Mar  1 17:53:03 2012
@@ -19,10 +19,15 @@ import java.io.IOException;
 import java.util.Date;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Exception thrown by HTable methods when an attempt to do something (like
  * commit changes) fails after a bunch of retries.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class RetriesExhaustedException extends IOException {
   private static final long serialVersionUID = 1876775844L;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
@@ -42,6 +44,8 @@ import java.util.Set;
  * {@link #getCause(int)}, {@link #getRow(int)} and {@link #getAddress(int)}.
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class RetriesExhaustedWithDetailsException
 extends RetriesExhaustedException {
   List<Throwable> exceptions;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Row.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Row.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Row.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Row.java Thu Mar  1 17:53:03 2012
@@ -19,11 +19,15 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.WritableComparable;
 
 /**
  * Has a row.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public interface Row extends WritableComparable<Row> {
   /**
    * @return The row.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RowLock.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RowLock.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RowLock.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RowLock.java Thu Mar  1 17:53:03 2012
@@ -19,9 +19,14 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Holds row name and lock id.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class RowLock {
   private byte [] row = null;
   private long lockId = -1L;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java Thu Mar  1 17:53:03 2012
@@ -25,6 +25,8 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -36,6 +38,8 @@ import org.apache.hadoop.hbase.util.Byte
  * The mutations are performed in the order in which they
  * were added.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class RowMutations implements Row {
   private List<Mutation> mutations = new ArrayList<Mutation>();
   private byte [] row;
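
The class above bundles Puts and Deletes against a single row so the server can apply them together, in the order added. A sketch of how that might be driven, assuming HTable#mutateRow is available alongside RowMutations in this trunk revision and using made-up table, family, and qualifier names:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.RowMutations;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RowMutationsExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "t1");                 // hypothetical table name
        try {
          byte[] row = Bytes.toBytes("row1");
          RowMutations mutations = new RowMutations(row);

          Put put = new Put(row);
          put.add(Bytes.toBytes("cf"), Bytes.toBytes("new"), Bytes.toBytes("v1"));
          mutations.add(put);

          Delete delete = new Delete(row);
          delete.deleteColumns(Bytes.toBytes("cf"), Bytes.toBytes("old"));
          mutations.add(delete);

          // Both changes to "row1" are applied in the order they were added.
          table.mutateRow(mutations);
        } finally {
          table.close();
        }
      }
    }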

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Scan.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Scan.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Scan.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/Scan.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -80,6 +82,8 @@ import java.util.TreeSet;
  * Expert: To explicitly disable server-side block caching for this scan,
  * execute {@link #setCacheBlocks(boolean)}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class Scan extends OperationWithAttributes implements Writable {
   private static final String RAW_ATTR = "_raw_";
   private static final String ISOLATION_LEVEL = "_isolationlevel_";
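
The visible fragment of the Scan Javadoc calls out setCacheBlocks(boolean) for disabling server-side block caching, e.g. for full-table jobs. A sketch of a bounded scan that uses it together with a couple of the other common knobs (addFamily, setCaching); table and family names are hypothetical:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ScanExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "t1");                  // hypothetical table name
        try {
          Scan scan = new Scan(Bytes.toBytes("row-000"), Bytes.toBytes("row-999"));
          scan.addFamily(Bytes.toBytes("cf"));
          scan.setCaching(100);          // rows fetched per RPC
          scan.setCacheBlocks(false);    // skip the server-side block cache for this scan
          ResultScanner scanner = table.getScanner(scan);
          try {
            for (Result result : scanner) {
              System.out.println(Bytes.toString(result.getRow()));
            }
          } finally {
            scanner.close();             // releases the server-side scanner lease
          }
        } finally {
          table.close();
        }
      }
    }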

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.net.UnknownHostException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -36,6 +38,8 @@ import org.apache.hadoop.net.DNS;
  * Retries scanner operations such as create, next, etc.
  * Used by {@link ResultScanner}s made by {@link HTable}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ScannerCallable extends ServerCallable<Result[]> {
   private static final Log LOG = LogFactory.getLog(ScannerCallable.class);
   private long scannerId = -1L;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java Thu Mar  1 17:53:03 2012
@@ -20,11 +20,15 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 
 /**
  * Thrown when a scanner has timed out.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ScannerTimeoutException extends DoNotRetryIOException {
 
   private static final long serialVersionUID = 8788838690290688313L;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java Thu Mar  1 17:53:03 2012
@@ -28,6 +28,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.Callable;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HConstants;
@@ -48,6 +50,8 @@ import org.apache.hadoop.ipc.RemoteExcep
  * @see HConnection#getRegionServerWithoutRetries(ServerCallable)
  * @param <T> the class that the ServerCallable handles
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class ServerCallable<T> implements Callable<T> {
   protected final HConnection connection;
   protected final byte [] tableName;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java Thu Mar  1 17:53:03 2012
@@ -20,12 +20,16 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.io.hfile.Compression;
 
 /**
  * Immutable HColumnDescriptor
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class UnmodifyableHColumnDescriptor extends HColumnDescriptor {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java Thu Mar  1 17:53:03 2012
@@ -20,8 +20,12 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HRegionInfo;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 class UnmodifyableHRegionInfo extends HRegionInfo {
   /*
    * Creates an unmodifyable copy of an HRegionInfo

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java Thu Mar  1 17:53:03 2012
@@ -20,12 +20,16 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 
 /**
  * Read-only table descriptor.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class UnmodifyableHTableDescriptor extends HTableDescriptor {
   /** Default constructor */
   public UnmodifyableHTableDescriptor() {

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java Thu Mar  1 17:53:03 2012
@@ -31,6 +31,8 @@ import java.util.concurrent.atomic.Atomi
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
@@ -62,6 +64,8 @@ import org.apache.hadoop.hbase.util.Pair
  * parameter type. For average and std, it returns a double value. For row
  * count, it returns a long value.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class AggregationClient {
 
   private static final Log log = LogFactory.getLog(AggregationClient.class);

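As a usage sketch of the AggregationClient described above: the row-count call returns
a long and avg returns a double, matching the javadoc. The method signatures are the
ones assumed for this era of the API, and the table/column names ("testtable", "cf",
"counter") are invented; AggregateImplementation must be loaded as a coprocessor on
the target table for any of these calls to succeed.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.util.Bytes;

public class AggregationClientExample {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    AggregationClient aggClient = new AggregationClient(conf);

    // Restrict the aggregation to a single long-encoded column.
    Scan scan = new Scan();
    scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("counter"));

    byte[] tableName = Bytes.toBytes("testtable");
    LongColumnInterpreter ci = new LongColumnInterpreter();

    long rows  = aggClient.rowCount(tableName, ci, scan); // long, per the javadoc
    long max   = aggClient.max(tableName, ci, scan);      // interpreter's value type
    double avg = aggClient.avg(tableName, ci, scan);      // double for avg/std

    System.out.println("rows=" + rows + " max=" + max + " avg=" + avg);
  }
}
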
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,8 @@ package org.apache.hadoop.hbase.client.c
 import org.apache.commons.lang.reflect.MethodUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
 
 import java.io.IOException;
@@ -36,6 +38,8 @@ import java.lang.reflect.Proxy;
  * A collection of interfaces and utilities used for interacting with custom RPC
  * interfaces exposed by Coprocessors.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class Batch {
   private static Log LOG = LogFactory.getLog(Batch.class);
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Exec.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Exec.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Exec.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Exec.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.client.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.Row;
@@ -51,6 +53,8 @@ import java.lang.reflect.Method;
  * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
  * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class Exec extends Invocation implements Row {
   /** Row key used as a reference for any region lookups */
   private byte[] referenceRow;

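The Exec/ExecResult pair above carries HTable.coprocessorExec() calls over the wire.
A hedged sketch of the client-side pattern, using the AggregateProtocol endpoint from
this same commit as the example protocol; the table and column names are invented, and
the getRowNum/coprocessorExec signatures are assumed from this era of the API:

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.coprocessor.AggregateProtocol;
import org.apache.hadoop.hbase.util.Bytes;

public class CoprocessorExecExample {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "testtable");               // hypothetical table

    final Scan scan = new Scan();
    scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));    // hypothetical column
    final LongColumnInterpreter ci = new LongColumnInterpreter();

    // coprocessorExec wraps each per-region invocation in an Exec and gathers the
    // ExecResult values into a map keyed by region name.
    Map<byte[], Long> partials = table.coprocessorExec(
        AggregateProtocol.class,
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,   // span all regions
        new Batch.Call<AggregateProtocol, Long>() {
          public Long call(AggregateProtocol instance) throws IOException {
            return instance.getRowNum(ci, scan);                // per-region count
          }
        });

    long total = 0;
    for (Long partial : partials.values()) {
      total += partial;
    }
    System.out.println("row count = " + total);
    table.close();
  }
}
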
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/ExecResult.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/ExecResult.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/ExecResult.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/ExecResult.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.client.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Classes;
@@ -46,6 +48,8 @@ import java.io.Serializable;
  * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
  * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class ExecResult implements Writable {
   private byte[] regionName;
   private Object value;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -35,6 +37,8 @@ import org.apache.hadoop.hbase.util.Byte
  * TestAggregateProtocol methods for its sample usage.
  * Its methods handle null arguments gracefully. 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class LongColumnInterpreter implements ColumnInterpreter<Long, Long> {
 
   public Long getValue(byte[] colFamily, byte[] colQualifier, KeyValue kv)

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java Thu Mar  1 17:53:03 2012
@@ -26,6 +26,8 @@ import java.util.Collection;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.metrics.util.MetricsBase;
@@ -45,6 +47,8 @@ import org.apache.hadoop.metrics.util.Me
  * However, there is no need for this. So they are defined under scan operation
  * for now.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class ScanMetrics implements Writable {
 
   private static final byte SCANMETRICS_VERSION = (byte)1;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/BaseConstraint.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/BaseConstraint.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/BaseConstraint.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/BaseConstraint.java Thu Mar  1 17:53:03 2012
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.hbase.constraint;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configured;
 
 /**
  * Base class to use when actually implementing a {@link Constraint}. It takes
  * care of getting and setting of configuration for the constraint.
  */
+@InterfaceAudience.Private
 public abstract class BaseConstraint extends Configured implements Constraint {
 
 }

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java Thu Mar  1 17:53:03 2012
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.constraint;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.Put;
@@ -61,6 +62,7 @@ import org.apache.hadoop.hbase.client.Pu
  * @see BaseConstraint
  * @see Constraints
  */
+@InterfaceAudience.Private
 public interface Constraint extends Configurable {
 
   /**

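To make the Constraint/BaseConstraint contract concrete, here is a hedged sketch of a
trivial constraint and of attaching it to a table descriptor with Constraints.add()
(covered later in this commit). The class, table and family names are invented, and
the check(Put) signature is assumed from this era of the API:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.constraint.BaseConstraint;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.constraint.Constraints;
import org.apache.hadoop.hbase.util.Bytes;

/** Rejects any Put that writes to a (hypothetical) reserved column family. */
public class NoAuditFamilyConstraint extends BaseConstraint {
  private static final byte[] RESERVED = Bytes.toBytes("audit");

  @Override
  public void check(Put p) throws ConstraintException {
    if (p.getFamilyMap().containsKey(RESERVED)) {
      // ConstraintException is a DoNotRetryIOException, so the failed Put
      // is not retried by the client.
      throw new ConstraintException("writes to the 'audit' family are not allowed");
    }
  }

  /** Builds a descriptor with the constraint wired in before table creation. */
  public static HTableDescriptor describeTable() throws Exception {
    HTableDescriptor desc = new HTableDescriptor("testtable");  // hypothetical table
    desc.addFamily(new HColumnDescriptor("cf"));
    desc.addFamily(new HColumnDescriptor("audit"));
    Constraints.add(desc, NoAuditFamilyConstraint.class);       // also enables constraint processing
    return desc;
  }
}
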
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintException.java Thu Mar  1 17:53:03 2012
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.constraint;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.client.Put;
 
@@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.client.Pu
  * <i>should</i> fail every time for the same {@link Put} (it should be
  * idempotent).
  */
+@InterfaceAudience.Private
 public class ConstraintException extends DoNotRetryIOException {
   private static final long serialVersionUID = 1197446454511704140L;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,7 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.Put;
@@ -37,6 +38,7 @@ import org.apache.hadoop.hbase.regionser
  * This is an ease of use mechanism - all the functionality here could be
  * implemented on any given system by a coprocessor.
  */
+@InterfaceAudience.Private
 public class ConstraintProcessor extends BaseRegionObserver {
 
   private static final Log LOG = LogFactory.getLog(ConstraintProcessor.class);

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java Thu Mar  1 17:53:03 2012
@@ -31,6 +31,7 @@ import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -45,6 +46,7 @@ import org.apache.hadoop.hbase.util.Pair
  * NOTE: this class is NOT thread safe. Concurrent setting/enabling/disabling of
  * constraints can cause constraints to be run at incorrect times or not at all.
  */
+@InterfaceAudience.Private
 public final class Constraints {
   private static final int DEFAULT_PRIORITY = -1;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java Thu Mar  1 17:53:03 2012
@@ -26,6 +26,8 @@ import java.util.NavigableSet;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
@@ -37,6 +39,8 @@ import org.apache.hadoop.hbase.util.Pair
  * A concrete AggregateProtocol implementation. It's a system level coprocessor
  * that computes the aggregate function at a region level.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class AggregateImplementation extends BaseEndpointCoprocessor implements
     AggregateProtocol {
   protected static Log log = LogFactory.getLog(AggregateImplementation.class);

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateProtocol.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateProtocol.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateProtocol.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateProtocol.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,8 @@ package org.apache.hadoop.hbase.coproces
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
 import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
@@ -38,6 +40,8 @@ import org.apache.hadoop.hbase.util.Pair
  * be picked. Refer to {@link AggregationClient} for some general conditions on
  * input parameters.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface AggregateProtocol extends CoprocessorProtocol {
   public static final long VERSION = 1L;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java Thu Mar  1 17:53:03 2012
@@ -18,6 +18,8 @@ package org.apache.hadoop.hbase.coproces
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
@@ -34,6 +36,8 @@ import org.apache.hadoop.hbase.ipc.Versi
  * and Coprocessor to develop an Endpoint. But you won't be able to access
  * the region related resource, i.e., CoprocessorEnvironment.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class BaseEndpointCoprocessor implements Coprocessor,
     CoprocessorProtocol, VersionedProtocol {
   /**

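A sketch of the Endpoint pattern the BaseEndpointCoprocessor javadoc describes: a
custom protocol interface plus a server-side implementation extending
BaseEndpointCoprocessor. The protocol name and method are invented for illustration,
and reaching the region through getEnvironment()/RegionCoprocessorEnvironment is
assumed to work the way AggregateImplementation does in this era:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.regionserver.InternalScanner;

/** Hypothetical endpoint protocol: counts KeyValues in one region. */
interface CellCountProtocol extends CoprocessorProtocol {
  long countCells(Scan scan) throws IOException;
}

/** Region-side implementation, deployed like AggregateImplementation. */
public class CellCountEndpoint extends BaseEndpointCoprocessor
    implements CellCountProtocol {

  public long countCells(Scan scan) throws IOException {
    RegionCoprocessorEnvironment env =
        (RegionCoprocessorEnvironment) getEnvironment();
    InternalScanner scanner = env.getRegion().getScanner(scan);
    long count = 0;
    try {
      List<KeyValue> row = new ArrayList<KeyValue>();
      boolean hasMore;
      do {
        hasMore = scanner.next(row);  // one row's worth of KeyValues per call
        count += row.size();
        row.clear();
      } while (hasMore);
    } finally {
      scanner.close();
    }
    return count;
  }
}

Clients would reach it with HTable.coprocessorProxy(CellCountProtocol.class, row) for a
single region, or coprocessorExec(...) to fan out across a row range.
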
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -29,6 +31,8 @@ import org.apache.hadoop.hbase.UnknownRe
 
 import java.io.IOException;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class BaseMasterObserver implements MasterObserver {
   @Override
   public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,9 @@ import java.util.List;
 import java.util.Map;
 
 import com.google.common.collect.ImmutableList;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
@@ -47,6 +50,8 @@ import java.io.IOException;
  * By extending it, you can create your own region observer without
  * overriding all abstract methods of RegionObserver.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class BaseRegionObserver implements RegionObserver {
   @Override
   public void start(CoprocessorEnvironment e) throws IOException { }

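A small illustration of the point in the BaseRegionObserver javadoc: extend it and
override only the hooks you need, leaving the rest as no-ops. The class is invented
and the postOpen hook signature is assumed from this era of RegionObserver:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;

/** Logs region opens; every other RegionObserver hook falls through to the base class. */
public class LoggingRegionObserver extends BaseRegionObserver {
  private static final Log LOG = LogFactory.getLog(LoggingRegionObserver.class);

  @Override
  public void postOpen(ObserverContext<RegionCoprocessorEnvironment> ctx) {
    LOG.info("opened region "
        + ctx.getEnvironment().getRegion().getRegionNameAsString());
  }
}
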
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase.coproces
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
 import org.apache.hadoop.io.Writable;
@@ -45,6 +47,8 @@ import org.apache.hadoop.io.Writable;
  * @param <T> Cell value data type
  * @param <S> Promoted data type
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface ColumnInterpreter<T, S> extends Writable {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java Thu Mar  1 17:53:03 2012
@@ -19,11 +19,15 @@
  */
 package org.apache.hadoop.hbase.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 
 /**
  * Thrown if a coprocessor encounters any exception.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class CoprocessorException extends DoNotRetryIOException {
   private static final long serialVersionUID = 4357922136679804887L;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase.coproces
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -50,6 +52,8 @@ import java.util.*;
  * @param <E> the specific environment extension that a concrete implementation
  * provides
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class CoprocessorHost<E extends CoprocessorEnvironment> {
   public static final String REGION_COPROCESSOR_CONF_KEY =
       "hbase.coprocessor.region.classes";

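The REGION_COPROCESSOR_CONF_KEY constant above is how system-wide region coprocessors
are registered: CoprocessorHost reads a comma-separated list of fully qualified class
names from "hbase.coprocessor.region.classes" (normally set in hbase-site.xml on the
servers, not in client code). A minimal sketch of that configuration, using
ConstraintProcessor from this commit as the example class:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class CoprocessorConfigExample {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Equivalent to the hbase-site.xml property; the classes must be on the
    // region server classpath and are loaded for every region on that server.
    conf.set("hbase.coprocessor.region.classes",
        "org.apache.hadoop.hbase.constraint.ConstraintProcessor");
    System.out.println(conf.get("hbase.coprocessor.region.classes"));
  }
}
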
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterCoprocessorEnvironment.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterCoprocessorEnvironment.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterCoprocessorEnvironment.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterCoprocessorEnvironment.java Thu Mar  1 17:53:03 2012
@@ -20,9 +20,13 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.master.MasterServices;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface MasterCoprocessorEnvironment extends CoprocessorEnvironment {
   /** @return reference to the HMaster services */
   MasterServices getMasterServices();

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.*;
 
 import java.io.IOException;
@@ -28,6 +30,8 @@ import java.io.IOException;
  * Defines coprocessor hooks for interacting with operations on the
  * {@link org.apache.hadoop.hbase.master.HMaster} process.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface MasterObserver extends Coprocessor {
 
   /**

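For the MasterObserver hooks described above, BaseMasterObserver (earlier in this
commit) provides the no-op implementations, so a concrete observer only overrides what
it needs. A hedged sketch; the class name is invented and the preCreateTable signature
is assumed to match the one whose first line appears in the BaseMasterObserver hunk:

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

/** Audits table creation on the master; other MasterObserver hooks stay as no-ops. */
public class TableCreationAuditor extends BaseMasterObserver {
  private static final Log LOG = LogFactory.getLog(TableCreationAuditor.class);

  @Override
  public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
      HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
    LOG.info("about to create table " + desc.getNameAsString()
        + " with " + regions.length + " initial region(s)");
  }
}

Such an observer is registered on the master via "hbase.coprocessor.master.classes".
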
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,8 @@ import java.util.List;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.client.Mutation;
@@ -34,6 +36,8 @@ import org.apache.hadoop.hbase.util.Byte
  * {@link HRegion#mutateRowsWithLocks(java.util.Collection, java.util.Collection)}
  * and Coprocessor endpoints.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class MultiRowMutationEndpoint extends BaseEndpointCoprocessor implements
     MultiRowMutationProtocol {
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationProtocol.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationProtocol.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationProtocol.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationProtocol.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.coproces
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -45,6 +47,8 @@ import org.apache.hadoop.hbase.regionser
  * mrOp.mutateRows(mutations);
  * </pre></code>
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface MultiRowMutationProtocol extends CoprocessorProtocol {
   public void mutateRows(List<Mutation> mutations) throws IOException;
 }

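The javadoc sample that ends with mrOp.mutateRows(mutations) above is the client half
of this endpoint. A fuller hedged sketch; table, family and row names are invented,
all mutated rows must fall in the same region, and MultiRowMutationEndpoint must be
installed on the table:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.coprocessor.MultiRowMutationProtocol;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiRowMutationExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "testtable");            // hypothetical table

    byte[] rowA = Bytes.toBytes("row-a");                    // both rows must fall
    byte[] rowB = Bytes.toBytes("row-b");                    // in the same region

    Put put = new Put(rowA);
    put.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value"));
    Delete delete = new Delete(rowB);

    List<Mutation> mutations = new ArrayList<Mutation>();
    mutations.add(put);
    mutations.add(delete);

    // coprocessorProxy targets the region hosting rowA; the endpoint then applies
    // all mutations atomically under that region's row locks.
    MultiRowMutationProtocol mrOp =
        table.coprocessorProxy(MultiRowMutationProtocol.class, rowA);
    mrOp.mutateRows(mutations);

    table.close();
  }
}
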

