hbase-commits mailing list archives

From e...@apache.org
Subject [2/2] hbase git commit: HBASE-10671 Add missing InterfaceAudience annotations for classes in hbase-common and hbase-client modules
Date Fri, 21 Nov 2014 22:11:00 GMT
HBASE-10671 Add missing InterfaceAudience annotations for classes in hbase-common and hbase-client modules


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/aa343ebc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/aa343ebc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/aa343ebc

Branch: refs/heads/branch-1
Commit: aa343ebcfe169732d2868006f235c231f1af2e5d
Parents: 42cbdea
Author: Enis Soztutar <enis@apache.org>
Authored: Fri Nov 21 11:44:46 2014 -0800
Committer: Enis Soztutar <enis@apache.org>
Committed: Fri Nov 21 14:09:28 2014 -0800

----------------------------------------------------------------------
 ...ExcludePrivateAnnotationsStandardDoclet.java |   2 +
 .../IncludePublicAnnotationsStandardDoclet.java |   3 +
 .../apache/hadoop/hbase/KeepDeletedCells.java   |   5 +
 .../hadoop/hbase/client/AsyncProcess.java       |   2 +
 .../client/DelegatingRetryingCallable.java      |   3 +
 .../apache/hadoop/hbase/client/FailureInfo.java |   2 +-
 .../hadoop/hbase/client/HTableInterface.java    |   5 +
 .../hadoop/hbase/client/HTableMultiplexer.java  |  30 +-
 .../hbase/client/RetriesExhaustedException.java |   4 +-
 .../hadoop/hbase/client/RowTooBigException.java |   2 +
 .../hbase/client/RpcRetryingCallerFactory.java  |   2 +
 .../RpcRetryingCallerWithReadReplicas.java      |   5 +
 .../hadoop/hbase/client/coprocessor/Batch.java  |   4 +
 .../exceptions/ConnectionClosingException.java  |   4 +-
 .../exceptions/FailedSanityCheckException.java  |   5 +
 .../hbase/exceptions/LockTimeoutException.java  |   7 +
 .../exceptions/PreemptiveFastFailException.java |   4 +-
 .../exceptions/UnknownProtocolException.java    |   5 +
 .../hadoop/hbase/filter/LongComparator.java     |   7 +-
 .../hbase/filter/RegexStringComparator.java     |  12 +-
 .../DelegatingPayloadCarryingRpcController.java |   2 +
 .../org/apache/hadoop/hbase/ipc/IPCUtil.java    |   2 +
 .../ipc/RegionServerCoprocessorRpcChannel.java  |   2 +-
 .../hadoop/hbase/ipc/RpcControllerFactory.java  |   6 +-
 .../hadoop/hbase/ipc/ServerRpcController.java   |   6 +
 .../hbase/ipc/TimeLimitedRpcController.java     |   7 +-
 .../hadoop/hbase/protobuf/ProtobufUtil.java     |   2 +
 .../hbase/replication/ReplicationException.java |   2 +
 .../hbase/replication/ReplicationFactory.java   |   2 +
 .../replication/ReplicationPeersZKImpl.java     |   8 +-
 .../hbase/replication/ReplicationQueueInfo.java |   3 +
 .../replication/ReplicationQueuesClient.java    |   3 +
 .../ReplicationQueuesClientZKImpl.java          |   2 +
 .../replication/ReplicationQueuesZKImpl.java    |   4 +-
 .../replication/ReplicationStateZKBase.java     |   2 +
 .../replication/ReplicationTrackerZKImpl.java   |   2 +
 .../hbase/security/HBaseSaslRpcClient.java      |  23 +-
 .../hbase/security/access/Permission.java       |   5 +
 .../hbase/security/access/TablePermission.java  |   1 +
 .../hbase/security/access/UserPermission.java   |   1 +
 .../token/AuthenticationTokenIdentifier.java    |   2 +-
 .../VisibilityControllerNotReadyException.java  |   2 +
 .../hbase/snapshot/ExportSnapshotException.java |   2 +
 .../hbase/TestInterfaceAudienceAnnotations.java | 273 +++++++++++++++++++
 .../apache/hadoop/hbase/BaseConfigurable.java   |   2 +
 .../org/apache/hadoop/hbase/HConstants.java     |   2 +
 .../hadoop/hbase/MetaMutationAnnotation.java    |   3 +
 .../hadoop/hbase/NamespaceDescriptor.java       |   6 +-
 .../hadoop/hbase/io/ImmutableBytesWritable.java |   2 +
 .../hadoop/hbase/io/LimitInputStream.java       |   2 +
 .../hadoop/hbase/io/crypto/Encryption.java      |   2 +
 .../io/hadoopbackport/ThrottledInputStream.java |   5 +-
 .../hadoop/hbase/security/UserProvider.java     |   1 -
 .../hbase/trace/HBaseHTraceConfiguration.java   |   2 +
 .../hadoop/hbase/trace/SpanReceiverHost.java    |   2 +
 .../org/apache/hadoop/hbase/types/PBType.java   |   7 +
 .../hadoop/hbase/util/AbstractByteRange.java    |   5 +
 .../hbase/util/AbstractPositionedByteRange.java |  25 +-
 .../org/apache/hadoop/hbase/util/Base64.java    |   2 +
 .../hbase/util/BoundedCompletionService.java    |   3 +
 .../hadoop/hbase/util/ChecksumFactory.java      |   7 +-
 .../apache/hadoop/hbase/util/ChecksumType.java  |   6 +-
 .../hadoop/hbase/util/ConcatenatedLists.java    |   3 +
 .../apache/hadoop/hbase/util/ExceptionUtil.java |   3 +
 .../apache/hadoop/hbase/util/MurmurHash3.java   |   5 +
 .../apache/hadoop/hbase/util/PrettyPrinter.java |   3 +-
 .../hbase/util/ReadOnlyByteRangeException.java  |   5 +
 .../hadoop/hbase/util/SimpleByteRange.java      |  11 +-
 .../org/apache/hadoop/hbase/ClassFinder.java    |  37 +++
 hbase-protocol/pom.xml                          |   4 +
 .../apache/hadoop/hbase/util/ByteStringer.java  |   2 +
 .../hadoop/hbase/io/hfile/ChecksumUtil.java     |   2 +
 .../regionserver/wal/HLogUtilsForTests.java     |   2 +
 73 files changed, 568 insertions(+), 70 deletions(-)
----------------------------------------------------------------------
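
For reference, the pattern this patch applies throughout is a class-level audience/stability annotation from the hbase-annotations module. A minimal sketch of the convention (the class names below are only illustrative):

    import org.apache.hadoop.hbase.classification.InterfaceAudience;
    import org.apache.hadoop.hbase.classification.InterfaceStability;

    // Part of the public client API; Evolving means it may still change between minor releases.
    @InterfaceAudience.Public
    @InterfaceStability.Evolving
    public class SomeClientFacingClass {
    }

    // Internal to HBase; downstream users should not depend on it.
    @InterfaceAudience.Private
    class SomeInternalHelper {
    }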


http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
----------------------------------------------------------------------
diff --git a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
index 6005871..eb03a21 100644
--- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
+++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.classification.tools;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import com.sun.javadoc.DocErrorReporter;
 import com.sun.javadoc.LanguageVersion;
 import com.sun.javadoc.RootDoc;
@@ -29,6 +30,7 @@ import com.sun.tools.doclets.standard.Standard;
  * {@link org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate}.
  * It delegates to the Standard Doclet, and takes the same options.
  */
+@InterfaceAudience.Private
 public class ExcludePrivateAnnotationsStandardDoclet {
 
   public static LanguageVersion languageVersion() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/IncludePublicAnnotationsStandardDoclet.java
----------------------------------------------------------------------
diff --git a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/IncludePublicAnnotationsStandardDoclet.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/IncludePublicAnnotationsStandardDoclet.java
index c283c91..def4f1a 100644
--- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/IncludePublicAnnotationsStandardDoclet.java
+++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/IncludePublicAnnotationsStandardDoclet.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.classification.tools;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
 import com.sun.javadoc.DocErrorReporter;
 import com.sun.javadoc.LanguageVersion;
 import com.sun.javadoc.RootDoc;
@@ -33,6 +35,7 @@ import com.sun.tools.doclets.standard.Standard;
  * are also excluded.
  * It delegates to the Standard Doclet, and takes the same options.
  */
+@InterfaceAudience.Private
 public class IncludePublicAnnotationsStandardDoclet {
 
   public static LanguageVersion languageVersion() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java
index 6cd52e8..d2d92b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java
@@ -18,6 +18,9 @@
  */
 package org.apache.hadoop.hbase;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
 /**
  * Ways to keep cells marked for delete around.
  */
@@ -25,6 +28,8 @@ package org.apache.hadoop.hbase;
  * Don't change the TRUE/FALSE labels below, these have to be called
  * this way for backwards compatibility.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public enum KeepDeletedCells {
   /** Deleted Cells are not retained. */
   FALSE,
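
As a hedged illustration of where this enum is consumed, a sketch using the branch-1 HColumnDescriptor setter that accepts the enum (the family name is made up):

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.KeepDeletedCells;

    // Keep deleted cells (and delete markers) visible to raw and time-range scans.
    HColumnDescriptor family = new HColumnDescriptor("cf");
    family.setKeepDeletedCells(KeepDeletedCells.TRUE);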

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
index c182255..1450fe7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
@@ -39,6 +39,7 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HConstants;
@@ -90,6 +91,7 @@ import com.google.common.annotations.VisibleForTesting;
  * gets as well.
  * </p>
  */
+@InterfaceAudience.Private
 class AsyncProcess {
   protected static final Log LOG = LogFactory.getLog(AsyncProcess.class);
   protected static final AtomicLong COUNTER = new AtomicLong();

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelegatingRetryingCallable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelegatingRetryingCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelegatingRetryingCallable.java
index 44f1eca..984a867 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelegatingRetryingCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DelegatingRetryingCallable.java
@@ -19,6 +19,9 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
 public class DelegatingRetryingCallable<T, D extends RetryingCallable<T>> implements
     RetryingCallable<T> {
   protected final D delegate;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FailureInfo.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FailureInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FailureInfo.java
index 9d685b8..16707cb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FailureInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FailureInfo.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
 /**
  * Keeps track of repeated failures to any region server. Multiple threads manipulate the contents

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
index 34f90d5..911e034 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
@@ -21,6 +21,9 @@ package org.apache.hadoop.hbase.client;
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
 /**
  * Used to communicate with a single HBase table.
  * Obtain an instance from an {@link HConnection}.
@@ -29,6 +32,8 @@ import java.util.List;
  * @deprecated use {@link org.apache.hadoop.hbase.client.Table} instead
  */
 @Deprecated
+@InterfaceAudience.Private
+@InterfaceStability.Stable
 public interface HTableInterface extends Table {
 
   /**
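
Because the interface above is deprecated in favour of org.apache.hadoop.hbase.client.Table, a hedged sketch of the replacement pattern (connection handling simplified; assumes a surrounding method that throws IOException):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Table;

    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("t1"))) {
      // read and write through the Table API instead of HTableInterface
    }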

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
index 8d0fbc8..7d61a0b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
@@ -57,20 +57,20 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
  * Each put will be sharded into different buffer queues based on its destination region server.
  * So each region server buffer queue will only have the puts which share the same destination.
  * And each queue will have a flush worker thread to flush the puts request to the region server.
- * If any queue is full, the HTableMultiplexer starts to drop the Put requests for that 
+ * If any queue is full, the HTableMultiplexer starts to drop the Put requests for that
  * particular queue.
- * 
+ *
  * Also all the puts will be retried as a configuration number before dropping.
  * And the HTableMultiplexer can report the number of buffered requests and the number of the
  * failed (dropped) requests in total or on per region server basis.
- * 
+ *
  * This class is thread safe.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class HTableMultiplexer {
   private static final Log LOG = LogFactory.getLog(HTableMultiplexer.class.getName());
-  
+
   public static final String TABLE_MULTIPLEXER_FLUSH_PERIOD_MS =
       "hbase.tablemultiplexer.flush.period.ms";
   public static final String TABLE_MULTIPLEXER_INIT_THREADS = "hbase.tablemultiplexer.init.threads";
@@ -89,7 +89,7 @@ public class HTableMultiplexer {
   private final int maxKeyValueSize;
   private final ScheduledExecutorService executor;
   private final long flushPeriod;
-  
+
   /**
    * @param conf The HBaseConfiguration
    * @param perRegionServerBufferQueueSize determines the max number of the buffered Put ops for
@@ -128,7 +128,7 @@ public class HTableMultiplexer {
   }
 
   /**
-   * The puts request will be buffered by their corresponding buffer queue. 
+   * The puts request will be buffered by their corresponding buffer queue.
    * Return the list of puts which could not be queued.
    * @param tableName
    * @param puts
@@ -138,13 +138,13 @@ public class HTableMultiplexer {
   public List<Put> put(TableName tableName, final List<Put> puts) {
     if (puts == null)
       return null;
-    
+
     List <Put> failedPuts = null;
     boolean result;
     for (Put put : puts) {
       result = put(tableName, put, this.retryNum);
       if (result == false) {
-        
+
         // Create the failed puts list if necessary
         if (failedPuts == null) {
           failedPuts = new ArrayList<Put>();
@@ -163,7 +163,7 @@ public class HTableMultiplexer {
   public List<Put> put(byte[] tableName, final List<Put> puts) {
     return put(TableName.valueOf(tableName), puts);
   }
-  
+
   /**
    * The put request will be buffered by its corresponding buffer queue. And the put request will be
    * retried before dropping the request.
@@ -185,7 +185,7 @@ public class HTableMultiplexer {
 
         // Generate a MultiPutStatus object and offer it into the queue
         PutStatus s = new PutStatus(loc.getRegionInfo(), put, retry);
-        
+
         return queue.offer(s);
       }
     } catch (IOException e) {
@@ -209,7 +209,7 @@ public class HTableMultiplexer {
   public boolean put(final byte[] tableName, Put put) {
     return put(TableName.valueOf(tableName), put);
   }
-  
+
   /**
    * @return the current HTableMultiplexerStatus
    */
@@ -239,6 +239,8 @@ public class HTableMultiplexer {
    * report the number of buffered requests and the number of the failed (dropped) requests
    * in total or on per region server basis.
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static class HTableMultiplexerStatus {
     private long totalFailedPutCounter;
     private long totalBufferedPutCounter;
@@ -339,7 +341,7 @@ public class HTableMultiplexer {
       return this.serverToAverageLatencyMap;
     }
   }
-  
+
   private static class PutStatus {
     public final HRegionInfo regionInfo;
     public final Put put;
@@ -406,7 +408,7 @@ public class HTableMultiplexer {
     private final ScheduledExecutorService executor;
     private final int maxRetryInQueue;
     private final AtomicInteger retryInQueue = new AtomicInteger(0);
-    
+
     public FlushWorker(Configuration conf, ClusterConnection conn, HRegionLocation addr,
         HTableMultiplexer htableMultiplexer, int perRegionServerBufferQueueSize,
         ExecutorService pool, ScheduledExecutorService executor) {
@@ -443,7 +445,7 @@ public class HTableMultiplexer {
     private boolean resubmitFailedPut(PutStatus ps, HRegionLocation oldLoc) throws IOException {
       // Decrease the retry count
       final int retryCount = ps.retryCount - 1;
-      
+
       if (retryCount <= 0) {
         // Update the failed counter and no retry any more.
         return false;
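
For orientation, a minimal usage sketch of the class touched above, using the constructor and put signatures visible in this diff (table name, queue size, and the Put#addColumn call are assumptions for illustration):

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HTableMultiplexer;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    Configuration conf = HBaseConfiguration.create();
    // Buffer at most 10000 outstanding Puts per region server before new ones are rejected.
    HTableMultiplexer multiplexer = new HTableMultiplexer(conf, 10000);

    Put put = new Put(Bytes.toBytes("row1"));
    put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));

    // Returns the puts that could not be queued; the caller can retry or log them.
    List<Put> failed = multiplexer.put(TableName.valueOf("t1"), Arrays.asList(put));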

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
index e266376..0b4509b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
@@ -42,6 +42,7 @@ public class RetriesExhaustedException extends IOException {
   /**
    * Datastructure that allows adding more info around Throwable incident.
    */
+  @InterfaceAudience.Private
   public static class ThrowableWithExtraContext {
     private final Throwable t;
     private final long when;
@@ -53,7 +54,7 @@ public class RetriesExhaustedException extends IOException {
       this.when = when;
       this.extras = extras;
     }
- 
+
     @Override
     public String toString() {
       return new Date(this.when).toString() + ", " + extras + ", " + t.toString();
@@ -77,6 +78,7 @@ public class RetriesExhaustedException extends IOException {
    * @param numTries
    * @param exceptions List of exceptions that failed before giving up
    */
+  @InterfaceAudience.Private
   public RetriesExhaustedException(final int numTries,
                                    final List<ThrowableWithExtraContext> exceptions) {
     super(getMessage(numTries, exceptions),

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java
index 3da3fd5..ce87dd2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.RegionException;
 
 /**
@@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.RegionException;
  * hbase.table.max.rowsize).
  */
 @InterfaceAudience.Public
+@InterfaceStability.Stable
 public class RowTooBigException extends RegionException {
 
   public RowTooBigException(String message) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
index f594a8c..9f05997 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerFactory.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -24,6 +25,7 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
 /**
  * Factory to create an {@link RpcRetryingCaller}
  */
+@InterfaceAudience.Private
 public class RpcRetryingCallerFactory {
 
   /** Configuration key for a custom {@link RpcRetryingCaller} */

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
index 8f01250..8d937aa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java
@@ -31,12 +31,16 @@ import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+
+import com.google.protobuf.ServiceException;
+
 import org.htrace.Trace;
 
 import java.io.IOException;
@@ -58,6 +62,7 @@ import java.util.concurrent.TimeoutException;
  * the first answer. If the answer comes from one of the secondary replica, it will
  * be marked as stale.
  */
+@InterfaceAudience.Private
 public class RpcRetryingCallerWithReadReplicas {
   static final Log LOG = LogFactory.getLog(RpcRetryingCallerWithReadReplicas.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
index 8fc00a3..55343ac 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
@@ -47,6 +47,8 @@ public abstract class Batch {
    * {@link Batch.Call#call(Object)}
    * @param <R> the return type from {@link Batch.Call#call(Object)}
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Stable
   public interface Call<T,R> {
     R call(T instance) throws IOException;
   }
@@ -65,6 +67,8 @@ public abstract class Batch {
    * @param <R> the return type from the associated {@link Batch.Call#call(Object)}
    * @see org.apache.hadoop.hbase.client.HTable#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Stable
   public interface Callback<R> {
     void update(byte[] region, byte[] row, R result);
   }
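
To show where these callback interfaces fit, a hedged sketch of a Batch.Callback implementation as it would be passed to Table#coprocessorService alongside a Batch.Call (the Long result type stands in for whatever the coprocessor service returns):

    import java.util.concurrent.atomic.AtomicLong;
    import org.apache.hadoop.hbase.client.coprocessor.Batch;

    // Invoked once per region with that region's result; here it simply sums Long results.
    final AtomicLong total = new AtomicLong();
    Batch.Callback<Long> callback = new Batch.Callback<Long>() {
      @Override
      public void update(byte[] region, byte[] row, Long result) {
        total.addAndGet(result == null ? 0L : result.longValue());
      }
    };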

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java
index cb8e5df..49134f1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java
@@ -38,8 +38,8 @@ package org.apache.hadoop.hbase.exceptions;
 
 import java.io.IOException;
 
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 
 /**
 * Thrown when the client believes that we are trying to communicate to has

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
index 570eda2..5bfd2f3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
@@ -17,9 +17,14 @@
  */
 package org.apache.hadoop.hbase.exceptions;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
 /**
  * Exception thrown if a mutation fails sanity checks.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class FailedSanityCheckException extends org.apache.hadoop.hbase.DoNotRetryIOException {
 
   private static final long serialVersionUID = 1788783640409186240L;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LockTimeoutException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LockTimeoutException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LockTimeoutException.java
index c30955b..b6b3c32 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LockTimeoutException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LockTimeoutException.java
@@ -19,8 +19,15 @@
  */
 package org.apache.hadoop.hbase.exceptions;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 
+/**
+ * Thrown when there is a timeout when trying to acquire a lock
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class LockTimeoutException extends DoNotRetryIOException {
 
   private static final long serialVersionUID = -1770764924258999825L;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java
index 2d66d54..51c960d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.exceptions;
 
 import java.net.ConnectException;
 
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.ServerName;
 
 /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
index 90ec7cf..933e888 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
@@ -19,10 +19,15 @@
 
 package org.apache.hadoop.hbase.exceptions;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
 /**
  * An error requesting an RPC protocol that the server is not serving.
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class UnknownProtocolException extends org.apache.hadoop.hbase.DoNotRetryIOException {
   private Class<?> protocol;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
index 38854d4..91eef6a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
@@ -19,6 +19,9 @@
 package org.apache.hadoop.hbase.filter;
 
 import com.google.protobuf.InvalidProtocolBufferException;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -26,7 +29,8 @@ import org.apache.hadoop.hbase.util.Bytes;
 /**
  * A long comparator which numerical compares against the specified byte array
  */
-
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class LongComparator extends ByteArrayComparable {
     private Long longValue;
 
@@ -44,6 +48,7 @@ public class LongComparator extends ByteArrayComparable {
     /**
      * @return The comparator serialized using pb
      */
+    @Override
     public byte [] toByteArray() {
         ComparatorProtos.LongComparator.Builder builder =
                 ComparatorProtos.LongComparator.newBuilder();

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
index 6e4f7d0..0bc20f3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
@@ -80,6 +80,8 @@ public class RegexStringComparator extends ByteArrayComparable {
   private Engine engine;
 
   /** Engine implementation type (default=JAVA) */
+  @InterfaceAudience.Public
+  @InterfaceStability.Stable
   public enum EngineType {
     JAVA,
     JONI
@@ -154,6 +156,7 @@ public class RegexStringComparator extends ByteArrayComparable {
   /**
    * @return The comparator serialized using pb
    */
+  @Override
   public byte [] toByteArray() {
     return engine.toByteArray();
   }
@@ -176,7 +179,7 @@ public class RegexStringComparator extends ByteArrayComparable {
     if (proto.hasEngine()) {
       EngineType engine = EngineType.valueOf(proto.getEngine());
       comparator = new RegexStringComparator(proto.getPattern(), proto.getPatternFlags(),
-        engine);      
+        engine);
     } else {
       comparator = new RegexStringComparator(proto.getPattern(), proto.getPatternFlags());
     }
@@ -196,6 +199,7 @@ public class RegexStringComparator extends ByteArrayComparable {
    * @return true if and only if the fields of the comparator that are serialized
    * are equal to the corresponding fields in other.  Used for testing.
    */
+  @Override
   boolean areSerializedFieldsEqual(ByteArrayComparable other) {
     if (other == this) return true;
     if (!(other instanceof RegexStringComparator)) return false;
@@ -213,7 +217,7 @@ public class RegexStringComparator extends ByteArrayComparable {
 
   /**
    * This is an internal interface for abstracting access to different regular
-   * expression matching engines. 
+   * expression matching engines.
    */
   static interface Engine {
     /**
@@ -221,7 +225,7 @@ public class RegexStringComparator extends ByteArrayComparable {
      * for matching
      */
     String getPattern();
-    
+
     /**
      * Returns the set of configured match flags, a bit mask that may include
      * {@link Pattern} flags
@@ -413,7 +417,7 @@ public class RegexStringComparator extends ByteArrayComparable {
         encoding = e.getEncoding();
       } else {
         throw new IllegalCharsetNameException(name);
-      }    
+      }
     }
   }
 }
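
A hedged usage sketch of the comparator and its EngineType, using the three-argument constructor visible in the hunk above (the pattern and filter target are illustrative):

    import java.util.regex.Pattern;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.RegexStringComparator;
    import org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType;
    import org.apache.hadoop.hbase.filter.RowFilter;

    // Match row keys that start with "user|", delegating matching to the byte-oriented JONI engine.
    RegexStringComparator comparator =
        new RegexStringComparator("^user\\|", Pattern.DOTALL, EngineType.JONI);
    RowFilter filter = new RowFilter(CompareOp.EQUAL, comparator);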

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/DelegatingPayloadCarryingRpcController.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/DelegatingPayloadCarryingRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/DelegatingPayloadCarryingRpcController.java
index a91ecb5..ad4224b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/DelegatingPayloadCarryingRpcController.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/DelegatingPayloadCarryingRpcController.java
@@ -19,11 +19,13 @@ package org.apache.hadoop.hbase.ipc;
 
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
 /**
  * Simple delegating controller for use with the {@link RpcControllerFactory} to help override
  * standard behavior of a {@link PayloadCarryingRpcController}.
  */
+@InterfaceAudience.Private
 public class DelegatingPayloadCarryingRpcController extends PayloadCarryingRpcController {
   private PayloadCarryingRpcController delegate;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
index 6d00adc..67e2524 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
@@ -27,6 +27,7 @@ import java.nio.ByteBuffer;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CellScanner;
@@ -49,6 +50,7 @@ import com.google.protobuf.Message;
 /**
  * Utility to help ipc'ing.
  */
+@InterfaceAudience.Private
 class IPCUtil {
   public static final Log LOG = LogFactory.getLog(IPCUtil.class);
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
index cf22dd5..76d7f9f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
@@ -15,7 +15,7 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.ClusterConnection;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java
index 8f1780c..f8ab23f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java
@@ -22,11 +22,13 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
 
 /**
  * Factory to create a {@link PayloadCarryingRpcController}
  */
+@InterfaceAudience.Private
 public class RpcControllerFactory {
 
   public static final String CUSTOM_CONTROLLER_CONF_KEY = "hbase.rpc.controllerfactory.class";
@@ -39,7 +41,7 @@ public class RpcControllerFactory {
   public PayloadCarryingRpcController newController() {
     return new PayloadCarryingRpcController();
   }
-  
+
   public PayloadCarryingRpcController newController(final CellScanner cellScanner) {
     return new PayloadCarryingRpcController(cellScanner);
   }
@@ -47,7 +49,7 @@ public class RpcControllerFactory {
   public PayloadCarryingRpcController newController(final List<CellScannable> cellIterables) {
     return new PayloadCarryingRpcController(cellIterables);
   }
-  
+
 
   public static RpcControllerFactory instantiate(Configuration configuration) {
     String rpcControllerFactoryClazz =
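
For completeness, a hedged sketch of how a deployment would plug in a custom factory through the configuration key defined above (the subclass name is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.ipc.RpcControllerFactory;

    Configuration conf = HBaseConfiguration.create();
    // Hypothetical application-supplied subclass of RpcControllerFactory.
    conf.set("hbase.rpc.controllerfactory.class", "com.example.MyRpcControllerFactory");
    RpcControllerFactory factory = RpcControllerFactory.instantiate(conf);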

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
index cbf63fc..09da0d9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java
@@ -18,6 +18,11 @@
 
 package org.apache.hadoop.hbase.ipc;
 
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
+
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcCallback;
@@ -50,6 +55,7 @@ import java.io.IOException;
  * </code>
  * </p>
  */
+@InterfaceAudience.Private
 public class ServerRpcController implements RpcController {
   /**
    * The exception thrown within

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
index ec98a5f..d438fa3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java
@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.hbase.ipc;
 
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
 import com.google.protobuf.RpcCallback;
 import com.google.protobuf.RpcController;
 
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
-
+@InterfaceAudience.Private
 public class TimeLimitedRpcController implements RpcController {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index c832cdf..b8a391b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -37,6 +37,7 @@ import java.util.Map.Entry;
 import java.util.NavigableSet;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
@@ -155,6 +156,7 @@ import com.google.protobuf.TextFormat;
  */
 @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="DP_CREATE_CLASSLOADER_INSIDE_DO_PRIVILEGED",
   justification="None. Address sometime.")
+@InterfaceAudience.Private // TODO: some clients (Hive, etc) use this class
 public final class ProtobufUtil {
 
   private ProtobufUtil() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java
index 66781f1..937e943 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.replication;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.HBaseException;
 
 /**
@@ -27,6 +28,7 @@ import org.apache.hadoop.hbase.exceptions.HBaseException;
  * store, loss of connection to a peer cluster or errors during deserialization of replication data.
  */
 @InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ReplicationException extends HBaseException {
 
   private static final long serialVersionUID = -8885598603988198062L;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
index 35fa602..f115a39 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.replication;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.Stoppable;
@@ -26,6 +27,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 /**
  * A factory class for instantiating replication objects that deal with replication state.
  */
+@InterfaceAudience.Private
 public class ReplicationFactory {
 
   public static ReplicationQueues getReplicationQueues(final ZooKeeperWatcher zk,

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
index cc49a64..de6f79e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
@@ -29,6 +29,7 @@ import java.util.concurrent.ConcurrentMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.CompoundConfiguration;
@@ -75,6 +76,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
  *
  * /hbase/replication/peers/1/tableCFs [Value: "table1; table2:cf1,cf3; table3:cfx,cfy"]
  */
+@InterfaceAudience.Private
 public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements ReplicationPeers {
 
   // Map of peer clusters keyed by their id
@@ -110,16 +112,16 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re
         throw new IllegalArgumentException("Cannot add a peer with id=" + id
             + " because that id already exists.");
       }
-      
+
       if(id.contains("-")){
         throw new IllegalArgumentException("Found invalid peer name:" + id);
       }
-      
+
       ZKUtil.createWithParents(this.zookeeper, this.peersZNode);
       List<ZKUtilOp> listOfOps = new ArrayList<ZKUtil.ZKUtilOp>();
       ZKUtilOp op1 = ZKUtilOp.createAndFailSilent(ZKUtil.joinZNode(this.peersZNode, id),
         toByteArray(peerConfig));
-      // There is a race (if hbase.zookeeper.useMulti is false) 
+      // There is a race (if hbase.zookeeper.useMulti is false)
       // b/w PeerWatcher and ReplicationZookeeper#add method to create the
       // peer-state znode. This happens while adding a peer
       // The peer state data is set as "ENABLED" by default.

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
index 0664923..fa0c654 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueueInfo.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.replication;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.ServerName;
 
 import java.util.ArrayList;
@@ -32,6 +34,7 @@ import java.util.List;
  * It will extract the peerId if it's recovered as well as the dead region servers
  * that were part of the queue's history.
  */
+@InterfaceAudience.Private
 public class ReplicationQueueInfo {
   private static final Log LOG = LogFactory.getLog(ReplicationQueueInfo.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java
index 689afba..5c068be 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClient.java
@@ -20,11 +20,14 @@ package org.apache.hadoop.hbase.replication;
 
 import java.util.List;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
 
 /**
  * This provides an interface for clients of replication to view replication queues. These queues
  * keep track of the HLogs that still need to be replicated to remote clusters.
  */
+@InterfaceAudience.Private
 public interface ReplicationQueuesClient {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
index a7d36c1..3bc4f48 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java
@@ -20,12 +20,14 @@ package org.apache.hadoop.hbase.replication;
 
 import java.util.List;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.zookeeper.KeeperException;
 
+@InterfaceAudience.Private
 public class ReplicationQueuesClientZKImpl extends ReplicationStateZKBase implements
     ReplicationQueuesClient {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
index 0ad94b3..9abb94b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
@@ -27,6 +27,7 @@ import java.util.TreeSet;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.HConstants;
@@ -60,6 +61,7 @@ import org.apache.zookeeper.KeeperException;
  *
  * /hbase/replication/rs/hostname.example.org,6020,1234/1/23522342.23422 [VALUE: 254]
  */
+@InterfaceAudience.Private
 public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements ReplicationQueues {
 
   /** Znode containing all replication queues for this region server. */
@@ -69,7 +71,7 @@ public class ReplicationQueuesZKImpl extends ReplicationStateZKBase implements R
 
   private static final Log LOG = LogFactory.getLog(ReplicationQueuesZKImpl.class);
 
-  public ReplicationQueuesZKImpl(final ZooKeeperWatcher zk, Configuration conf, 
+  public ReplicationQueuesZKImpl(final ZooKeeperWatcher zk, Configuration conf,
       Abortable abortable) {
     super(zk, conf, abortable);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
index 2302438..1691b3f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.replication;
 
 import java.util.List;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -32,6 +33,7 @@ import org.apache.zookeeper.KeeperException;
 /**
  * This is a base class for maintaining replication state in zookeeper.
  */
+@InterfaceAudience.Private
 public abstract class ReplicationStateZKBase {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
index 1a19cdd..f9f2d43 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
@@ -24,6 +24,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.Stoppable;
@@ -37,6 +38,7 @@ import org.apache.zookeeper.KeeperException;
  * responsible for handling replication events that are defined in the ReplicationListener
  * interface.
  */
+@InterfaceAudience.Private
 public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements ReplicationTracker {
 
   private static final Log LOG = LogFactory.getLog(ReplicationTrackerZKImpl.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
index 74a9f35..8f6e8e1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
@@ -61,7 +61,7 @@ public class HBaseSaslRpcClient {
   private final boolean fallbackAllowed;
   /**
    * Create a HBaseSaslRpcClient for an authentication method
-   * 
+   *
    * @param method
    *          the requested authentication method
    * @param token
@@ -75,11 +75,11 @@ public class HBaseSaslRpcClient {
   public HBaseSaslRpcClient(AuthMethod method,
       Token<? extends TokenIdentifier> token, String serverPrincipal, boolean fallbackAllowed)
       throws IOException {
-    this(method, token, serverPrincipal, fallbackAllowed, "authentication"); 
+    this(method, token, serverPrincipal, fallbackAllowed, "authentication");
   }
   /**
    * Create a HBaseSaslRpcClient for an authentication method
-   * 
+   *
    * @param method
    *          the requested authentication method
    * @param token
@@ -134,8 +134,8 @@ public class HBaseSaslRpcClient {
       throw new IOException("Unable to find SASL client implementation");
   }
 
-  protected SaslClient createDigestSaslClient(String[] mechanismNames, 
-      String saslDefaultRealm, CallbackHandler saslClientCallbackHandler) 
+  protected SaslClient createDigestSaslClient(String[] mechanismNames,
+      String saslDefaultRealm, CallbackHandler saslClientCallbackHandler)
       throws IOException {
     return Sasl.createSaslClient(mechanismNames, null, null, saslDefaultRealm,
         SaslUtil.SASL_PROPS, saslClientCallbackHandler);
@@ -143,7 +143,7 @@ public class HBaseSaslRpcClient {
 
   protected SaslClient createKerberosSaslClient(String[] mechanismNames,
       String userFirstPart, String userSecondPart) throws IOException {
-    return Sasl.createSaslClient(mechanismNames, null, userFirstPart, 
+    return Sasl.createSaslClient(mechanismNames, null, userFirstPart,
         userSecondPart, SaslUtil.SASL_PROPS, null);
   }
 
@@ -154,16 +154,16 @@ public class HBaseSaslRpcClient {
           WritableUtils.readString(inStream));
     }
   }
-  
+
   /**
    * Do client side SASL authentication with server via the given InputStream
    * and OutputStream
-   * 
+   *
    * @param inS
    *          InputStream to use
    * @param outS
    *          OutputStream to use
-   * @return true if connection is set up, or false if needs to switch 
+   * @return true if connection is set up, or false if needs to switch
    *             to simple Auth.
    * @throws IOException
    */
@@ -243,7 +243,7 @@ public class HBaseSaslRpcClient {
   /**
    * Get a SASL wrapped InputStream. Can be called only after saslConnect() has
    * been called.
-   * 
+   *
    * @param in
    *          the InputStream to wrap
    * @return a SASL wrapped InputStream
@@ -259,7 +259,7 @@ public class HBaseSaslRpcClient {
   /**
    * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has
    * been called.
-   * 
+   *
    * @param out
    *          the OutputStream to wrap
    * @return a SASL wrapped OutputStream
@@ -287,6 +287,7 @@ public class HBaseSaslRpcClient {
       this.userPassword = SaslUtil.encodePassword(token.getPassword());
     }
 
+    @Override
     public void handle(Callback[] callbacks)
         throws UnsupportedCallbackException {
       NameCallback nc = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
index b190084..f4538a6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
@@ -42,6 +42,9 @@ import java.util.Map;
 @InterfaceStability.Evolving
 public class Permission extends VersionedWritable {
   protected static final byte VERSION = 0;
+
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public enum Action {
     READ('R'), WRITE('W'), EXEC('X'), CREATE('C'), ADMIN('A');
 
@@ -147,6 +150,7 @@ public class Permission extends VersionedWritable {
     return result;
   }
 
+  @Override
   public String toString() {
     StringBuilder str = new StringBuilder("[Permission: ")
         .append("actions=");
@@ -166,6 +170,7 @@ public class Permission extends VersionedWritable {
   }
 
   /** @return the object version number */
+  @Override
   public byte getVersion() {
     return VERSION;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
index a252b9e..1451c1a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
@@ -354,6 +354,7 @@ public class TablePermission extends Permission {
     return result;
   }
 
+  @Override
   public String toString() {
     StringBuilder str = new StringBuilder("[TablePermission: ");
     if(namespace != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
index 5232377..f4e87f5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
@@ -183,6 +183,7 @@ public class UserPermission extends TablePermission {
     return result;
   }
 
+  @Override
   public String toString() {
     StringBuilder str = new StringBuilder("UserPermission: ")
         .append("user=").append(Bytes.toString(user))

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
index 868bd81..0fb6969 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
@@ -41,7 +41,7 @@ public class AuthenticationTokenIdentifier extends TokenIdentifier {
   protected long issueDate;
   protected long expirationDate;
   protected long sequenceNumber;
-  
+
   public AuthenticationTokenIdentifier() {
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java
index 4d87bdf..90dd0a7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java
@@ -20,11 +20,13 @@ package org.apache.hadoop.hbase.security.visibility;
 import java.io.IOException;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 
 /*
  * This exception indicates that VisibilityController hasn't finished initialization.
  */
 @InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class VisibilityControllerNotReadyException extends IOException {
 
   private static final long serialVersionUID = 1725986525207989173L;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java
index f6817e7..05f3556 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java
@@ -18,11 +18,13 @@
 package org.apache.hadoop.hbase.snapshot;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 
 /**
  * Thrown when a snapshot could not be exported due to an error during the operation.
  */
 @InterfaceAudience.Public
+@InterfaceStability.Stable
 @SuppressWarnings("serial")
 public class ExportSnapshotException extends HBaseSnapshotException {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
new file mode 100644
index 0000000..4cb715e
--- /dev/null
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
@@ -0,0 +1,273 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Modifier;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.ClassFinder.And;
+import org.apache.hadoop.hbase.ClassFinder.FileNameFilter;
+import org.apache.hadoop.hbase.ClassFinder.Not;
+import org.apache.hadoop.hbase.ClassTestFinder.TestClassFilter;
+import org.apache.hadoop.hbase.ClassTestFinder.TestFileNameFilter;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test cases for ensuring our client-visible classes have annotations
+ * for {@link InterfaceAudience}.
+ *
+ * All classes in the hbase-client and hbase-common modules MUST have InterfaceAudience
+ * annotations. All InterfaceAudience.Public annotated classes MUST also have InterfaceStability
+ * annotations. Think twice before marking something InterfaceAudience.Public. Make sure that
+ * it is an interface, not a class (for most cases), and that clients will actually depend on it.
+ * Once something is marked Public, we cannot change its signatures within a major release. NOT
+ * everything in the hbase-client module, nor every public java class, has to be marked
+ * InterfaceAudience.Public. ONLY the ones that an hbase application will directly use (Table, Get,
+ * etc, versus ProtobufUtil).
+ *
+ * Also note that HBase has its own annotations in the hbase-annotations module with the same
+ * names as in Hadoop. You should use the HBase classes.
+ *
+ * See https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html
+ * and https://issues.apache.org/jira/browse/HBASE-10462.
+ */
+@Category(SmallTests.class)
+public class TestInterfaceAudienceAnnotations {
+
+  private static final Log LOG = LogFactory.getLog(TestInterfaceAudienceAnnotations.class);
+
+  /** Selects classes with "generated" in their package name */
+  class GeneratedClassFilter implements ClassFinder.ClassFilter {
+    @Override
+    public boolean isCandidateClass(Class<?> c) {
+      return c.getPackage().getName().contains("generated");
+    }
+  }
+
+  /** Selects classes with one of the {@link InterfaceAudience} annotations in their class
+   * declaration.
+   */
+  class InterfaceAudienceAnnotatedClassFilter implements ClassFinder.ClassFilter {
+    @Override
+    public boolean isCandidateClass(Class<?> c) {
+      if (getAnnotation(c) != null) {
+        // class itself has a declared annotation.
+        return true;
+      }
+
+      // If this is an inner class, look at the enclosing class to see whether it is
+      // annotated. All inner classes of private classes are considered annotated.
+      return isAnnotatedPrivate(c.getEnclosingClass());
+    }
+
+    private boolean isAnnotatedPrivate(Class<?> c) {
+      if (c == null) {
+        return false;
+      }
+
+      Class<?> ann = getAnnotation(c);
+      if (ann != null &&
+        !InterfaceAudience.Public.class.equals(ann)) {
+        return true;
+      }
+
+      return isAnnotatedPrivate(c.getEnclosingClass());
+    }
+
+    protected Class<?> getAnnotation(Class<?> c) {
+      // we should get only declared annotations, not inherited ones
+      Annotation[] anns = c.getDeclaredAnnotations();
+
+      for (Annotation ann : anns) {
+        // Hadoop got this wrong by not making the annotation values (private, public, ...)
+        // an enum; instead we have three independent annotations!
+        Class<?> type = ann.annotationType();
+        if (isInterfaceAudienceClass(type)) {
+          return type;
+        }
+      }
+      return null;
+    }
+  }
+
+  /** Selects classes with one of the {@link InterfaceStability} annotations in their class
+   * declaration.
+   */
+  class InterfaceStabilityAnnotatedClassFilter implements ClassFinder.ClassFilter {
+    @Override
+    public boolean isCandidateClass(Class<?> c) {
+      if (getAnnotation(c) != null) {
+        // class itself has a declared annotation.
+        return true;
+      }
+      return false;
+    }
+
+    protected Class<?> getAnnotation(Class<?> c) {
+      // we should get only declared annotations, not inherited ones
+      Annotation[] anns = c.getDeclaredAnnotations();
+
+      for (Annotation ann : anns) {
+        // Hadoop got this wrong by not making the annotation values (private, public, ...)
+        // an enum; instead we have three independent annotations!
+        Class<?> type = ann.annotationType();
+        if (isInterfaceStabilityClass(type)) {
+          return type;
+        }
+      }
+      return null;
+    }
+  }
+
+  /** Selects classes with the {@link InterfaceAudience.Public} annotation in their
+   * class declaration.
+   */
+  class InterfaceAudiencePublicAnnotatedClassFilter extends InterfaceAudienceAnnotatedClassFilter {
+    @Override
+    public boolean isCandidateClass(Class<?> c) {
+      return (InterfaceAudience.Public.class.equals(getAnnotation(c)));
+    }
+  }
+
+  /**
+   * Selects InterfaceAudience or InterfaceStability classes. Don't go meta!!!
+   */
+  class IsInterfaceStabilityClassFilter implements ClassFinder.ClassFilter {
+    @Override
+    public boolean isCandidateClass(Class<?> c) {
+      return
+          isInterfaceAudienceClass(c) ||
+          isInterfaceStabilityClass(c);
+    }
+  }
+
+  private boolean isInterfaceAudienceClass(Class<?> c) {
+    return
+        c.equals(InterfaceAudience.Public.class) ||
+        c.equals(InterfaceAudience.Private.class) ||
+        c.equals(InterfaceAudience.LimitedPrivate.class);
+  }
+
+  private boolean isInterfaceStabilityClass(Class<?> c) {
+    return
+        c.equals(InterfaceStability.Stable.class) ||
+        c.equals(InterfaceStability.Unstable.class) ||
+        c.equals(InterfaceStability.Evolving.class);
+  }
+
+  /** Selects classes that are declared public */
+  class PublicClassFilter implements ClassFinder.ClassFilter {
+    @Override
+    public boolean isCandidateClass(Class<?> c) {
+      int mod = c.getModifiers();
+      return Modifier.isPublic(mod);
+    }
+  }
+
+  /** Selects paths (jars and class dirs) only from the main code, not test classes */
+  class MainCodeResourcePathFilter implements ClassFinder.ResourcePathFilter {
+    @Override
+    public boolean isCandidatePath(String resourcePath, boolean isJar) {
+      return !resourcePath.contains("test-classes") &&
+          !resourcePath.contains("tests.jar");
+    }
+  }
+
+  /**
+   * Checks whether all the classes in client and common modules contain
+   * {@link InterfaceAudience} annotations.
+   */
+  @Test
+  public void testInterfaceAudienceAnnotation()
+      throws ClassNotFoundException, IOException, LinkageError {
+
+    // find classes that are:
+    // In the main jar
+    // AND are public
+    // AND NOT test classes
+    // AND NOT generated classes
+    // AND are NOT annotated with InterfaceAudience
+    ClassFinder classFinder = new ClassFinder(
+      new MainCodeResourcePathFilter(),
+      new Not((FileNameFilter)new TestFileNameFilter()),
+      new And(new PublicClassFilter(),
+              new Not(new TestClassFilter()),
+              new Not(new GeneratedClassFilter()),
+              new Not(new IsInterfaceStabilityClassFilter()),
+              new Not(new InterfaceAudienceAnnotatedClassFilter()))
+    );
+
+    Set<Class<?>> classes = classFinder.findClasses(false);
+
+    LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:");
+    for (Class<?> clazz : classes) {
+      LOG.info(clazz);
+    }
+
+    Assert.assertEquals("All classes should have @InterfaceAudience annotation",
+      0, classes.size());
+  }
+
+  /**
+   * Checks whether all the classes in client and common modules that are marked
+   * InterfaceAudience.Public also have {@link InterfaceStability} annotations.
+   */
+  @Test
+  public void testInterfaceStabilityAnnotation()
+      throws ClassNotFoundException, IOException, LinkageError {
+
+    // find classes that are:
+    // In the main jar
+    // AND are public
+    // AND NOT test classes
+    // AND NOT generated classes
+    // AND are annotated with InterfaceAudience.Public
+    // AND NOT annotated with InterfaceStability
+    ClassFinder classFinder = new ClassFinder(
+      new MainCodeResourcePathFilter(),
+      new Not((FileNameFilter)new TestFileNameFilter()),
+      new And(new PublicClassFilter(),
+              new Not(new TestClassFilter()),
+              new Not(new GeneratedClassFilter()),
+              new InterfaceAudiencePublicAnnotatedClassFilter(),
+              new Not(new IsInterfaceStabilityClassFilter()),
+              new Not(new InterfaceStabilityAnnotatedClassFilter()))
+    );
+
+    Set<Class<?>> classes = classFinder.findClasses(false);
+
+    LOG.info("These are the classes that DO NOT have @InterfaceStability annotation:");
+    for (Class<?> clazz : classes) {
+      LOG.info(clazz);
+    }
+
+    Assert.assertEquals("All classes that are marked with @InterfaceAudience.Public should "
+        + "have @InterfaceStability annotation as well",
+      0, classes.size());
+  }
+}
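
For context, the annotation pattern the new test enforces looks roughly like the sketch below. The package and class names here are hypothetical and are not part of this commit: a public, client-facing class carries both audience and stability annotations, public nested types carry their own, and internal helpers are marked Private.

    package org.apache.hadoop.hbase.example;   // hypothetical package, for illustration only

    import org.apache.hadoop.hbase.classification.InterfaceAudience;
    import org.apache.hadoop.hbase.classification.InterfaceStability;

    /**
     * Hypothetical client-facing class. Because it is public and marked
     * InterfaceAudience.Public, testInterfaceStabilityAnnotation also requires an
     * InterfaceStability annotation on it.
     */
    @InterfaceAudience.Public
    @InterfaceStability.Evolving
    public class ExampleClientFacingOptions {

      /**
       * Public nested types need their own annotations, as this patch does for
       * Permission.Action, NamespaceDescriptor.Builder and Encryption.Context.
       */
      @InterfaceAudience.Public
      @InterfaceStability.Evolving
      public enum Consistency { STRONG, TIMELINE }

      /**
       * Internal nested helper. A non-Public audience is enough; no stability
       * annotation is required, and inner classes of Private classes are treated
       * as annotated by InterfaceAudienceAnnotatedClassFilter.
       */
      @InterfaceAudience.Private
      static class InternalState {
      }
    }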

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
index 8ad8584..86b4c32 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 
@@ -26,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
  * only sets the configuration through the {@link #setConf(Configuration)}
  * method
  */
+@InterfaceAudience.Private
 public class BaseConfigurable implements Configurable {
 
   private Configuration conf;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index a139b50..f987b01 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -84,6 +84,7 @@ public final class HConstants {
   /**
    * Status codes used for return values of bulk operations.
    */
+  @InterfaceAudience.Private
   public enum OperationStatusCode {
     NOT_RUN,
     SUCCESS,
@@ -562,6 +563,7 @@ public final class HConstants {
   public static final String REGION_IMPL = "hbase.hregion.impl";
 
   /** modifyTable op for replacing the table descriptor */
+  @InterfaceAudience.Private
   public static enum Modify {
     CLOSE_REGION,
     TABLE_COMPACT,

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/MetaMutationAnnotation.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/MetaMutationAnnotation.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/MetaMutationAnnotation.java
index 1033f40..5e6f6f7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/MetaMutationAnnotation.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/MetaMutationAnnotation.java
@@ -25,6 +25,8 @@ import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
 /**
  * The field or the parameter to which this annotation is applied must hold
  * mutations for the hbase:meta table only.
@@ -32,6 +34,7 @@ import java.lang.annotation.Target;
 @Documented
 @Target( { ElementType.LOCAL_VARIABLE, ElementType.PARAMETER })
 @Retention(RetentionPolicy.CLASS)
+@InterfaceAudience.Private
 public @interface MetaMutationAnnotation {
 
 }
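
As a reading aid, here is a small, hypothetical caller showing the intended use of MetaMutationAnnotation on a parameter that carries hbase:meta mutations; the class and method names below are illustrative only.

    package org.apache.hadoop.hbase.example;   // hypothetical package, for illustration only

    import java.util.List;

    import org.apache.hadoop.hbase.MetaMutationAnnotation;

    public class ExampleMetaEditor {

      // The annotation targets parameters and local variables and documents that the
      // annotated value holds mutations destined for the hbase:meta table; with CLASS
      // retention it survives compilation but is not visible via runtime reflection.
      void applyMetaEdits(@MetaMutationAnnotation List<String> metaMutationDescriptions) {
        for (String m : metaMutationDescriptions) {
          System.out.println("would apply to hbase:meta: " + m);
        }
      }
    }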

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
index 31a1313..4f0e296 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
@@ -161,6 +161,8 @@ public class NamespaceDescriptor {
     return new Builder(ns);
   }
 
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static class Builder {
     private String bName;
     private Map<String, String> bConfiguration = new TreeMap<String, String>();
@@ -173,7 +175,7 @@ public class NamespaceDescriptor {
     private Builder(String name) {
       this.bName = name;
     }
-    
+
     public Builder addConfiguration(Map<String, String> configuration) {
       this.bConfiguration.putAll(configuration);
       return this;
@@ -193,7 +195,7 @@ public class NamespaceDescriptor {
       if (this.bName == null){
          throw new IllegalArgumentException("A name has to be specified in a namespace.");
       }
-      
+
       NamespaceDescriptor desc = new NamespaceDescriptor(this.bName);
       desc.configuration = this.bConfiguration;
       return desc;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
index b70e94a..d74a5d6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
@@ -228,6 +228,8 @@ implements WritableComparable<ImmutableBytesWritable> {
 
   /** A Comparator optimized for ImmutableBytesWritable.
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Stable
   public static class Comparator extends WritableComparator {
     private BytesWritable.Comparator comparator =
       new BytesWritable.Comparator();

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
index 8a21f9e..1497fcb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
@@ -25,12 +25,14 @@ import java.io.InputStream;
 
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
 /**
  * Copied from guava source code v15 (LimitedInputStream)
  * Guava deprecated LimitInputStream in v14 and removed it in v15. Copying this class here
  * allows to be compatible with guava 11 to 15+.
  */
+@InterfaceAudience.Private
 public final class LimitInputStream extends FilterInputStream {
   private long left;
   private long mark = -1;

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
index 85d6eb5..9c20f3b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
@@ -56,6 +56,8 @@ public final class Encryption {
   /**
    * Crypto context
    */
+  @InterfaceAudience.Public
+  @InterfaceStability.Evolving
   public static class Context extends org.apache.hadoop.hbase.io.crypto.Context {
 
     /** The null crypto context */

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java
index 369d71e..dd6df0c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.io.hadoopbackport;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
 /**
  * The ThrottledInputStream provides bandwidth throttling on a specified
  * InputStream. It is implemented as a wrapper on top of another InputStream
@@ -31,6 +33,7 @@ import java.io.InputStream;
  * (Thus, while the read-rate might exceed the maximum for a given short interval,
  * the average tends towards the specified maximum, overall.)
  */
+@InterfaceAudience.Private
 public class ThrottledInputStream extends InputStream {
 
   private final InputStream rawStream;
@@ -47,7 +50,7 @@ public class ThrottledInputStream extends InputStream {
   }
 
   public ThrottledInputStream(InputStream rawStream, long maxBytesPerSec) {
-    assert maxBytesPerSec > 0 : "Bandwidth " + maxBytesPerSec + " is invalid"; 
+    assert maxBytesPerSec > 0 : "Bandwidth " + maxBytesPerSec + " is invalid";
     this.rawStream = rawStream;
     this.maxBytesPerSec = maxBytesPerSec;
   }
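
For reference, a minimal usage sketch of the throttling described above; the file path and rate are made up, and only the two-argument constructor visible in this hunk is assumed.

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import org.apache.hadoop.hbase.io.hadoopbackport.ThrottledInputStream;

    public class ThrottledReadExample {
      public static void main(String[] args) throws IOException {
        // Cap reads of a local file at roughly 1 MB/s on average.
        try (FileInputStream file = new FileInputStream("/tmp/example.dat")) {
          InputStream in = new ThrottledInputStream(file, 1024L * 1024L);
          byte[] buf = new byte[8192];
          long total = 0;
          int n;
          while ((n = in.read(buf)) != -1) {
            total += n;   // individual reads may burst, but the long-run rate stays under the cap
          }
          System.out.println("Read " + total + " bytes");
        }
      }
    }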

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
index 82f686f..66df645 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.util.ReflectionUtils;
 /**
  * Provide an instance of a user. Allows custom {@link User} creation.
  */
-
 @InterfaceAudience.Private
 public class UserProvider extends BaseConfigurable {
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/aa343ebc/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
index b7fa574..b2a36d7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
@@ -19,8 +19,10 @@
 package org.apache.hadoop.hbase.trace;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.htrace.HTraceConfiguration;
 
+@InterfaceAudience.Private
 public class HBaseHTraceConfiguration extends HTraceConfiguration {
 
   public static final String KEY_PREFIX = "hbase.";

