hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From chia7...@apache.org
Subject [4/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category
Date Thu, 22 Mar 2018 10:22:00 GMT
HBASE-20212 Make all Public classes have InterfaceAudience category

Signed-off-by: tedyu <yuzhihong@gmail.com>
Signed-off-by: Michael Stack <stack@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dd9e46bb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dd9e46bb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dd9e46bb

Branch: refs/heads/branch-2
Commit: dd9e46bbf5fa76b06720392d27d07855ce2d553e
Parents: 0a94c9c
Author: Chia-Ping Tsai <chia7712@gmail.com>
Authored: Thu Mar 22 09:24:14 2018 +0800
Committer: Chia-Ping Tsai <chia7712@gmail.com>
Committed: Thu Mar 22 18:09:54 2018 +0800

----------------------------------------------------------------------
 hbase-annotations/pom.xml                       |   4 +
 hbase-client/pom.xml                            |   4 +
 .../hbase/TestInterfaceAudienceAnnotations.java | 540 -------------------
 hbase-common/pom.xml                            |   4 +
 .../apache/hadoop/hbase/trace/TraceUtil.java    |   2 +
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  11 +-
 .../hadoop/hbase/util/JSONMetricUtil.java       |  11 +-
 hbase-endpoint/pom.xml                          |   4 +
 hbase-examples/pom.xml                          |   4 +
 .../client/example/AsyncClientExample.java      |   3 +-
 .../client/example/BufferedMutatorExample.java  |   3 +-
 .../client/example/ExportEndpointExample.java   |  15 +-
 .../hbase/client/example/HttpProxyExample.java  |   3 +-
 .../example/MultiThreadedClientExample.java     |   3 +-
 .../client/example/RefreshHFilesClient.java     |   7 +-
 .../coprocessor/example/BulkDeleteEndpoint.java |  10 +-
 .../example/DelegatingInternalScanner.java      |   3 +-
 .../ExampleMasterObserverWithMetrics.java       |   3 +-
 .../ExampleRegionObserverWithMetrics.java       |   3 +-
 .../example/RefreshHFilesEndpoint.java          |   8 +-
 .../coprocessor/example/RowCountEndpoint.java   |  10 +-
 .../example/ScanModifyingObserver.java          |   3 +-
 .../example/ValueRewritingObserver.java         |   3 +-
 .../example/WriteHeavyIncrementObserver.java    |   3 +-
 .../example/ZooKeeperScanPolicyObserver.java    |   3 +-
 .../hadoop/hbase/mapreduce/IndexBuilder.java    |   3 +-
 .../hadoop/hbase/mapreduce/SampleUploader.java  |   3 +-
 .../apache/hadoop/hbase/thrift/DemoClient.java  |   4 +-
 .../hadoop/hbase/thrift/HttpDoAsClient.java     |   5 +-
 .../apache/hadoop/hbase/thrift2/DemoClient.java |   5 +-
 .../org/apache/hadoop/hbase/types/PBCell.java   |   5 +-
 hbase-external-blockcache/pom.xml               |   4 +
 hbase-hadoop-compat/pom.xml                     |   4 +
 .../hadoop/hbase/CompatibilityFactory.java      |   2 +
 .../hbase/CompatibilitySingletonFactory.java    |   2 +
 .../apache/hadoop/hbase/io/MetricsIOSource.java |   2 +
 .../hadoop/hbase/io/MetricsIOWrapper.java       |   3 +
 .../hbase/ipc/MetricsHBaseServerSource.java     |   2 +
 .../ipc/MetricsHBaseServerSourceFactory.java    |   3 +
 .../hbase/ipc/MetricsHBaseServerWrapper.java    |   3 +
 .../master/MetricsAssignmentManagerSource.java  |   2 +
 .../master/MetricsMasterFileSystemSource.java   |   2 +
 .../hbase/master/MetricsMasterProcSource.java   |   2 +
 .../master/MetricsMasterProcSourceFactory.java  |   3 +
 .../hbase/master/MetricsMasterQuotaSource.java  |   2 +
 .../master/MetricsMasterQuotaSourceFactory.java |   3 +
 .../hbase/master/MetricsMasterSource.java       |   2 +
 .../master/MetricsMasterSourceFactory.java      |   3 +
 .../hbase/master/MetricsMasterWrapper.java      |   2 +
 .../hbase/master/MetricsSnapshotSource.java     |   2 +
 .../master/balancer/MetricsBalancerSource.java  |   2 +
 .../MetricsStochasticBalancerSource.java        |   7 +-
 .../apache/hadoop/hbase/metrics/BaseSource.java |   3 +
 .../hbase/metrics/ExceptionTrackingSource.java  |   3 +
 .../hbase/metrics/JvmPauseMonitorSource.java    |   3 +
 .../hadoop/hbase/metrics/MBeanSource.java       |   2 +
 .../hadoop/hbase/metrics/OperationMetrics.java  |   3 +
 .../MetricsHeapMemoryManagerSource.java         |   2 +
 .../MetricsRegionAggregateSource.java           |   2 +
 .../MetricsRegionServerQuotaSource.java         |   2 +
 .../regionserver/MetricsRegionServerSource.java |   2 +
 .../MetricsRegionServerSourceFactory.java       |   2 +
 .../MetricsRegionServerWrapper.java             |   3 +
 .../hbase/regionserver/MetricsRegionSource.java |   2 +
 .../regionserver/MetricsRegionWrapper.java      |   3 +
 .../MetricsTableAggregateSource.java            |   2 +
 .../regionserver/MetricsTableLatencies.java     |   3 +
 .../hbase/regionserver/MetricsTableSource.java  |   3 +
 .../MetricsTableWrapperAggregate.java           |   2 +
 .../regionserver/wal/MetricsWALSource.java      |   2 +
 .../MetricsReplicationSinkSource.java           |   3 +
 .../regionserver/MetricsReplicationSource.java  |   2 +
 .../MetricsReplicationSourceFactory.java        |   3 +
 .../MetricsReplicationSourceSource.java         |   2 +
 .../hadoop/hbase/rest/MetricsRESTSource.java    |  10 +-
 .../hbase/thrift/MetricsThriftServerSource.java |   2 +
 .../MetricsThriftServerSourceFactory.java       |   3 +
 .../hbase/zookeeper/MetricsZooKeeperSource.java |   2 +
 .../apache/hadoop/metrics2/MetricHistogram.java |   3 +
 .../apache/hadoop/metrics2/MetricsExecutor.java |   2 +
 hbase-hadoop2-compat/pom.xml                    |   4 +
 .../hadoop/hbase/io/MetricsIOSourceImpl.java    |   2 +
 .../metrics/ExceptionTrackingSourceImpl.java    |   2 +
 .../impl/GlobalMetricRegistriesAdapter.java     |   9 +-
 .../impl/HBaseMetrics2HadoopMetricsAdapter.java |   3 +-
 .../MetricsReplicationGlobalSourceSource.java   |   2 +
 .../MetricsReplicationSinkSourceImpl.java       |   3 +-
 .../MetricsReplicationSourceFactoryImpl.java    |   3 +
 .../MetricsReplicationSourceSourceImpl.java     |   2 +
 .../lib/DefaultMetricsSystemHelper.java         |   2 +
 .../hadoop/metrics2/lib/MutableFastCounter.java |   2 +
 hbase-http/pom.xml                              |   4 +
 .../hadoop/hbase/http/FilterContainer.java      |   2 +
 .../hadoop/hbase/http/FilterInitializer.java    |   2 +
 .../apache/hadoop/hbase/http/HtmlQuoting.java   |   5 +-
 .../hadoop/hbase/http/HttpRequestLog.java       |   7 +-
 .../hbase/http/HttpRequestLogAppender.java      |   2 +
 .../hadoop/hbase/http/HttpServerUtil.java       |  10 +-
 .../hadoop/hbase/http/jmx/JMXJsonServlet.java   |   4 +-
 .../apache/hadoop/hbase/http/log/LogLevel.java  |  14 +-
 hbase-it/pom.xml                                |   4 +
 hbase-mapreduce/pom.xml                         |   4 +
 .../hbase/mapreduce/HFileInputFormat.java       |   3 +-
 .../hadoop/hbase/mapreduce/HashTable.java       |   4 +-
 .../hadoop/hbase/mapreduce/JarFinder.java       |  10 +-
 .../mapreduce/MultithreadedTableMapper.java     |   3 +-
 .../hadoop/hbase/mapreduce/SyncTable.java       |   6 +-
 .../replication/VerifyReplication.java          |   2 +
 hbase-metrics-api/pom.xml                       |   4 +
 hbase-metrics/pom.xml                           |   4 +
 hbase-procedure/pom.xml                         |   4 +
 hbase-protocol-shaded/pom.xml                   |   4 +
 hbase-protocol/pom.xml                          |   4 +
 .../protobuf/HBaseZeroCopyByteString.java       |   3 +
 hbase-replication/pom.xml                       |   4 +
 hbase-rest/pom.xml                              |   4 +
 .../hbase/rest/ProtobufStreamingOutput.java     |   4 +-
 .../hadoop/hbase/rest/filter/AuthFilter.java    |   4 +-
 hbase-rsgroup/pom.xml                           |   4 +
 hbase-server/pom.xml                            |   4 +
 .../hadoop/hbase/DaemonThreadFactory.java       |   2 +
 .../apache/hadoop/hbase/HealthCheckChore.java   |   2 +
 .../org/apache/hadoop/hbase/JMXListener.java    |  20 +-
 .../hbase/SslRMIClientSocketFactorySecure.java  |   3 +-
 .../hbase/SslRMIServerSocketFactorySecure.java  |   3 +-
 .../org/apache/hadoop/hbase/ZNodeClearer.java   |   5 +-
 .../example/HFileArchiveTableMonitor.java       |   3 +-
 .../org/apache/hadoop/hbase/fs/HFileSystem.java |  12 +-
 .../org/apache/hadoop/hbase/io/MetricsIO.java   |   3 +
 .../hadoop/hbase/io/MetricsIOWrapperImpl.java   |   2 +
 .../hadoop/hbase/io/hfile/AgeSnapshot.java      |   2 +
 .../hadoop/hbase/io/hfile/BlockPriority.java    |   3 +
 .../hbase/io/hfile/BlockWithScanInfo.java       |   2 +
 .../hadoop/hbase/io/hfile/PrefetchExecutor.java |   8 +-
 .../hbase/ipc/EmptyServiceNameException.java    |   3 +
 .../hadoop/hbase/ipc/FifoRpcScheduler.java      |   3 +-
 .../ipc/MetricsHBaseServerWrapperImpl.java      |   3 +
 .../apache/hadoop/hbase/ipc/QosPriority.java    |   3 +-
 .../hbase/ipc/UnknownServiceException.java      |   3 +
 ...MasterAnnotationReadingPriorityFunction.java |  12 +-
 .../hbase/master/MetricsAssignmentManager.java  |   6 +-
 .../hbase/master/MetricsMasterFileSystem.java   |   2 +
 .../hadoop/hbase/master/MetricsSnapshot.java    |   2 +
 .../hbase/master/balancer/BaseLoadBalancer.java |   2 +
 .../hbase/master/balancer/ClusterLoadState.java |   3 +-
 .../balancer/FavoredStochasticBalancer.java     |   2 +
 .../hbase/master/balancer/MetricsBalancer.java  |   6 +-
 .../balancer/MetricsStochasticBalancer.java     |   2 +
 .../master/cleaner/BaseFileCleanerDelegate.java |   5 +-
 .../hbase/master/cleaner/CleanerChore.java      |  23 +-
 .../master/procedure/RSProcedureDispatcher.java |   2 +
 .../master/procedure/RecoverMetaProcedure.java  |   4 +-
 .../master/procedure/ServerCrashProcedure.java  |   3 +-
 .../procedure/MasterProcedureManagerHost.java   |   5 +-
 .../RegionServerProcedureManagerHost.java       |   5 +-
 .../hadoop/hbase/procedure/Subprocedure.java    |   3 +-
 .../quotas/TableSpaceQuotaSnapshotNotifier.java |   3 +-
 ...ssingSnapshotViolationPolicyEnforcement.java |   6 +-
 .../regionserver/DumpRegionServerMetrics.java   |   9 +-
 .../hbase/regionserver/SteppingSplitPolicy.java |   3 +
 .../DateTieredCompactionRequest.java            |   3 +-
 .../hbase/replication/BulkLoadCellFilter.java   |   5 +-
 .../regionserver/DumpReplicationQueues.java     |   3 +-
 .../regionserver/ReplicationSinkManager.java    |   4 +-
 .../regionserver/ReplicationSyncUp.java         |   2 +
 .../VisibilityNewVersionBehaivorTracker.java    |   3 +-
 .../visibility/VisibilityReplication.java       |   7 +-
 .../hadoop/hbase/snapshot/CreateSnapshot.java   |   5 +-
 .../hadoop/hbase/tool/WriteSinkCoprocessor.java |  10 +-
 .../hadoop/hbase/util/GetJavaProperty.java      |   7 +-
 .../hadoop/hbase/util/HFileArchiveUtil.java     |   4 +-
 .../hbase/util/RollingStatCalculator.java       |   3 +
 .../hbase/util/ServerRegionReplicaUtil.java     |   3 +-
 .../hadoop/hbase/util/ShutdownHookManager.java  |  20 +-
 .../apache/hadoop/hbase/util/SortedList.java    |   4 +-
 .../hadoop/hbase/util/YammerHistogramUtils.java |   3 +-
 .../util/hbck/TableIntegrityErrorHandler.java   |   9 +-
 .../hbck/TableIntegrityErrorHandlerImpl.java    |   5 +-
 .../hbase/wal/NettyAsyncFSWALConfigHelper.java  |   6 +-
 hbase-shell/pom.xml                             |   4 +
 hbase-thrift/pom.xml                            |   4 +
 .../thrift/HttpAuthenticationException.java     |   3 +
 .../hadoop/hbase/thrift/IncrementCoalescer.java |   3 +-
 .../hbase/thrift/IncrementCoalescerMBean.java   |   3 +
 .../hbase/thrift/THBaseThreadPoolExecutor.java  |   2 +
 hbase-zookeeper/pom.xml                         |   4 +
 .../zookeeper/RegionNormalizerTracker.java      |   3 +-
 pom.xml                                         |  29 +
 188 files changed, 610 insertions(+), 736 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-annotations/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-annotations/pom.xml b/hbase-annotations/pom.xml
index 88084fe..63a2e55 100644
--- a/hbase-annotations/pom.xml
+++ b/hbase-annotations/pom.xml
@@ -47,6 +47,10 @@
           <failOnViolation>true</failOnViolation>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+      </plugin>
     </plugins>
   </build>
 </project>

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-client/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index e09fec2..6798f53 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -47,6 +47,10 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-source-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
deleted file mode 100644
index 493e23a..0000000
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
+++ /dev/null
@@ -1,540 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase;
-
-import java.io.IOException;
-import java.lang.annotation.Annotation;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Method;
-import java.lang.reflect.Modifier;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
-import java.util.Set;
-import org.apache.hadoop.hbase.ClassFinder.And;
-import org.apache.hadoop.hbase.ClassFinder.FileNameFilter;
-import org.apache.hadoop.hbase.ClassFinder.Not;
-import org.apache.hadoop.hbase.ClassTestFinder.TestClassFilter;
-import org.apache.hadoop.hbase.ClassTestFinder.TestFileNameFilter;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.util.Triple;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
-import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Test cases for ensuring our client visible classes have annotations for
- * {@link InterfaceAudience}.
- * <p>
- * All classes in hbase-client and hbase-common module MUST have InterfaceAudience annotations.
- * Think twice about marking an interface InterfaceAudience.Public. Make sure that it is an
- * interface, not a class (for most cases), and clients will actually depend on it. Once something
- * is marked with Public, we cannot change the signatures within the major release. NOT everything
- * in the hbase-client module or every java public class has to be marked with
- * InterfaceAudience.Public. ONLY the ones that an hbase application will directly use (Table, Get,
- * etc, versus ProtobufUtil). And also, InterfaceAudience.Public annotated classes MUST NOT have
- * InterfaceStability annotations. The stability of these classes only depends on versioning.
- * <p>
- * All classes which are marked as InterfaceAudience.LimitedPrivate MUST also have
- * InterfaceStability annotations. The only exception is HBaseInterfaceAudience.CONFIG. It is used
- * to indicate that the class name will be exposed in user facing configuration files.
- * <p>
- * Also note that HBase has it's own annotations in hbase-annotations module with the same names as
- * in Hadoop. You should use the HBase's classes.
- * <p>
- * See
- * https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html
- * and https://issues.apache.org/jira/browse/HBASE-10462.
- */
-@Category(SmallTests.class)
-public class TestInterfaceAudienceAnnotations {
-
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-      HBaseClassTestRule.forClass(TestInterfaceAudienceAnnotations.class);
-
-  private static final String HBASE_PROTOBUF = "org.apache.hadoop.hbase.protobuf.generated";
-  private static final Logger LOG = LoggerFactory.getLogger(TestInterfaceAudienceAnnotations.class);
-
-  /** Selects classes with generated in their package name */
-  static class GeneratedClassFilter implements ClassFinder.ClassFilter {
-    @Override
-    public boolean isCandidateClass(Class<?> c) {
-      return c.getPackage().getName().contains("generated");
-    }
-  }
-
-  static class ShadedProtobufClassFilter implements ClassFinder.ClassFilter {
-    @Override
-    public boolean isCandidateClass(Class<?> c) {
-      return c.getPackage().getName().
-          contains("org.apache.hbase.thirdparty.com.google.protobuf");
-    }
-  }
-
-  /** Selects classes with one of the {@link InterfaceAudience} annotation in their class
-   * declaration.
-   */
-  class InterfaceAudienceAnnotatedClassFilter implements ClassFinder.ClassFilter {
-    @Override
-    public boolean isCandidateClass(Class<?> c) {
-      if (getAnnotation(c) != null) {
-        // class itself has a declared annotation.
-        return true;
-      }
-
-      // If this is an internal class, look for the encapsulating class to see whether it has
-      // annotation. All inner classes of private classes are considered annotated.
-      return isAnnotatedPrivate(c.getEnclosingClass());
-    }
-
-    private boolean isAnnotatedPrivate(Class<?> c) {
-      if (c == null) {
-        return false;
-      }
-
-      Annotation ann = getAnnotation(c);
-      if (ann != null && !InterfaceAudience.Public.class.equals(ann.annotationType())) {
-        return true;
-      }
-
-      return isAnnotatedPrivate(c.getEnclosingClass());
-    }
-
-    protected Annotation getAnnotation(Class<?> c) {
-      // we should get only declared annotations, not inherited ones
-      Annotation[] anns = c.getDeclaredAnnotations();
-
-      for (Annotation ann : anns) {
-        // Hadoop clearly got it wrong for not making the annotation values (private, public, ..)
-        // an enum instead we have three independent annotations!
-        Class<?> type = ann.annotationType();
-        if (isInterfaceAudienceClass(type)) {
-          return ann;
-        }
-      }
-      return null;
-    }
-  }
-
-  /** Selects classes with one of the {@link InterfaceStability} annotation in their class
-   * declaration.
-   */
-  class InterfaceStabilityAnnotatedClassFilter implements ClassFinder.ClassFilter {
-    @Override
-    public boolean isCandidateClass(Class<?> c) {
-      if (getAnnotation(c) != null) {
-        // class itself has a declared annotation.
-        return true;
-      }
-      return false;
-    }
-
-    protected Class<?> getAnnotation(Class<?> c) {
-      // we should get only declared annotations, not inherited ones
-      Annotation[] anns = c.getDeclaredAnnotations();
-
-      for (Annotation ann : anns) {
-        // Hadoop clearly got it wrong for not making the annotation values (private, public, ..)
-        // an enum instead we have three independent annotations!
-        Class<?> type = ann.annotationType();
-        if (isInterfaceStabilityClass(type)) {
-          return type;
-        }
-      }
-      return null;
-    }
-  }
-
-  /**
-   * Selects classes with one of the {@link InterfaceAudience.Public} annotation in their class
-   * declaration.
-   */
-  class InterfaceAudiencePublicAnnotatedClassFilter extends InterfaceAudienceAnnotatedClassFilter {
-    @Override
-    public boolean isCandidateClass(Class<?> c) {
-      Annotation ann = getAnnotation(c);
-      return ann != null && InterfaceAudience.Public.class.equals(ann.annotationType());
-    }
-  }
-
-  /**
-   * Selects classes with one of the {@link InterfaceAudience.LimitedPrivate} annotation in their
-   * class declaration.
-   */
-  class InterfaceAudienceLimitedPrivateAnnotatedNotConfigClassFilter
-      extends InterfaceAudienceAnnotatedClassFilter {
-    @Override
-    public boolean isCandidateClass(Class<?> c) {
-      Annotation ann = getAnnotation(c);
-      if (ann == null || !InterfaceAudience.LimitedPrivate.class.equals(ann.annotationType())) {
-        return false;
-      }
-      InterfaceAudience.LimitedPrivate iaAnn = (InterfaceAudience.LimitedPrivate) ann;
-      return iaAnn.value().length == 0 || !HBaseInterfaceAudience.CONFIG.equals(iaAnn.value()[0]);
-    }
-  }
-
-  /**
-   * Selects InterfaceAudience or InterfaceStability classes. Don't go meta!!!
-   */
-  class IsInterfaceStabilityClassFilter implements ClassFinder.ClassFilter {
-    @Override
-    public boolean isCandidateClass(Class<?> c) {
-      return
-          isInterfaceAudienceClass(c) ||
-          isInterfaceStabilityClass(c);
-    }
-  }
-
-  private boolean isInterfaceAudienceClass(Class<?> c) {
-    return
-        c.equals(InterfaceAudience.Public.class) ||
-        c.equals(InterfaceAudience.Private.class) ||
-        c.equals(InterfaceAudience.LimitedPrivate.class);
-  }
-
-  private boolean isInterfaceStabilityClass(Class<?> c) {
-    return
-        c.equals(InterfaceStability.Stable.class) ||
-        c.equals(InterfaceStability.Unstable.class) ||
-        c.equals(InterfaceStability.Evolving.class);
-  }
-
-  private boolean isInterfacePrivateMethod(Method m) {
-    if(m.getDeclaredAnnotations().length > 0) {
-      for(Annotation ann : m.getDeclaredAnnotations()) {
-        if(ann.annotationType().equals(InterfaceAudience.Private.class)) {
-          return true;
-        }
-      }
-    }
-    return false;
-  }
-
-  private boolean isInterfacePrivateContructor(Constructor<?> c) {
-    if(c.getDeclaredAnnotations().length > 0) {
-      for(Annotation ann : c.getDeclaredAnnotations()) {
-        if(ann.annotationType().equals(InterfaceAudience.Private.class)) {
-          return true;
-        }
-      }
-    }
-    return false;
-  }
-
-  /** Selects classes that are declared public */
-  static class PublicClassFilter implements ClassFinder.ClassFilter {
-    @Override
-    public boolean isCandidateClass(Class<?> c) {
-      int mod = c.getModifiers();
-      return Modifier.isPublic(mod);
-    }
-  }
-
-  /** Selects paths (jars and class dirs) only from the main code, not test classes */
-  static class MainCodeResourcePathFilter implements ClassFinder.ResourcePathFilter {
-    @Override
-    public boolean isCandidatePath(String resourcePath, boolean isJar) {
-      return !resourcePath.contains("test-classes") &&
-          !resourcePath.contains("tests.jar");
-    }
-  }
-
-  /**
-   * Selects classes that appear to be source instrumentation from Clover.
-   * Clover generates instrumented code in order to calculate coverage. Part of the
-   * generated source is a static inner class on each source class.
-   *
-   * - has an enclosing class
-   * - enclosing class is not an interface
-   * - name starts with "__CLR"
-   */
-  static class CloverInstrumentationFilter implements ClassFinder.ClassFilter {
-    @Override
-    public boolean isCandidateClass(Class<?> clazz) {
-      boolean clover = false;
-      final Class<?> enclosing = clazz.getEnclosingClass();
-      if (enclosing != null) {
-        if (!(enclosing.isInterface())) {
-          clover = clazz.getSimpleName().startsWith("__CLR");
-        }
-      }
-      return clover;
-    }
-  }
-
-  /**
-   * Checks whether all the classes in client and common modules contain
-   * {@link InterfaceAudience} annotations.
-   */
-  @Ignore @Test
-  public void testInterfaceAudienceAnnotation()
-      throws ClassNotFoundException, IOException, LinkageError {
-
-    // find classes that are:
-    // In the main jar
-    // AND are not in a hadoop-compat module
-    // AND are public
-    // NOT test classes
-    // AND NOT generated classes
-    // AND are NOT annotated with InterfaceAudience
-    // AND are NOT from Clover rewriting sources
-    ClassFinder classFinder = new ClassFinder(
-      new And(new MainCodeResourcePathFilter(),
-              new TestFileNameFilter()),
-      new Not((FileNameFilter)new TestFileNameFilter()),
-      new And(new PublicClassFilter(),
-              new Not(new TestClassFilter()),
-              new Not(new GeneratedClassFilter()),
-              new Not(new ShadedProtobufClassFilter()),
-              new Not(new IsInterfaceStabilityClassFilter()),
-              new Not(new InterfaceAudienceAnnotatedClassFilter()),
-              new Not(new CloverInstrumentationFilter()))
-    );
-
-    Set<Class<?>> classes = classFinder.findClasses(false);
-    if (!classes.isEmpty()) {
-      LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:");
-      for (Class<?> clazz : classes) {
-        LOG.info(Objects.toString(clazz));
-      }
-    }
-
-    Assert.assertEquals("All classes should have @InterfaceAudience annotation",
-      0, classes.size());
-  }
-
-  /**
-   * Checks whether all the classes in client and common modules that are marked
-   * InterfaceAudience.Public do not have {@link InterfaceStability} annotations.
-   */
-  @Ignore @Test
-  public void testNoInterfaceStabilityAnnotationForPublicAPI()
-      throws ClassNotFoundException, IOException, LinkageError {
-
-    // find classes that are:
-    // In the main jar
-    // AND are not in a hadoop-compat module
-    // AND are public
-    // NOT test classes
-    // AND NOT generated classes
-    // AND are annotated with InterfaceAudience.Public
-    // AND annotated with InterfaceStability
-    ClassFinder classFinder = new ClassFinder(
-      new And(new MainCodeResourcePathFilter(),
-              new TestFileNameFilter()),
-      new Not((FileNameFilter)new TestFileNameFilter()),
-      new And(new PublicClassFilter(),
-              new Not(new TestClassFilter()),
-              new Not(new GeneratedClassFilter()),
-              new Not(new ShadedProtobufClassFilter()),
-              new InterfaceAudiencePublicAnnotatedClassFilter(),
-              new Not(new IsInterfaceStabilityClassFilter()),
-              new InterfaceStabilityAnnotatedClassFilter())
-    );
-
-    Set<Class<?>> classes = classFinder.findClasses(false);
-
-    if (!classes.isEmpty()) {
-      LOG.info("These are the @InterfaceAudience.Public classes that have @InterfaceStability " +
-          "annotation:");
-      for (Class<?> clazz : classes) {
-        LOG.info(Objects.toString(clazz));
-      }
-    }
-
-    Assert.assertEquals("All classes that are marked with @InterfaceAudience.Public should not "
-        + "have @InterfaceStability annotation",
-      0, classes.size());
-  }
-
-  /**
-   * Checks whether all the classes in client and common modules that are marked
-   * InterfaceAudience.Public do not have {@link InterfaceStability} annotations.
-   */
-  @Ignore
-  @Test
-  public void testInterfaceStabilityAnnotationForLimitedAPI()
-      throws ClassNotFoundException, IOException, LinkageError {
-
-    // find classes that are:
-    // In the main jar
-    // AND are not in a hadoop-compat module
-    // AND are public
-    // NOT test classes
-    // AND NOT generated classes
-    // AND are annotated with InterfaceAudience.LimitedPrivate
-    // AND NOT annotated with InterfaceStability
-    ClassFinder classFinder = new ClassFinder(
-      new And(new MainCodeResourcePathFilter(),
-              new TestFileNameFilter()),
-      new Not((FileNameFilter)new TestFileNameFilter()),
-      new And(new PublicClassFilter(),
-              new Not(new TestClassFilter()),
-              new Not(new GeneratedClassFilter()),
-              new Not(new ShadedProtobufClassFilter()),
-              new InterfaceAudienceLimitedPrivateAnnotatedNotConfigClassFilter(),
-              new Not(new IsInterfaceStabilityClassFilter()),
-              new Not(new InterfaceStabilityAnnotatedClassFilter()))
-    );
-
-    Set<Class<?>> classes = classFinder.findClasses(false);
-
-    if (!classes.isEmpty()) {
-      LOG.info("These are the @InterfaceAudience.LimitedPrivate classes that DO NOT " +
-          "have @InterfaceStability annotation:");
-      for (Class<?> clazz : classes) {
-        LOG.info(Objects.toString(clazz));
-      }
-    }
-    Assert.assertEquals("All classes that are marked with @InterfaceAudience.LimitedPrivate " +
-        "should have @InterfaceStability annotation",
-      0, classes.size());
-  }
-
-  @Ignore @Test
-  public void testProtosInReturnTypes() throws ClassNotFoundException, IOException, LinkageError {
-    Set<Class<?>> classes = findPublicClasses();
-    List<Pair<Class<?>, Method>> protosReturnType = new ArrayList<>();
-    for (Class<?> clazz : classes) {
-      findProtoInReturnType(clazz, protosReturnType);
-    }
-    if (protosReturnType.size() != 0) {
-      LOG.info("These are the methods that have Protos as the return type");
-      for (Pair<Class<?>, Method> pair : protosReturnType) {
-        LOG.info(pair.getFirst().getName() + " " + pair.getSecond().getName() + " "
-            + pair.getSecond().getReturnType().getName());
-      }
-    }
-
-    Assert.assertEquals("Public exposed methods should not have protos in return type", 0,
-      protosReturnType.size());
-  }
-
-  private Set<Class<?>> findPublicClasses()
-      throws ClassNotFoundException, IOException, LinkageError {
-    ClassFinder classFinder =
-        new ClassFinder(new And(new MainCodeResourcePathFilter(), new TestFileNameFilter()),
-            new Not((FileNameFilter) new TestFileNameFilter()),
-            new And(new PublicClassFilter(), new Not(new TestClassFilter()),
-                new Not(new GeneratedClassFilter()),
-                new Not(new ShadedProtobufClassFilter()),
-                new InterfaceAudiencePublicAnnotatedClassFilter()));
-    Set<Class<?>> classes = classFinder.findClasses(false);
-    return classes;
-  }
-
-  @Ignore @Test
-  public void testProtosInParamTypes() throws ClassNotFoundException, IOException, LinkageError {
-    Set<Class<?>> classes = findPublicClasses();
-    List<Triple<Class<?>, Method, Class<?>>> protosParamType = new ArrayList<>();
-    for (Class<?> clazz : classes) {
-      findProtoInParamType(clazz, protosParamType);
-    }
-
-    if (protosParamType.size() != 0) {
-      LOG.info("These are the methods that have Protos as the param type");
-      for (Triple<Class<?>, Method, Class<?>> pair : protosParamType) {
-        LOG.info(pair.getFirst().getName() + " " + pair.getSecond().getName() + " "
-            + pair.getThird().getName());
-      }
-    }
-
-    Assert.assertEquals("Public exposed methods should not have protos in param type", 0,
-      protosParamType.size());
-  }
-
-  @Ignore @Test
-  public void testProtosInConstructors() throws ClassNotFoundException, IOException, LinkageError {
-    Set<Class<?>> classes = findPublicClasses();
-    List<Class<?>> classList = new ArrayList<>();
-    for (Class<?> clazz : classes) {
-      Constructor<?>[] constructors = clazz.getConstructors();
-      for (Constructor<?> cons : constructors) {
-        if (!isInterfacePrivateContructor(cons)) {
-          Class<?>[] parameterTypes = cons.getParameterTypes();
-          for (Class<?> param : parameterTypes) {
-            if (param.getName().contains(HBASE_PROTOBUF)) {
-              classList.add(clazz);
-              break;
-            }
-          }
-        }
-      }
-    }
-
-    if (classList.size() != 0) {
-      LOG.info("These are the classes that have Protos in the constructor");
-      for (Class<?> clazz : classList) {
-        LOG.info(clazz.getName());
-      }
-    }
-
-    Assert.assertEquals("Public exposed classes should not have protos in constructors", 0,
-      classList.size());
-  }
-
-  private void findProtoInReturnType(Class<?> clazz,
-      List<Pair<Class<?>, Method>> protosReturnType) {
-    Pair<Class<?>, Method> returnTypePair = new Pair<>();
-    Method[] methods = clazz.getMethods();
-    returnTypePair.setFirst(clazz);
-    for (Method method : methods) {
-      if (clazz.isInterface() || method.getModifiers() == Modifier.PUBLIC) {
-        if (!isInterfacePrivateMethod(method)) {
-          Class<?> returnType = method.getReturnType();
-          if (returnType.getName().contains(HBASE_PROTOBUF)) {
-            returnTypePair.setSecond(method);
-            protosReturnType.add(returnTypePair);
-            continue;
-          }
-        }
-      }
-    }
-  }
-
-  private void findProtoInParamType(Class<?> clazz,
-      List<Triple<Class<?>, Method, Class<?>>> protosParamType) {
-    Triple<Class<?>, Method, Class<?>> paramType = new Triple<>();
-    Method[] methods = clazz.getMethods();
-    paramType.setFirst(clazz);
-    for (Method method : methods) {
-      if (clazz.isInterface() || method.getModifiers() == Modifier.PUBLIC) {
-        if (!isInterfacePrivateMethod(method)) {
-          Class<?>[] parameters = method.getParameterTypes();
-          for (Class<?> param : parameters) {
-            if (param.getName().contains(HBASE_PROTOBUF)) {
-              paramType.setSecond(method);
-              paramType.setThird(param);
-              protosParamType.add(paramType);
-              break;
-            }
-          }
-        }
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-common/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index 9d2e2cb..1e2971a 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -132,6 +132,10 @@
             </excludes>
           </configuration>
         </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
index d52c67d..89386f4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
@@ -24,10 +24,12 @@ import org.apache.htrace.core.Span;
 import org.apache.htrace.core.SpanReceiver;
 import org.apache.htrace.core.TraceScope;
 import org.apache.htrace.core.Tracer;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This wrapper class provides functions for accessing htrace 4+ functionality in a simplified way.
  */
+@InterfaceAudience.Private
 public final class TraceUtil {
   private static HTraceConfiguration conf;
   private static Tracer tracer;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
index ab95b31..80ffa27 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
@@ -16,6 +16,9 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
@@ -25,7 +28,6 @@ import java.lang.reflect.Array;
 import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.Set;
-
 import javax.management.AttributeNotFoundException;
 import javax.management.InstanceNotFoundException;
 import javax.management.IntrospectionException;
@@ -41,17 +43,14 @@ import javax.management.RuntimeMBeanException;
 import javax.management.openmbean.CompositeData;
 import javax.management.openmbean.CompositeType;
 import javax.management.openmbean.TabularData;
-
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.core.JsonGenerationException;
-import com.fasterxml.jackson.core.JsonGenerator;
-
 /**
  * Utility for doing JSON and MBeans.
  */
+@InterfaceAudience.Private
 public class JSONBean {
   private static final Logger LOG = LoggerFactory.getLogger(JSONBean.class);
   private final JsonFactory jsonFactory;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
index b6c05b6..7bc2257e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONMetricUtil.java
@@ -17,6 +17,9 @@
  * */
 package org.apache.hadoop.hbase.util;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import java.beans.IntrospectionException;
 import java.io.IOException;
 import java.io.PrintWriter;
@@ -28,7 +31,6 @@ import java.lang.management.RuntimeMXBean;
 import java.util.Hashtable;
 import java.util.List;
 import java.util.Set;
-
 import javax.management.InstanceNotFoundException;
 import javax.management.MBeanAttributeInfo;
 import javax.management.MBeanInfo;
@@ -37,14 +39,11 @@ import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
 import javax.management.ReflectionException;
 import javax.management.openmbean.CompositeData;
-
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
+@InterfaceAudience.Private
 public final class JSONMetricUtil {
 
   private static final Logger LOG = LoggerFactory.getLogger(JSONMetricUtil.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-endpoint/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 1f4bad4..cd38f5c 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -65,6 +65,10 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index c5ab9ea..d1881376 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -65,6 +65,10 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
index 63d00fb..bcc9c0a 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/AsyncClientExample.java
@@ -23,7 +23,6 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.IntStream;
-
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.TableName;
@@ -36,12 +35,14 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * A simple example shows how to use asynchronous client.
  */
+@InterfaceAudience.Private
 public class AsyncClientExample extends Configured implements Tool {
 
   private static final Logger LOG = LoggerFactory.getLogger(AsyncClientExample.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
index 8e8a828..9c900d7 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/BufferedMutatorExample.java
@@ -28,7 +28,6 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
-
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.BufferedMutator;
@@ -40,12 +39,14 @@ import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * An example of using the {@link BufferedMutator} interface.
  */
+@InterfaceAudience.Private
 public class BufferedMutatorExample extends Configured implements Tool {
 
   private static final Logger LOG = LoggerFactory.getLogger(BufferedMutatorExample.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/ExportEndpointExample.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/ExportEndpointExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/ExportEndpointExample.java
index e15c993..d3ee2cc 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/ExportEndpointExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/ExportEndpointExample.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.client.example;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -28,14 +31,11 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.coprocessor.Export;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.coprocessor.Export;
 import org.apache.hadoop.hbase.util.Bytes;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A simple example on how to use {@link org.apache.hadoop.hbase.coprocessor.Export}.
@@ -45,7 +45,8 @@ import java.util.Map;
  * hbase-endpoint/src/main/protobuf/Export.proto.
  * </p>
  */
-public class ExportEndpointExample {
+@InterfaceAudience.Private
+public final class ExportEndpointExample {
 
   public static void main(String[] args) throws Throwable {
     int rowCount = 100;
@@ -83,4 +84,6 @@ public class ExportEndpointExample {
       System.out.println("total cells:" + totalOutputCells);
     }
   }
+
+  private ExportEndpointExample(){}
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
index 37bda0f..f9caf2b 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.Optional;
 import java.util.concurrent.ExecutionException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
@@ -31,6 +30,7 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.ipc.NettyRpcClientConfigHelper;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
@@ -71,6 +71,7 @@ import org.apache.hbase.thirdparty.io.netty.util.concurrent.GlobalEventExecutor;
  * Use HTTP GET to fetch data, and use HTTP PUT to put data. Encode the value as the request content
  * when doing PUT.
  */
+@InterfaceAudience.Private
 public class HttpProxyExample {
 
   private final EventLoopGroup bossGroup = new NioEventLoopGroup(1);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
index ae89e64..2c98c18 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
@@ -29,7 +29,6 @@ import java.util.concurrent.Future;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
-
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.Cell.Type;
@@ -48,6 +47,7 @@ import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -90,6 +90,7 @@ import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFacto
  * </ul>
  *
  */
+@InterfaceAudience.Private
 public class MultiThreadedClientExample extends Configured implements Tool {
   private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedClientExample.class);
   private static final int DEFAULT_NUM_OPERATIONS = 500000;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
index a829b2a..ead0af0 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/RefreshHFilesClient.java
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hbase.client.example;
 
+import java.io.Closeable;
+import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
@@ -29,16 +31,15 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
 import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.Closeable;
-import java.io.IOException;
-
 /**
  * This client class is for invoking the refresh HFile function deployed on the
  * Region Server side via the RefreshHFilesService.
  */
+@InterfaceAudience.Private
 public class RefreshHFilesClient implements Closeable {
   private static final Logger LOG = LoggerFactory.getLogger(RefreshHFilesClient.class);
   private final Connection connection;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index 4735b3d..e2681ae 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.coprocessor.example;
 
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -24,7 +27,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -48,13 +50,10 @@ import org.apache.hadoop.hbase.regionserver.OperationStatus;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
-
 /**
  * Defines a protocol to delete data in bulk based on a scan. The scan can be range scan or with
  * conditions(filters) etc.This can be used to delete rows, column family(s), column qualifier(s)
@@ -95,6 +94,7 @@ import com.google.protobuf.Service;
  * }
  * </code></pre>
  */
+@InterfaceAudience.Private
 public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCoprocessor {
   private static final String NO_OF_VERSIONS_TO_DELETE = "noOfVersionsToDelete";
   private static final Logger LOG = LoggerFactory.getLogger(BulkDeleteEndpoint.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.java
index f781a33..ada040f 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/DelegatingInternalScanner.java
@@ -19,14 +19,15 @@ package org.apache.hadoop.hbase.coprocessor.example;
 
 import java.io.IOException;
 import java.util.List;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A simple delegation for doing filtering on {@link InternalScanner}.
  */
+@InterfaceAudience.Private
 public class DelegatingInternalScanner implements InternalScanner {
 
   protected final InternalScanner scanner;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
index 87387a0..5fe920e 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleMasterObserverWithMetrics.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
 
 import java.io.IOException;
 import java.util.Optional;
-
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfo;
@@ -33,6 +32,7 @@ import org.apache.hadoop.hbase.metrics.Counter;
 import org.apache.hadoop.hbase.metrics.Gauge;
 import org.apache.hadoop.hbase.metrics.MetricRegistry;
 import org.apache.hadoop.hbase.metrics.Timer;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -47,6 +47,7 @@ import org.slf4j.LoggerFactory;
  * </p>
  * @see ExampleRegionObserverWithMetrics
  */
+@InterfaceAudience.Private
 public class ExampleMasterObserverWithMetrics implements MasterCoprocessor, MasterObserver {
   @Override
   public Optional<MasterObserver> getMasterObserver() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java
index 4e31d22..ec33e08 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java
@@ -24,7 +24,6 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Optional;
 import java.util.concurrent.ThreadLocalRandom;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.client.Get;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * An example coprocessor that collects some metrics to demonstrate the usage of exporting custom
@@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
  *
  * @see ExampleMasterObserverWithMetrics
  */
+@InterfaceAudience.Private
 public class ExampleRegionObserverWithMetrics implements RegionCoprocessor {
 
   private Counter preGetCounter;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
index 60cb154..29fe90c 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RefreshHFilesEndpoint.java
@@ -21,7 +21,8 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import com.google.protobuf.RpcCallback;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.Service;
-
+import java.io.IOException;
+import java.util.Collections;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
@@ -29,12 +30,10 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos;
 import org.apache.hadoop.hbase.regionserver.Store;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.Collections;
-
 /**
  * Coprocessor endpoint to refresh HFiles on replica.
  * <p>
@@ -43,6 +42,7 @@ import java.util.Collections;
  * hbase-protocol/src/main/protobuf/RefreshHFiles.proto.
  * </p>
  */
+@InterfaceAudience.Private
 public class RefreshHFilesEndpoint extends RefreshHFilesProtos.RefreshHFilesService
   implements RegionCoprocessor {
   protected static final Logger LOG = LoggerFactory.getLogger(RefreshHFilesEndpoint.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
index ff7b43d..21d3b7a 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
@@ -18,11 +18,13 @@
 
 package org.apache.hadoop.hbase.coprocessor.example;
 
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -35,10 +37,7 @@ import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.util.Bytes;
-
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Sample coprocessor endpoint exposing a Service interface for counting rows and key values.
@@ -48,6 +47,7 @@ import com.google.protobuf.Service;
  * hbase-examples/src/main/protobuf/Examples.proto.
  * </p>
  */
+@InterfaceAudience.Private
 public class RowCountEndpoint extends ExampleProtos.RowCountService implements RegionCoprocessor {
   private RegionCoprocessorEnvironment env;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ScanModifyingObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ScanModifyingObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ScanModifyingObserver.java
index 942315c..1c323d2 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ScanModifyingObserver.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ScanModifyingObserver.java
@@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
 
 import java.io.IOException;
 import java.util.Optional;
-
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -26,11 +25,13 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A RegionObserver which modifies incoming Scan requests to include additional
  * columns than what the user actually requested.
  */
+@InterfaceAudience.Private
 public class ScanModifyingObserver implements RegionCoprocessor, RegionObserver {
 
   public static final String FAMILY_TO_ADD_KEY = "hbase.examples.coprocessor.scanmodifying.family";

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java
index 422f4c1..988b51b 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import java.io.IOException;
 import java.util.List;
 import java.util.Optional;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilder;
 import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -37,10 +36,12 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This RegionObserver replaces the values of Puts from one value to another on compaction.
  */
+@InterfaceAudience.Private
 public class ValueRewritingObserver implements RegionObserver, RegionCoprocessor {
   public static final String ORIGINAL_VALUE_KEY =
       "hbase.examples.coprocessor.value.rewrite.orig";

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java
index 01914b3..c7ebf0d 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java
@@ -26,7 +26,6 @@ import java.util.NavigableMap;
 import java.util.Optional;
 import java.util.TreeMap;
 import java.util.stream.IntStream;
-
 import org.apache.commons.lang3.mutable.MutableLong;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -52,6 +51,7 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.math.IntMath;
 
@@ -64,6 +64,7 @@ import org.apache.hbase.thirdparty.com.google.common.math.IntMath;
  * Notice that this is only an example so we do not handle most corner cases, for example, you must
  * provide a qualifier when doing a get.
  */
+@InterfaceAudience.Private
 public class WriteHeavyIncrementObserver implements RegionCoprocessor, RegionObserver {
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
index 9b1db69..12d7d17 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import java.io.IOException;
 import java.util.Optional;
 import java.util.OptionalLong;
-
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.framework.recipes.cache.ChildData;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTrack
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
 * This is an example showing how a RegionObserver could be configured via ZooKeeper in order to
@@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 * successful backup via ZK and instruct HBase that it can safely delete the data which has already
 * been backed up.
  */
+@InterfaceAudience.Private
 public class ZooKeeperScanPolicyObserver implements RegionCoprocessor, RegionObserver {
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
index 01e9ef3..3098ac2 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
 import java.util.TreeMap;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -33,6 +32,7 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Example map/reduce job to construct index tables that can be used to quickly
@@ -64,6 +64,7 @@ import org.apache.hadoop.util.ToolRunner;
  * This code was written against HBase 0.21 trunk.
  * </p>
  */
+@InterfaceAudience.Private
 public class IndexBuilder extends Configured implements Tool {
   /** the column family containing the indexed row key */
   public static final byte[] INDEX_COLUMN = Bytes.toBytes("INDEX");

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
index 18eb5a6..1248f87 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -35,6 +34,7 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Sample Uploader MapReduce
@@ -58,6 +58,7 @@ import org.apache.hadoop.util.ToolRunner;
  * <p>
  * This code was written against HBase 0.21 trunk.
  */
+@InterfaceAudience.Private
 public class SampleUploader extends Configured implements Tool {
 
   private static final String NAME = "SampleUploader";

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
index 706f82f..07486bf 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
@@ -31,13 +31,11 @@ import java.util.List;
 import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
-
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import javax.security.sasl.Sasl;
-
 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
 import org.apache.hadoop.hbase.thrift.generated.Hbase;
@@ -49,10 +47,12 @@ import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TSaslClientTransport;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * See the instructions under hbase-examples/README.txt
  */
+@InterfaceAudience.Private
 public class DemoClient {
 
     static protected int port;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
index 25fdc4a..56aadce 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.thrift;
 
-
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
@@ -31,12 +30,10 @@ import java.util.List;
 import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
-
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
-
 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
 import org.apache.hadoop.hbase.thrift.generated.Hbase;
@@ -48,6 +45,7 @@ import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.THttpClient;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSCredential;
 import org.ietf.jgss.GSSException;
@@ -58,6 +56,7 @@ import org.ietf.jgss.Oid;
 /**
  * See the instructions under hbase-examples/README.txt
  */
+@InterfaceAudience.Private
 public class HttpDoAsClient {
 
   static protected int port;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
index 666997e..7fbe2aa 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
@@ -24,15 +24,12 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import javax.security.sasl.Sasl;
-
 import org.apache.hadoop.hbase.HBaseConfiguration;
-
 import org.apache.hadoop.hbase.thrift2.generated.TColumnValue;
 import org.apache.hadoop.hbase.thrift2.generated.TGet;
 import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
@@ -44,7 +41,9 @@ import org.apache.thrift.transport.TFramedTransport;
 import org.apache.thrift.transport.TSaslClientTransport;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
+import org.apache.yetus.audience.InterfaceAudience;
 
+@InterfaceAudience.Private
 public class DemoClient {
 
   private static String host = "localhost";

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
index c063aa9..b1ec97e 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
@@ -19,14 +19,15 @@ package org.apache.hadoop.hbase.types;
 
 import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.CodedOutputStream;
+import java.io.IOException;
 import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
 import org.apache.hadoop.hbase.util.PositionedByteRange;
-
-import java.io.IOException;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * An example for using protobuf objects with {@link DataType} API.
  */
+@InterfaceAudience.Private
 public class PBCell extends PBType<CellProtos.Cell> {
   @Override
   public Class<CellProtos.Cell> encodedClass() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-external-blockcache/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-external-blockcache/pom.xml b/hbase-external-blockcache/pom.xml
index 777e583..ec5a88d 100644
--- a/hbase-external-blockcache/pom.xml
+++ b/hbase-external-blockcache/pom.xml
@@ -84,6 +84,10 @@
           <failOnViolation>true</failOnViolation>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>net.revelc.code</groupId>
+        <artifactId>warbucks-maven-plugin</artifactId>
+      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml
index f543e49..c8974a4 100644
--- a/hbase-hadoop-compat/pom.xml
+++ b/hbase-hadoop-compat/pom.xml
@@ -49,6 +49,10 @@
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-source-plugin</artifactId>
         </plugin>
+          <plugin>
+              <groupId>net.revelc.code</groupId>
+              <artifactId>warbucks-maven-plugin</artifactId>
+          </plugin>
         </plugins>
       <pluginManagement>
         <plugins>

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
index 5c1f103..d29e7bc 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilityFactory.java
@@ -21,12 +21,14 @@ package org.apache.hadoop.hbase;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * Class that will create many instances of classes provided by the hbase-hadoop{1|2}-compat jars.
  */
+@InterfaceAudience.Private
 public class CompatibilityFactory {
 
   private static final Logger LOG = LoggerFactory.getLogger(CompatibilitySingletonFactory.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
index 3dc3f49..0e633b8 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/CompatibilitySingletonFactory.java
@@ -23,6 +23,7 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.ServiceLoader;
 
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,6 +31,7 @@ import org.slf4j.LoggerFactory;
  *  Factory for classes supplied by hadoop compatibility modules.  Only one of each class will be
  *  created.
  */
+@InterfaceAudience.Private
 public class CompatibilitySingletonFactory extends CompatibilityFactory {
   public static enum SingletonStorage {
     INSTANCE;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOSource.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOSource.java
index 3f27747..c0a8519 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOSource.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOSource.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.io;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;
 
+@InterfaceAudience.Private
 public interface MetricsIOSource extends BaseSource {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapper.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapper.java
index ed07898..3ba8cd5 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapper.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/io/MetricsIOWrapper.java
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hbase.io;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
 public interface MetricsIOWrapper {
 
   long getChecksumFailures();

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
index 534331a..0833751 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
@@ -20,7 +20,9 @@
 package org.apache.hadoop.hbase.ipc;
 
 import org.apache.hadoop.hbase.metrics.ExceptionTrackingSource;
+import org.apache.yetus.audience.InterfaceAudience;
 
+@InterfaceAudience.Private
 public interface MetricsHBaseServerSource extends ExceptionTrackingSource {
   String AUTHORIZATION_SUCCESSES_NAME = "authorizationSuccesses";
   String AUTHORIZATION_SUCCESSES_DESC =

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactory.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactory.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactory.java
index e9a3348..7f1415a 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactory.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactory.java
@@ -19,6 +19,9 @@
 
 package org.apache.hadoop.hbase.ipc;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
 public abstract class MetricsHBaseServerSourceFactory {
   /**
    * The name of the metrics

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
index b272cd0..c80d1a9 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
@@ -19,6 +19,9 @@
 
 package org.apache.hadoop.hbase.ipc;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
 public interface MetricsHBaseServerWrapper {
   long getTotalQueueSize();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.java
index 4e4a9e0..92d447c 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsAssignmentManagerSource.java
@@ -20,7 +20,9 @@ package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.metrics.OperationMetrics;
+import org.apache.yetus.audience.InterfaceAudience;
 
+@InterfaceAudience.Private
 public interface MetricsAssignmentManagerSource extends BaseSource {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java
index 6cf942b..91dc71a 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;
 
+@InterfaceAudience.Private
 public interface MetricsMasterFileSystemSource extends BaseSource {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java
----------------------------------------------------------------------
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java
index 51a17a8..db4f25e 100644
--- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java
@@ -19,10 +19,12 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Interface that classes that expose metrics about the master will implement.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterProcSource extends BaseSource {
 
   /**


Mime
View raw message