hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From st...@apache.org
Subject svn commit: r1295710 [3/8] - in /hbase/trunk: bin/ src/main/java/org/apache/hadoop/hbase/ src/main/java/org/apache/hadoop/hbase/avro/ src/main/java/org/apache/hadoop/hbase/catalog/ src/main/java/org/apache/hadoop/hbase/client/ src/main/java/org/apache/...
Date Thu, 01 Mar 2012 17:53:33 GMT
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 
 /**
@@ -32,6 +34,8 @@ import org.apache.hadoop.hbase.Coprocess
  * @param <E> The {@link CoprocessorEnvironment} subclass applicable to the
  *     revelant Observer interface.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public class ObserverContext<E extends CoprocessorEnvironment> {
   private E env;
   private boolean bypass;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java Thu Mar  1 17:53:03 2012
@@ -20,10 +20,14 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface RegionCoprocessorEnvironment extends CoprocessorEnvironment {
   /** @return the region associated with this coprocessor */
   public HRegion getRegion();

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@ package org.apache.hadoop.hbase.coproces
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
@@ -45,6 +47,8 @@ import com.google.common.collect.Immutab
  * Coprocessors implement this interface to observe and mediate client actions
  * on the region.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface RegionObserver extends Coprocessor {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java Thu Mar  1 17:53:03 2012
@@ -20,9 +20,13 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface WALCoprocessorEnvironment extends CoprocessorEnvironment {
   /** @return reference to the region server services */
   public HLog getWAL();

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
@@ -37,6 +39,8 @@ import java.io.IOException;
  * Defines coprocessor hooks for interacting with operations on the
  * {@link org.apache.hadoop.hbase.regionserver.wal.HLog}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public interface WALObserver extends Coprocessor {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java Thu Mar  1 17:53:03 2012
@@ -24,6 +24,7 @@ import java.util.concurrent.atomic.Atomi
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Server;
 
 
@@ -53,6 +54,7 @@ import org.apache.hadoop.hbase.Server;
  * {@link #setListener(EventHandlerListener)}.
  * @see ExecutorService
  */
+@InterfaceAudience.Private
 public abstract class EventHandler implements Runnable, Comparable<Runnable> {
   private static final Log LOG = LogFactory.getLog(EventHandler.class);
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java Thu Mar  1 17:53:03 2012
@@ -38,6 +38,7 @@ import java.util.concurrent.atomic.Atomi
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.executor.EventHandler.EventHandlerListener;
 import org.apache.hadoop.hbase.executor.EventHandler.EventType;
 import org.apache.hadoop.hbase.monitoring.ThreadMonitoring;
@@ -61,6 +62,7 @@ import com.google.common.util.concurrent
  * with {@link #registerListener(EventHandler.EventType, EventHandler.EventHandlerListener)}.  Be sure
  * to deregister your listener when done via {@link #unregisterListener(EventHandler.EventType)}.
  */
+@InterfaceAudience.Private
 public class ExecutorService {
   private static final Log LOG = LogFactory.getLog(ExecutorService.class);
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/RegionTransitionData.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/RegionTransitionData.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/RegionTransitionData.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/executor/RegionTransitionData.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.executor.EventHandler.EventType;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -32,6 +33,7 @@ import org.apache.hadoop.io.Writable;
 /**
  * Data serialized into ZooKeeper for region transitions.
  */
+@InterfaceAudience.Private
 public class RegionTransitionData implements Writable {
   /**
    * Type of transition event (offline, opening, opened, closing, closed).

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java Thu Mar  1 17:53:03 2012
@@ -20,12 +20,16 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
  * A binary comparator which lexicographically compares against the specified
  * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class BinaryComparator extends WritableByteArrayComparable {
 
   /** Nullary constructor for Writable, do not use */

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -27,6 +29,8 @@ import org.apache.hadoop.hbase.util.Byte
  * up to the length of this byte array. For the rest it is similar to
  * {@link BinaryComparator}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class BinaryPrefixComparator extends WritableByteArrayComparable {
 
   /** Nullary constructor for Writable, do not use */

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java Thu Mar  1 17:53:03 2012
@@ -24,10 +24,15 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * A bit comparator which performs the specified bitwise operation on each of the bytes
  * with the specified byte array. Then returns whether the result is non-zero.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class BitComparator extends WritableByteArrayComparable {
 
   /** Nullary constructor for Writable, do not use */

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 
 import java.io.DataInput;
@@ -35,6 +37,8 @@ import com.google.common.base.Preconditi
  * its quota of columns, {@link #filterAllRemaining()} returns true.  This
  * makes this filter unsuitable as a Scan filter.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ColumnCountGetFilter extends FilterBase {
   private int limit = 0;
   private int count = 0;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java Thu Mar  1 17:53:03 2012
@@ -24,6 +24,8 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import com.google.common.base.Preconditions;
 
@@ -32,6 +34,8 @@ import com.google.common.base.Preconditi
  * This filter can be used for row-based indexing, where references to other tables are stored across many columns,
  * in order to efficient lookups and paginated results for end users.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ColumnPaginationFilter extends FilterBase
 {
   private int limit = 0;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -35,6 +37,8 @@ import com.google.common.base.Preconditi
  * a particular prefix. For example, if prefix is 'an', it will pass keys with
  * columns like 'and', 'anti' but not keys with columns like 'ball', 'act'.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ColumnPrefixFilter extends FilterBase {
   protected byte [] prefix = null;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -42,6 +44,8 @@ import com.google.common.base.Preconditi
  * minColumnInclusive and maxColumnInclusive specify if the ranges are inclusive
  * or not.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ColumnRangeFilter extends FilterBase {
   protected byte[] minColumn = null;
   protected boolean minColumnInclusive = true;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 
 import java.io.DataInput;
@@ -43,6 +45,8 @@ import com.google.common.base.Preconditi
  * <p>
  * Multiple filters can be combined using {@link FilterList}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class CompareFilter extends FilterBase {
 
   /** Comparison operators. */

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java Thu Mar  1 17:53:03 2012
@@ -28,6 +28,8 @@ import java.util.List;
 import java.util.Set;
 import java.util.ArrayList;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -40,6 +42,8 @@ import com.google.common.base.Preconditi
  * Not compatible with Scan.setBatch as operations need 
  * full rows for correct filtering 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class DependentColumnFilter extends CompareFilter {
 
   protected byte[] columnFamily;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 
 import java.util.ArrayList;
@@ -37,6 +39,8 @@ import java.util.ArrayList;
  * If an already known column family is looked for, use {@link org.apache.hadoop.hbase.client.Get#addFamily(byte[])}
  * directly rather than a filter.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class FamilyFilter extends CompareFilter {
   /**
    * Writable constructor, do not use.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/Filter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/Filter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/Filter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.io.Writable;
 
@@ -48,6 +50,8 @@ import java.util.List;
  * 
  * @see FilterBase
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public interface Filter extends Writable {
   /**
    * Reset the state of the filter between rows.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 
 import java.util.List;
@@ -32,6 +34,8 @@ import java.util.ArrayList;
  * If you could instantiate FilterBase, it would end up being a "null" filter -
  * that is one that never filters anything.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class FilterBase implements Filter {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
@@ -41,6 +43,8 @@ import java.util.List;
  * Defaults to {@link Operator#MUST_PASS_ALL}.
  * <p>TODO: Fix creation of Configuration on serialization and deserialization.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class FilterList implements Filter {
   /** set operator */
   public static enum Operator {

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 
 import java.io.DataOutput;
@@ -34,6 +36,8 @@ import com.google.common.base.Preconditi
  * <p>
  * This filter can be used to more efficiently perform row count operations.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class FirstKeyOnlyFilter extends FilterBase {
   private boolean foundKV = false;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -37,6 +39,8 @@ import com.google.common.base.Preconditi
  *
  * Use this filter to include the stop row, eg: [A,Z].
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class InclusiveStopFilter extends FilterBase {
   private byte [] stopRowKey;
   private boolean done = false;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java Thu Mar  1 17:53:03 2012
@@ -19,9 +19,14 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Used to indicate a filter incompatibility
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class IncompatibleFilterException extends RuntimeException {
   private static final long serialVersionUID = 3236763276623198231L;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java Thu Mar  1 17:53:03 2012
@@ -19,9 +19,14 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * Used to indicate an invalid RowFilter.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class InvalidRowFilterException extends RuntimeException {
   private static final long serialVersionUID = 2667894046345657865L;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,9 @@ package org.apache.hadoop.hbase.filter;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -36,6 +39,8 @@ import com.google.common.base.Preconditi
  * This filter can be used to grab all of the keys without having to also grab
  * the values.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class KeyOnlyFilter extends FilterBase {
 
   boolean lenAsVal;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java Thu Mar  1 17:53:03 2012
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -33,6 +35,8 @@ import java.util.ArrayList;
  * a particular prefix. For example, if prefix is 'an', it will pass keys will
  * columns like 'and', 'anti' but not keys with columns like 'ball', 'act'.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class MultipleColumnPrefixFilter extends FilterBase {
   protected byte [] hint = null;
   protected TreeSet<byte []> sortedPrefixes = createTreeSet();

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java Thu Mar  1 17:53:03 2012
@@ -20,10 +20,15 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * A binary comparator which lexicographically compares against the specified
  * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class NullComparator extends WritableByteArrayComparable {
 
   /** Nullary constructor for Writable, do not use */

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 
 import java.io.DataInput;
@@ -39,6 +41,8 @@ import com.google.common.base.Preconditi
  * individual HRegions by making sure that the page size is never exceeded
  * locally.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class PageFilter extends FilterBase {
   private long pageSize = Long.MAX_VALUE;
   private int rowsAccepted = 0;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.ipc.HRegionInterface;
 import org.apache.hadoop.hbase.util.Bytes;
 import java.nio.ByteBuffer;
@@ -29,6 +31,8 @@ import org.apache.hadoop.hbase.filter.*;
  * ParseConstants holds a bunch of constants related to parsing Filter Strings
  * Used by {@link ParseFilter}
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public final class ParseConstants {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java Thu Mar  1 17:53:03 2012
@@ -27,6 +27,8 @@ import java.util.*;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -40,6 +42,8 @@ import org.apache.hadoop.hbase.util.Byte
  * This class addresses the HBASE-4168 JIRA. More documentaton on this
  * Filter Language can be found at: https://issues.apache.org/jira/browse/HBASE-4176
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ParseFilter {
   private static final Log LOG = LogFactory.getLog(ParseFilter.class);
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -34,6 +36,8 @@ import com.google.common.base.Preconditi
 /**
  * Pass results that have same row prefix.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class PrefixFilter extends FilterBase {
   protected byte [] prefix = null;
   protected boolean passedPrefix = false;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Get;
 
@@ -38,6 +40,8 @@ import java.util.ArrayList;
  * If an already known column qualifier is looked for, use {@link Get#addColumn}
  * directly rather than a filter.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class QualifierFilter extends CompareFilter {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java Thu Mar  1 17:53:03 2012
@@ -25,12 +25,16 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Random;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 
 /**
  * A filter that includes rows based on a chance.
  * 
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class RandomRowFilter extends FilterBase {
   protected static final Random random = new Random();
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -54,6 +56,8 @@ import java.util.regex.Pattern;
  *         "{3}[\\d]{1,3})?)(\\/[0-9]+)?"));
  * </pre>
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class RegexStringComparator extends WritableByteArrayComparable {
 
   private static final Log LOG = LogFactory.getLog(RegexStringComparator.class);

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
 
@@ -37,6 +39,8 @@ import org.apache.hadoop.hbase.client.Sc
  * If an already known row range needs to be scanned, use {@link Scan} start
  * and stop rows directly rather than a filter.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class RowFilter extends CompareFilter {
 
   private boolean filterOutRow = false;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 
@@ -31,6 +33,8 @@ import java.util.ArrayList;
  * {@link SingleColumnValueFilter}, if the tested column value is not actually
  * needed as input (besides for the filtering itself).
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase.filter;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -64,6 +66,8 @@ import com.google.common.base.Preconditi
  * <p>
  * To filter based on the value of all scanned columns, use {@link ValueFilter}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SingleColumnValueFilter extends FilterBase {
   static final Log LOG = LogFactory.getLog(SingleColumnValueFilter.class);
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 
 import java.io.DataInput;
@@ -45,6 +47,8 @@ import java.util.List;
  * Without this filter, the other non-zero valued columns in the row would still
  * be emitted.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SkipFilter extends FilterBase {
   private boolean filterRow = false;
   private Filter filter;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.util.Bytes;
 
 import java.io.DataInput;
@@ -40,6 +42,8 @@ import java.io.IOException;
  *     new SubstringComparator("substr"));
  * </pre>
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class SubstringComparator extends WritableByteArrayComparable {
 
   private String substr;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java Thu Mar  1 17:53:03 2012
@@ -25,6 +25,8 @@ import java.util.List;
 import java.util.TreeSet;
 import java.util.ArrayList;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 import com.google.common.base.Preconditions;
 
@@ -37,6 +39,8 @@ import com.google.common.base.Preconditi
  * {@link org.apache.hadoop.hbase.client.Scan#setTimeRange(long, long)}, {@link org.apache.hadoop.hbase.client.Get#setTimeStamp(long)},
  * or {@link org.apache.hadoop.hbase.client.Scan#setTimeStamp(long)}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class TimestampsFilter extends FilterBase {
 
   TreeSet<Long> timestamps;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 
 import java.util.ArrayList;
@@ -37,6 +39,8 @@ import java.util.ArrayList;
  * To test the value of a single qualifier when scanning multiple qualifiers,
  * use {@link SingleColumnValueFilter}.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ValueFilter extends CompareFilter {
 
   /**

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue;
 
 import java.io.DataInput;
@@ -35,6 +37,8 @@ import java.util.List;
  * {@link org.apache.hadoop.hbase.filter.Filter#filterAllRemaining()} methods
  * returns true.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class WhileMatchFilter extends FilterBase {
   private boolean filterAllRemaining = false;
   private Filter filter;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
 
@@ -27,6 +29,8 @@ import java.io.DataOutput;
 import java.io.IOException;
 
 /** Base class, combines Comparable<byte []> and Writable. */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public abstract class WritableByteArrayComparable implements Writable, Comparable<byte[]> {
 
   byte[] value;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase.io;
 
 import java.util.*;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * A Static Interface.
  * Instead of having this code in the the HbaseMapWritable code, where it
@@ -29,6 +31,7 @@ import java.util.*;
  * it is put here in this static interface where the static final Maps are
  * loaded one time. Only byte[] and Cell are supported at this time.
  */
+@InterfaceAudience.Private
 public interface CodeToClassAndBack {
   /**
    * Static map that contains mapping from code to class

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/DataOutputOutputStream.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/DataOutputOutputStream.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/DataOutputOutputStream.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/DataOutputOutputStream.java Thu Mar  1 17:53:03 2012
@@ -22,13 +22,11 @@ import java.io.IOException;
 import java.io.OutputStream;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 
 /**
  * OutputStream implementation that wraps a DataOutput.
  */
 @InterfaceAudience.Private
-@InterfaceStability.Unstable
 class DataOutputOutputStream extends OutputStream {
 
   private final DataOutput out;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/DoubleOutputStream.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/DoubleOutputStream.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/DoubleOutputStream.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/DoubleOutputStream.java Thu Mar  1 17:53:03 2012
@@ -22,12 +22,15 @@ package org.apache.hadoop.hbase.io;
 import java.io.IOException;
 import java.io.OutputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * An output stream that writes to two streams on each operation. Does not
  * attempt to handle exceptions gracefully. If any operation other than
  * {@link #close()} fails on the first stream, it is not called on the second
  * stream.
  */
+@InterfaceAudience.Private
 public class DoubleOutputStream extends OutputStream {
   private OutputStream out1;
   private OutputStream out2;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java Thu Mar  1 17:53:03 2012
@@ -24,6 +24,7 @@ import java.nio.ByteBuffer;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.KeyValue;
@@ -46,6 +47,7 @@ import org.apache.hadoop.hbase.util.Byte
  *
  * <p>This file is not splitable.  Calls to {@link #midkey()} return null.
  */
+@InterfaceAudience.Private
 public class HalfStoreFileReader extends StoreFile.Reader {
   final Log LOG = LogFactory.getLog(HalfStoreFileReader.class);
   final boolean top;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java Thu Mar  1 17:53:03 2012
@@ -30,6 +30,7 @@ import java.util.SortedMap;
 import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -45,6 +46,7 @@ import org.apache.hadoop.util.Reflection
  * @param <K> <byte []> key  TODO: Parameter K is never used, could be removed.
  * @param <V> value Expects a Writable or byte [].
  */
+@InterfaceAudience.Private
 public class HbaseMapWritable <K,V>
 implements SortedMap<byte[],V>, Configurable, Writable, CodeToClassAndBack{
   private AtomicReference<Configuration> conf = null;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java Thu Mar  1 17:53:03 2012
@@ -38,6 +38,7 @@ import java.util.NavigableSet;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -115,6 +116,7 @@ import com.google.protobuf.Message;
  * name and reflection to instantiate class was costing in excess of the cell
  * handling).
  */
+@InterfaceAudience.Private
 public class HbaseObjectWritable implements Writable, WritableWithSize, Configurable {
   protected final static Log LOG = LogFactory.getLog(HbaseObjectWritable.class);
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.io;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Implementations can be asked for an estimate of their size in bytes.
  * <p>
@@ -37,6 +39,7 @@ package org.apache.hadoop.hbase.io;
  * }
  * </pre>
  */
+@InterfaceAudience.Private
 public interface HeapSize {
   /**
    * @return Approximate 'exclusive deep size' of implementing object.  Includes

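Since HeapSize is a one-method contract (long heapSize()), a minimal sketch of an implementor may help; the class name, field, and overhead constants below are assumptions for illustration (real HBase code computes them with org.apache.hadoop.hbase.util.ClassSize):

    import org.apache.hadoop.hbase.io.HeapSize;

    // Hypothetical implementor reporting an approximate exclusive deep size.
    class CachedEntry implements HeapSize {
      private final byte[] payload;

      CachedEntry(byte[] payload) {
        this.payload = payload;
      }

      @Override
      public long heapSize() {
        // rough estimate: object header + array reference + array header
        // + payload bytes (illustrative constants only)
        return 16L + 8L + 16L + payload.length;
      }
    }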
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java Thu Mar  1 17:53:03 2012
@@ -26,6 +26,8 @@ import java.io.DataOutput;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
@@ -39,6 +41,8 @@ import org.apache.hadoop.io.WritableComp
  * the underlying byte [] is not copied, just referenced.  The backing
  * buffer is accessed when we go to serialize.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class ImmutableBytesWritable
 implements WritableComparable<ImmutableBytesWritable> {
   private byte[] bytes;

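A short usage sketch of the no-copy behavior the javadoc describes (values illustrative):

    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.util.Bytes;

    public class IbwExample {
      public static void main(String[] args) {
        byte[] row = Bytes.toBytes("row-1");
        // The constructor keeps a reference to 'row' rather than copying
        // it; the backing array is only read at serialization time.
        ImmutableBytesWritable key = new ImmutableBytesWritable(row);
        System.out.println(key.getLength());  // 5
      }
    }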
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/Reference.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/Reference.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/Reference.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/Reference.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -48,6 +49,7 @@ import org.apache.hadoop.io.Writable;
  * Note, a region is itself not splittable if it has instances of store file
  * references.  References are cleaned up by compactions.
  */
+@InterfaceAudience.Private
 public class Reference implements Writable {
   private byte [] splitkey;
   private Range region;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java Thu Mar  1 17:53:03 2012
@@ -26,6 +26,8 @@ import java.io.IOException;
 
 import org.apache.hadoop.io.Writable;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -36,6 +38,8 @@ import org.apache.hadoop.hbase.util.Byte
  * <p>
  * Only used internally; should not be accessed directly by clients.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class TimeRange implements Writable {
   private long minStamp = 0L;
   private long maxStamp = Long.MAX_VALUE;

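Because TimeRange is internal, clients normally set one indirectly through the client API; a minimal sketch, assuming the standard Scan.setTimeRange(min, max) method:

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Scan;

    public class TimeRangeExample {
      public static void main(String[] args) throws IOException {
        Scan scan = new Scan();
        // Select cells with timestamps in [1000, 2000):
        // minStamp inclusive, maxStamp exclusive.
        scan.setTimeRange(1000L, 2000L);
      }
    }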
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java Thu Mar  1 17:53:03 2012
@@ -20,9 +20,12 @@
 
 package org.apache.hadoop.hbase.io;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * An optional interface to 'size' writables.
  */
+@InterfaceAudience.Private
 public interface WritableWithSize {
   /**
    * Provide a size hint to the caller. write() should ideally

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.SamePrefixComparator;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
@@ -31,6 +32,7 @@ import org.apache.hadoop.io.WritableUtil
 /**
  * Base class for all data block encoders that use a buffer.
  */
+@InterfaceAudience.Private
 abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
 
   private static int INITIAL_KEY_BUFFER_SIZE = 512;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java Thu Mar  1 17:53:03 2012
@@ -18,12 +18,14 @@ package org.apache.hadoop.hbase.io.encod
 
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 
 /**
  * Stores the state of the data block encoder at the beginning of a new key.
  */
+@InterfaceAudience.Private
 class CompressionState {
   int keyLength;
   int valueLength;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.RawComparator;
@@ -29,6 +30,7 @@ import org.apache.hadoop.io.RawComparato
  * Just copy data, do not do any kind of compression. Use for comparison and
  * benchmarking.
  */
+@InterfaceAudience.Private
 public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder {
   @Override
   public void compressKeyValues(DataOutputStream out,

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.io.RawComparator;
 
@@ -34,6 +35,7 @@ import org.apache.hadoop.io.RawComparato
  * </ul>
  * It is designed to work fast enough to be feasible as in-memory compression.
  */
+@InterfaceAudience.Private
 public interface DataBlockEncoder {
   /**
    * Compress KeyValues and write them to output buffer.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.util.Byte
  * want to add a new algorithm/version, assign it a new id. Announce the new id
  * on the HBase mailing list to prevent collisions.
  */
+@InterfaceAudience.Private
 public enum DataBlockEncoding {
 
   /** Disable data block encoding. */

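A hedged sketch of choosing one of these encodings for a column family, assuming the HColumnDescriptor.setDataBlockEncoding setter from this era of the API:

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

    public class EncodingExample {
      public static void main(String[] args) {
        HColumnDescriptor family = new HColumnDescriptor("cf");
        // Delta-encode keys by shared prefix; DataBlockEncoding.NONE
        // keeps blocks unencoded.
        family.setDataBlockEncoding(DataBlockEncoding.PREFIX);
      }
    }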
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -47,6 +48,7 @@ import org.apache.hadoop.io.RawComparato
  * - 1 byte:    type (only if FLAG_SAME_TYPE is not set in the flag)
  * - ... bytes: value
  */
+@InterfaceAudience.Private
 public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder {
   static final int FLAG_SAME_KEY_LENGTH = 1;
   static final int FLAG_SAME_VALUE_LENGTH = 1 << 1;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java Thu Mar  1 17:53:03 2012
@@ -25,6 +25,7 @@ import java.nio.ByteBuffer;
 import java.util.Iterator;
 
 import org.apache.commons.lang.NotImplementedException;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.io.compress.Compressor;
 
@@ -32,6 +33,7 @@ import org.apache.hadoop.io.compress.Com
  * Encapsulates a data block compressed using a particular encoding algorithm.
  * Useful for testing and benchmarking.
  */
+@InterfaceAudience.Private
 public class EncodedDataBlock {
   private static final int BUFFER_SIZE = 4 * 1024;
   protected DataBlockEncoder dataBlockEncoder;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/EncoderBufferTooSmallException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/EncoderBufferTooSmallException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/EncoderBufferTooSmallException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/EncoderBufferTooSmallException.java Thu Mar  1 17:53:03 2012
@@ -16,9 +16,12 @@
  */
 package org.apache.hadoop.hbase.io.encoding;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Internal error which indicates a bug in a data block encoding algorithm.
  */
+@InterfaceAudience.Private
 public class EncoderBufferTooSmallException extends RuntimeException {
   private static final long serialVersionUID = 4767495176134878737L;
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -52,6 +53,7 @@ import org.apache.hadoop.io.RawComparato
  * - ... bytes: value (only if FLAG_SAME_VALUE is not set in the flag)
  *
  */
+@InterfaceAudience.Private
 public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder {
   final int MASK_TIMESTAMP_LENGTH = (1 << 0) | (1 << 1) | (1 << 2);
   final int SHIFT_TIMESTAMP_LENGTH = 0;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -40,6 +41,7 @@ import org.apache.hadoop.io.RawComparato
  * In the worst case, a compressed KeyValue will be three bytes longer than the original.
  *
  */
+@InterfaceAudience.Private
 public class PrefixKeyDeltaEncoder extends BufferedDataBlockEncoder {
 
   private int addKV(int prevKeyOffset, DataOutputStream out,

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.KeyValue;
@@ -34,6 +35,7 @@ import org.apache.hadoop.io.RawComparato
 /**
  * Common functionality needed by all versions of {@link HFile} readers.
  */
+@InterfaceAudience.Private
 public abstract class AbstractHFileReader extends SchemaConfigured
     implements HFile.Reader {
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java Thu Mar  1 17:53:03 2012
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -39,6 +40,7 @@ import org.apache.hadoop.io.Writable;
 /**
  * Common functionality needed by all versions of {@link HFile} writers.
  */
+@InterfaceAudience.Private
 public abstract class AbstractHFileWriter extends SchemaConfigured
     implements HFile.Writer {
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java Thu Mar  1 17:53:03 2012
@@ -22,12 +22,14 @@ package org.apache.hadoop.hbase.io.hfile
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
 /**
  * Block cache interface. Anything that implements the {@link Cacheable}
  * interface can be put in the cache.
  */
+@InterfaceAudience.Private
 public interface BlockCache {
   /**
    * Add block to cache.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheColumnFamilySummary.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheColumnFamilySummary.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheColumnFamilySummary.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheColumnFamilySummary.java Thu Mar  1 17:53:03 2012
@@ -23,6 +23,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Writable;
 
@@ -34,6 +35,7 @@ import org.apache.hadoop.io.Writable;
  * the owning Table is included in the summarization.
  *
  */
+@InterfaceAudience.Private
 public class BlockCacheColumnFamilySummary implements Writable, Comparable<BlockCacheColumnFamilySummary> {
 
   private String table = "";

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java Thu Mar  1 17:53:03 2012
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -25,6 +26,7 @@ import org.apache.hadoop.hbase.util.Clas
 /**
  * Cache Key for use with implementations of {@link BlockCache}
  */
+@InterfaceAudience.Private
 public class BlockCacheKey implements HeapSize {
   private final String hfileName;
   private final long offset;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java Thu Mar  1 17:53:03 2012
@@ -25,6 +25,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -32,6 +33,7 @@ import org.apache.hadoop.hbase.util.Byte
  * must not be relied upon. The values in the enum appear in the order they
  * appear in a version 2 {@link HFile}.
  */
+@InterfaceAudience.Private
 public enum BlockType {
 
   // Scanned block section

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BoundedRangeFileInputStream.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BoundedRangeFileInputStream.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BoundedRangeFileInputStream.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/BoundedRangeFileInputStream.java Thu Mar  1 17:53:03 2012
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.io.hfile
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSDataInputStream;
 
 /**
@@ -29,6 +30,7 @@ import org.apache.hadoop.fs.FSDataInputS
  * would not interfere with each other.
  * Copied from hadoop-3315 tfile.
  */
+@InterfaceAudience.Private
 class BoundedRangeFileInputStream  extends InputStream {
 
   private FSDataInputStream in;

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java Thu Mar  1 17:53:03 2012
@@ -22,6 +22,7 @@ import java.lang.management.MemoryUsage;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -33,6 +34,7 @@ import org.apache.hadoop.util.StringUtil
 /**
  * Stores all of the cache objects and configuration for a single HFile.
  */
+@InterfaceAudience.Private
 public class CacheConfig {
   private static final Log LOG = LogFactory.getLog(CacheConfig.class.getName());
 

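For context, the heap fraction that CacheConfig's block cache uses is driven by configuration; a minimal sketch using the standard hfile.block.cache.size key (the 0.25 value is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class CacheConfigExample {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Give roughly 25% of the region server heap to the block cache.
        conf.setFloat("hfile.block.cache.size", 0.25f);
      }
    }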
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java Thu Mar  1 17:53:03 2012
@@ -21,9 +21,12 @@ package org.apache.hadoop.hbase.io.hfile
 
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Class that implements cache metrics.
  */
+@InterfaceAudience.Private
 public class CacheStats {
 
   /** Sliding window statistics. The number of metric periods to include in

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,8 @@
 package org.apache.hadoop.hbase.io.hfile;
 
 import java.nio.ByteBuffer;
+
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
 
@@ -33,6 +35,7 @@ import org.apache.hadoop.hbase.regionser
  * getSerializedLength() of 0.
  *
  */
+@InterfaceAudience.Private
 public interface Cacheable extends HeapSize {
   /**
   * Returns the length of the ByteBuffer required to serialize the object. If the

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java Thu Mar  1 17:53:03 2012
@@ -20,10 +20,13 @@ package org.apache.hadoop.hbase.io.hfile
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * Interface for a deserializer. Throws an IOException if the serialized data is
  * incomplete or wrong.
  * */
+@InterfaceAudience.Private
 public interface CacheableDeserializer<T extends Cacheable> {
   /**
    * Returns the deserialized object.

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java Thu Mar  1 17:53:03 2012
@@ -19,6 +19,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
@@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.util.Clas
  * either instantiating as in-memory or handling the transition from single
  * to multiple access.
  */
+@InterfaceAudience.Private
 public class CachedBlock implements HeapSize, Comparable<CachedBlock> {
 
   public final static long PER_BLOCK_OVERHEAD = ClassSize.align(

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlockQueue.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlockQueue.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlockQueue.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlockQueue.java Thu Mar  1 17:53:03 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io.hfile
 
 import com.google.common.collect.MinMaxPriorityQueue;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 
 /**
@@ -36,6 +37,7 @@ import org.apache.hadoop.hbase.io.HeapSi
  * <p>Objects used in this queue must implement {@link HeapSize} as well as
  * {@link Comparable}.
  */
+@InterfaceAudience.Private
 public class CachedBlockQueue implements HeapSize {
 
   private MinMaxPriorityQueue<CachedBlock> queue;

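The memory-bound queue idea can be sketched with the same Guava MinMaxPriorityQueue the class imports; this simplified, hypothetical version tracks a running byte total and, once over budget, admits a new entry only if it outranks the current minimum, which it then evicts:

    import com.google.common.collect.MinMaxPriorityQueue;

    // Simplified sketch only; not the actual CachedBlockQueue logic.
    class BoundedQueueSketch {

      static class Entry implements Comparable<Entry> {
        final long priority;
        final long bytes;  // stand-in for HeapSize.heapSize()

        Entry(long priority, long bytes) {
          this.priority = priority;
          this.bytes = bytes;
        }

        @Override
        public int compareTo(Entry other) {
          return Long.compare(this.priority, other.priority);
        }
      }

      private final MinMaxPriorityQueue<Entry> queue = MinMaxPriorityQueue.create();
      private final long maxSize;
      private long heapSize = 0;

      BoundedQueueSketch(long maxSize) {
        this.maxSize = maxSize;
      }

      void add(Entry e) {
        if (heapSize < maxSize) {
          queue.add(e);                        // under budget: always admit
          heapSize += e.bytes;
        } else {
          Entry smallest = queue.peekFirst();  // current minimum
          if (smallest != null && e.compareTo(smallest) > 0) {
            queue.add(e);
            heapSize += e.bytes;
            heapSize -= queue.pollFirst().bytes;  // evict the minimum
          }
        }
      }
    }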
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java Thu Mar  1 17:53:03 2012
@@ -25,6 +25,7 @@ import java.io.OutputStream;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.CodecPool;
@@ -41,6 +42,7 @@ import org.apache.hadoop.util.Reflection
  * Compression related stuff.
  * Copied from hadoop-3315 tfile.
  */
+@InterfaceAudience.Private
 public final class Compression {
   static final Log LOG = LogFactory.getLog(Compression.class);
 

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/DoubleBlockCache.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/DoubleBlockCache.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/DoubleBlockCache.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/DoubleBlockCache.java Thu Mar  1 17:53:03 2012
@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.slab.SlabCache;
@@ -37,6 +38,7 @@ import org.apache.hadoop.util.StringUtil
  * combined size and hits and misses of both caches.
  *
  **/
+@InterfaceAudience.Private
 public class DoubleBlockCache implements BlockCache, HeapSize {
 
   static final Log LOG = LogFactory.getLog(DoubleBlockCache.class.getName());


