Return-Path:
X-Original-To: apmail-hbase-commits-archive@www.apache.org
Delivered-To: apmail-hbase-commits-archive@www.apache.org
Received: from mail.apache.org (hermes.apache.org [140.211.11.3])
by minotaur.apache.org (Postfix) with SMTP id 2ADC49DDD
for ;
Thu, 1 Mar 2012 17:54:29 +0000 (UTC)
Received: (qmail 20512 invoked by uid 500); 1 Mar 2012 17:54:28 -0000
Delivered-To: apmail-hbase-commits-archive@hbase.apache.org
Received: (qmail 20483 invoked by uid 500); 1 Mar 2012 17:54:28 -0000
Mailing-List: contact commits-help@hbase.apache.org; run by ezmlm
Precedence: bulk
List-Help:
List-Unsubscribe:
List-Post:
List-Id:
Reply-To: dev@hbase.apache.org
Delivered-To: mailing list commits@hbase.apache.org
Received: (qmail 20466 invoked by uid 99); 1 Mar 2012 17:54:28 -0000
Received: from athena.apache.org (HELO athena.apache.org) (140.211.11.136)
by apache.org (qpsmtpd/0.29) with ESMTP; Thu, 01 Mar 2012 17:54:28 +0000
X-ASF-Spam-Status: No, hits=-2000.0 required=5.0
tests=ALL_TRUSTED,T_FRT_STOCK2
X-Spam-Check-By: apache.org
Received: from [140.211.11.4] (HELO eris.apache.org) (140.211.11.4)
by apache.org (qpsmtpd/0.29) with ESMTP; Thu, 01 Mar 2012 17:54:23 +0000
Received: from eris.apache.org (localhost [127.0.0.1])
by eris.apache.org (Postfix) with ESMTP id 79D342388A3F
for ; Thu, 1 Mar 2012 17:54:03 +0000 (UTC)
Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: svn commit: r1295710 [4/8] - in /hbase/trunk: bin/
src/main/java/org/apache/hadoop/hbase/
src/main/java/org/apache/hadoop/hbase/avro/
src/main/java/org/apache/hadoop/hbase/catalog/
src/main/java/org/apache/hadoop/hbase/client/ src/main/java/org/apache/...
Date: Thu, 01 Mar 2012 17:53:33 -0000
To: commits@hbase.apache.org
From: stack@apache.org
X-Mailer: svnmailer-1.0.8-patched
Message-Id: <20120301175403.79D342388A3F@eris.apache.org>
X-Virus-Checked: Checked by ClamAV on apache.org
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java Thu Mar 1 17:53:03 2012
@@ -29,6 +29,7 @@ import java.nio.ByteBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.RawComparator;
@@ -44,6 +45,7 @@ import com.google.common.io.NullOutputSt
* trailer size is fixed within a given {@link HFile} format version only, but
* we always store the version number as the last four-byte integer of the file.
*/
+@InterfaceAudience.Private
public class FixedFileTrailer {
private static final Log LOG = LogFactory.getLog(FixedFileTrailer.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java Thu Mar 1 17:53:03 2012
@@ -31,6 +31,7 @@ import java.util.concurrent.atomic.Atomi
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -117,6 +118,7 @@ import com.google.common.base.Preconditi
* that points at its file say for the case where an index lives apart from
* an HFile instance?
*/
+@InterfaceAudience.Private
public class HFile {
static final Log LOG = LogFactory.getLog(HFile.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java Thu Mar 1 17:53:03 2012
@@ -30,6 +30,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -74,6 +75,7 @@ import com.google.common.base.Preconditi
* The version 2 block representation in the block cache is the same as above,
* except that the data section is always uncompressed in the cache.
*/
+@InterfaceAudience.Private
public class HFileBlock extends SchemaConfigured implements Cacheable {
public static final boolean FILL_HEADER = true;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java Thu Mar 1 17:53:03 2012
@@ -34,6 +34,7 @@ import java.util.concurrent.atomic.Atomi
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.hbase.KeyValue;
@@ -57,6 +58,7 @@ import org.apache.hadoop.util.StringUtil
* to use the reader can be found in {@link HFileReaderV2} and
* TestHFileBlockIndex.
*/
+@InterfaceAudience.Private
public class HFileBlockIndex {
private static final Log LOG = LogFactory.getLog(HFileBlockIndex.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java Thu Mar 1 17:53:03 2012
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.io.hfile
import java.io.IOException;
import java.nio.ByteBuffer;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.Pair;
@@ -28,6 +29,7 @@ import org.apache.hadoop.hbase.util.Pair
* not set or the given block is not a data block (encoded or not), methods
* should just return the unmodified block.
*/
+@InterfaceAudience.Private
public interface HFileDataBlockEncoder {
/**
* Converts a block from the on-disk format to the in-cache format. Called in
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,7 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
@@ -34,6 +35,7 @@ import com.google.common.base.Preconditi
* Do different kinds of data block encoding according to column family
* options.
*/
+@InterfaceAudience.Private
public class HFileDataBlockEncoderImpl implements HFileDataBlockEncoder {
private final DataBlockEncoding onDisk;
private final DataBlockEncoding inCache;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java Thu Mar 1 17:53:03 2012
@@ -34,6 +34,8 @@ import org.apache.commons.cli.ParseExcep
import org.apache.commons.cli.PosixParser;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -53,6 +55,8 @@ import org.apache.hadoop.hbase.util.Writ
/**
* Implements pretty-printing functionality for {@link HFile}s.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public class HFilePrettyPrinter {
private static final Log LOG = LogFactory.getLog(HFilePrettyPrinter.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java Thu Mar 1 17:53:03 2012
@@ -27,6 +27,7 @@ import java.nio.ByteBuffer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
@@ -46,6 +47,7 @@ import com.google.common.base.Preconditi
* even in cache only, i.e. HFile v1 blocks are always brought into cache
* unencoded.
*/
+@InterfaceAudience.Private
public class HFileReaderV1 extends AbstractHFileReader {
private static final Log LOG = LogFactory.getLog(HFileReaderV1.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java Thu Mar 1 17:53:03 2012
@@ -27,6 +27,7 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
@@ -41,6 +42,7 @@ import org.apache.hadoop.io.WritableUtil
/**
* {@link HFile} reader for version 2.
*/
+@InterfaceAudience.Private
public class HFileReaderV2 extends AbstractHFileReader {
private static final Log LOG = LogFactory.getLog(HFileReaderV2.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java Thu Mar 1 17:53:03 2012
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.io.hfile
import java.io.IOException;
import java.nio.ByteBuffer;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.KeyValue;
/**
@@ -36,6 +37,7 @@ import org.apache.hadoop.hbase.KeyValue;
* you position the Scanner using the seekTo variants and then getKey and
* getValue.
*/
+@InterfaceAudience.Private
public interface HFileScanner {
/**
* SeekTo or just before the passed key
. Examine the return
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java Thu Mar 1 17:53:03 2012
@@ -29,6 +29,7 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -49,6 +50,7 @@ import org.apache.hadoop.io.compress.Com
/**
* Writes version 1 HFiles. Mainly used for testing backwards-compatibility.
*/
+@InterfaceAudience.Private
public class HFileWriterV1 extends AbstractHFileWriter {
/** Meta data block name for bloom filter parameters. */
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java Thu Mar 1 17:53:03 2012
@@ -28,6 +28,7 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -45,6 +46,7 @@ import org.apache.hadoop.io.WritableUtil
/**
* Writes HFile format version 2.
*/
+@InterfaceAudience.Private
public class HFileWriterV2 extends AbstractHFileWriter {
static final Log LOG = LogFactory.getLog(HFileWriterV2.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java Thu Mar 1 17:53:03 2012
@@ -22,11 +22,14 @@ package org.apache.hadoop.hbase.io.hfile
import java.io.DataOutput;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* A way to write "inline" blocks into an {@link HFile}. Inline blocks are
* interspersed with data blocks. For example, Bloom filter chunks and
* leaf-level blocks of a multi-level block index are stored as inline blocks.
*/
+@InterfaceAudience.Private
public interface InlineBlockWriter {
/**
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/InvalidHFileException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/InvalidHFileException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/InvalidHFileException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/InvalidHFileException.java Thu Mar 1 17:53:03 2012
@@ -19,9 +19,12 @@ package org.apache.hadoop.hbase.io.hfile
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* Thrown when an invalid HFile format is detected
*/
+@InterfaceAudience.Private
public class InvalidHFileException extends IOException {
private static final long serialVersionUID = 4660352028739861249L;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java Thu Mar 1 17:53:03 2012
@@ -39,6 +39,7 @@ import java.util.concurrent.locks.Reentr
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -91,6 +92,7 @@ import com.google.common.util.concurrent
* to free). It then uses the priority chunk sizes to evict fairly according
* to the relative sizes and usage.
*/
+@InterfaceAudience.Private
public class LruBlockCache implements BlockCache, HeapSize {
static final Log LOG = LogFactory.getLog(LruBlockCache.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java Thu Mar 1 17:53:03 2012
@@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.io.hfile
import java.nio.ByteBuffer;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.Pair;
@@ -25,6 +26,7 @@ import org.apache.hadoop.hbase.util.Pair
/**
* Does not perform any kind of encoding/decoding.
*/
+@InterfaceAudience.Private
public class NoOpDataBlockEncoder implements HFileDataBlockEncoder {
public static final NoOpDataBlockEncoder INSTANCE =
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/ReusableStreamGzipCodec.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/ReusableStreamGzipCodec.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/ReusableStreamGzipCodec.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/ReusableStreamGzipCodec.java Thu Mar 1 17:53:03 2012
@@ -24,6 +24,7 @@ import java.util.zip.GZIPOutputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.CompressorStream;
import org.apache.hadoop.io.compress.GzipCodec;
@@ -33,6 +34,7 @@ import org.apache.hadoop.io.compress.zli
* Fixes an inefficiency in Hadoop's Gzip codec, allowing to reuse compression
* streams.
*/
+@InterfaceAudience.Private
public class ReusableStreamGzipCodec extends GzipCodec {
private static final Log LOG = LogFactory.getLog(Compression.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/SimpleBlockCache.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/SimpleBlockCache.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/SimpleBlockCache.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/SimpleBlockCache.java Thu Mar 1 17:53:03 2012
@@ -25,12 +25,14 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
/**
* Simple one RFile soft reference cache.
*/
+@InterfaceAudience.Private
public class SimpleBlockCache implements BlockCache {
private static class Ref extends SoftReference {
public BlockCacheKey blockId;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SingleSizeCache.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SingleSizeCache.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SingleSizeCache.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SingleSizeCache.java Thu Mar 1 17:53:03 2012
@@ -26,6 +26,7 @@ import java.util.concurrent.atomic.Atomi
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -52,6 +53,7 @@ import com.google.common.collect.MapMake
* ConcurrentLinkedHashMap.
*
**/
+@InterfaceAudience.Private
public class SingleSizeCache implements BlockCache, HeapSize {
private final Slab backingStore;
private final ConcurrentMap backingMap;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/Slab.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/Slab.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/Slab.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/Slab.java Thu Mar 1 17:53:03 2012
@@ -25,6 +25,7 @@ import java.util.concurrent.LinkedBlocki
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.DirectMemoryUtils;
import com.google.common.base.Preconditions;
@@ -35,6 +36,7 @@ import com.google.common.base.Preconditi
* requisite size, then puts them all in a buffer.
**/
+@InterfaceAudience.Private
class Slab implements org.apache.hadoop.hbase.io.HeapSize {
static final Log LOG = LogFactory.getLog(Slab.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabCache.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabCache.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabCache.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabCache.java Thu Mar 1 17:53:03 2012
@@ -32,6 +32,7 @@ import java.util.concurrent.atomic.Atomi
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -52,6 +53,7 @@ import com.google.common.util.concurrent
* correct SingleSizeCache.
*
**/
+@InterfaceAudience.Private
public class SlabCache implements SlabItemActionWatcher, BlockCache, HeapSize {
private final ConcurrentHashMap backingStore;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabItemActionWatcher.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabItemActionWatcher.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabItemActionWatcher.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabItemActionWatcher.java Thu Mar 1 17:53:03 2012
@@ -20,11 +20,13 @@
package org.apache.hadoop.hbase.io.hfile.slab;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
/**
* Interface for objects that want to know when actions occur in a SingleSizeCache.
* */
+@InterfaceAudience.Private
interface SlabItemActionWatcher {
/**
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionHeader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionHeader.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionHeader.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionHeader.java Thu Mar 1 17:53:03 2012
@@ -25,12 +25,14 @@ import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.security.User;
/**
* The IPC connection header sent by the client to the server
* on connection establishment.
*/
+@InterfaceAudience.Private
class ConnectionHeader implements Writable {
protected String protocol;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorProtocol.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorProtocol.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorProtocol.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorProtocol.java Thu Mar 1 17:53:03 2012
@@ -19,6 +19,9 @@
*/
package org.apache.hadoop.hbase.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
/**
* All custom RPC protocols to be exported by Coprocessors must extend this interface.
*
@@ -34,6 +37,8 @@ package org.apache.hadoop.hbase.ipc;
*
*
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public interface CoprocessorProtocol extends VersionedProtocol {
public static final long VERSION = 1L;
}
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java Thu Mar 1 17:53:03 2012
@@ -21,9 +21,12 @@ package org.apache.hadoop.hbase.ipc;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* A call whose response can be delayed by the server.
*/
+@InterfaceAudience.Private
public interface Delayable {
/**
* Signal that the call response should be delayed, thus freeing the RPC
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.client.coprocessor.Exec;
@@ -36,6 +37,7 @@ import java.lang.reflect.Method;
* separate RPC call for each method invocation (using a
* {@link org.apache.hadoop.hbase.client.ServerCallable} instance).
*/
+@InterfaceAudience.Private
public class ExecRPCInvoker implements InvocationHandler {
// LOG is NOT in hbase subpackage intentionally so that the default HBase
// DEBUG log level does NOT emit RPC-level logging.
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java Thu Mar 1 17:53:03 2012
@@ -43,6 +43,7 @@ import javax.net.SocketFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.security.User;
@@ -66,6 +67,7 @@ import org.apache.hadoop.util.Reflection
*
* @see HBaseServer
*/
+@InterfaceAudience.Private
public class HBaseClient {
private static final Log LOG =
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java Thu Mar 1 17:53:03 2012
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
@@ -65,6 +66,7 @@ import java.util.Map;
* All methods in the protocol should throw only IOException. No field data of
* the protocol instance is transmitted.
*/
+@InterfaceAudience.Private
public class HBaseRPC {
// Leave this out in the hadoop ipc package but keep class name. Do this
// so that we don't get the logging of this class's invocations by doing our
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java Thu Mar 1 17:53:03 2012
@@ -20,9 +20,12 @@
package org.apache.hadoop.hbase.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* An interface for calling out of RPC for error conditions.
*/
+@InterfaceAudience.Private
public interface HBaseRPCErrorHandler {
/**
* Take actions on the event of an OutOfMemoryError.
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCStatistics.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCStatistics.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCStatistics.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCStatistics.java Thu Mar 1 17:53:03 2012
@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics.util.MBeanUtil;
import org.apache.hadoop.metrics.util.MetricsDynamicMBeanBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
@@ -30,6 +31,7 @@ import javax.management.ObjectName;
* Exports HBase RPC statistics recorded in {@link HBaseRpcMetrics} as an MBean
* for JMX monitoring.
*/
+@InterfaceAudience.Private
public class HBaseRPCStatistics extends MetricsDynamicMBeanBase {
private final ObjectName mbeanName;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java Thu Mar 1 17:53:03 2012
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.ipc.VersionedProtocol;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
@@ -43,6 +44,7 @@ import java.lang.reflect.Method;
* {@link #rpcQueueTime}.inc(time)
*
*/
+@InterfaceAudience.Private
public class HBaseRpcMetrics implements Updater {
public static final String NAME_DELIM = "$";
private final MetricsRegistry registry = new MetricsRegistry();
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java Thu Mar 1 17:53:03 2012
@@ -57,6 +57,7 @@ import java.util.concurrent.LinkedBlocki
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HbaseObjectWritable;
import org.apache.hadoop.hbase.io.WritableWithSize;
@@ -83,6 +84,7 @@ import com.google.common.util.concurrent
*
* @see HBaseClient
*/
+@InterfaceAudience.Private
public abstract class HBaseServer implements RpcServer {
/**
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java Thu Mar 1 17:53:03 2012
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase.ipc;
import java.io.IOException;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -42,6 +44,8 @@ import org.apache.hadoop.hbase.util.Pair
@KerberosInfo(
serverPrincipal = "hbase.master.kerberos.principal")
@TokenInfo("HBASE_AUTH_TOKEN")
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public interface HMasterInterface extends VersionedProtocol {
/**
* This Interface's version. Version changes when the Interface changes.
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.ipc;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.security.KerberosInfo;
@@ -34,6 +35,7 @@ import org.apache.hadoop.hbase.ipc.Versi
@KerberosInfo(
serverPrincipal = "hbase.master.kerberos.principal",
clientPrincipal = "hbase.regionserver.kerberos.principal")
+@InterfaceAudience.Private
public interface HMasterRegionInterface extends VersionedProtocol {
/**
* This Interface's version. Version changes when the Interface changes.
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java Thu Mar 1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.IOException;
import java.net.ConnectException;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HServerInfo;
@@ -61,6 +63,8 @@ import org.apache.hadoop.hbase.ipc.Versi
@KerberosInfo(
serverPrincipal = "hbase.regionserver.kerberos.principal")
@TokenInfo("HBASE_AUTH_TOKEN")
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public interface HRegionInterface extends VersionedProtocol, Stoppable, Abortable {
/**
* This Interface's version. Version changes when the Interface changes.
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java Thu Mar 1 17:53:03 2012
@@ -19,6 +19,7 @@
*/
package org.apache.hadoop.hbase.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HbaseObjectWritable;
@@ -32,6 +33,7 @@ import java.lang.reflect.Field;
import java.lang.reflect.Method;
/** A method invocation, including the method name and its parameters.*/
+@InterfaceAudience.Private
public class Invocation extends VersionedWritable implements Configurable {
protected String methodName;
@SuppressWarnings("rawtypes")
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ProtocolSignature.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ProtocolSignature.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ProtocolSignature.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ProtocolSignature.java Thu Mar 1 17:53:03 2012
@@ -25,10 +25,12 @@ import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashMap;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
import org.apache.hadoop.io.WritableFactory;
+@InterfaceAudience.Private
public class ProtocolSignature implements Writable {
static { // register a ctor
WritableFactories.setFactory
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RequestContext.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RequestContext.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RequestContext.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RequestContext.java Thu Mar 1 17:53:03 2012
@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.security.User;
import java.net.InetAddress;
@@ -30,6 +31,7 @@ import java.net.InetAddress;
* called outside the context of a RPC request, all values will be
* null.
*/
+@InterfaceAudience.Private
public class RequestContext {
private static ThreadLocal instance =
new ThreadLocal() {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ResponseFlag.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ResponseFlag.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ResponseFlag.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ResponseFlag.java Thu Mar 1 17:53:03 2012
@@ -17,10 +17,13 @@
*/
package org.apache.hadoop.hbase.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* Utility for managing the flag byte passed in response to a
* {@link HBaseServer.Call}
*/
+@InterfaceAudience.Private
class ResponseFlag {
private static final byte ERROR_BIT = 0x1;
private static final byte LENGTH_BIT = 0x2;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RpcEngine.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RpcEngine.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RpcEngine.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RpcEngine.java Thu Mar 1 17:53:03 2012
@@ -26,9 +26,11 @@ import javax.net.SocketFactory;
import org.apache.hadoop.hbase.ipc.VersionedProtocol;
import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
/** An RPC implementation. */
+@InterfaceAudience.Private
interface RpcEngine {
/** Construct a client-side proxy object. */
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java Thu Mar 1 17:53:03 2012
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.ipc;
import com.google.common.base.Function;
import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.ipc.VersionedProtocol;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
@@ -30,6 +31,7 @@ import java.net.InetSocketAddress;
/**
*/
+@InterfaceAudience.Private
public interface RpcServer {
void setSocketSendBufSize(int size);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java Thu Mar 1 17:53:03 2012
@@ -22,6 +22,10 @@ package org.apache.hadoop.hbase.ipc;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@SuppressWarnings("serial")
+@InterfaceAudience.Private
public class ServerNotRunningYetException extends IOException {
public ServerNotRunningYetException(String s) {
super(s);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Status.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Status.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Status.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/Status.java Thu Mar 1 17:53:03 2012
@@ -17,9 +17,12 @@
*/
package org.apache.hadoop.hbase.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
+
/**
* Status of a Hadoop IPC call.
*/
+@InterfaceAudience.Private
enum Status {
SUCCESS (0),
ERROR (1),
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/VersionedProtocol.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/VersionedProtocol.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/VersionedProtocol.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/VersionedProtocol.java Thu Mar 1 17:53:03 2012
@@ -20,11 +20,16 @@ package org.apache.hadoop.hbase.ipc;
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
/**
* Superclass of all protocols that use Hadoop RPC.
* Subclasses of this interface are also supposed to have
* a static final long versionID field.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
public interface VersionedProtocol {
/**
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java Thu Mar 1 17:53:03 2012
@@ -47,11 +47,13 @@ import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.hbase.ipc.VersionedProtocol;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.*;
import org.codehaus.jackson.map.ObjectMapper;
/** An RpcEngine implementation for Writable data. */
+@InterfaceAudience.Private
class WritableRpcEngine implements RpcEngine {
// LOG is NOT in hbase subpackage intentionally so that the default HBase
// DEBUG log level does NOT emit RPC-level logging.
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java Thu Mar 1 17:53:03 2012
@@ -24,6 +24,8 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
@@ -61,6 +63,8 @@ import org.apache.hadoop.io.Text;
* second a regex based or prefix based row filter to restrict the
* count operation to a limited subset of rows from the table.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class CellCounter {
private static final Log LOG =
LogFactory.getLog(CellCounter.class.getName());
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java Thu Mar 1 17:53:03 2012
@@ -19,6 +19,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
@@ -36,6 +38,8 @@ import java.util.Map;
* It is also configurable with a start and time as well as a specification
* of the region server implementation if different from the local cluster.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class CopyTable {
final static String NAME = "copytable";
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java Thu Mar 1 17:53:03 2012
@@ -19,6 +19,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication;
import org.apache.hadoop.util.ProgramDriver;
@@ -26,6 +28,8 @@ import org.apache.hadoop.util.ProgramDri
* Driver for hbase mapreduce jobs. Select which to run by passing
* name of job to this main.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class Driver {
/**
* @param args
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.mapreduc
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -45,6 +47,8 @@ import org.apache.commons.logging.LogFac
* Writes content to sequence files up in HDFS. Use {@link Import} to read it
* back in again.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class Export {
private static final Log LOG = LogFactory.getLog(Export.class);
final static String NAME = "export";
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java Thu Mar 1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.KeyValue;
@@ -36,6 +38,8 @@ import org.apache.hadoop.mapreduce.Job;
/**
* Extract grouping columns from input record.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class GroupingTableMapper
extends TableMapper implements Configurable {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java Thu Mar 1 17:53:03 2012
@@ -35,6 +35,8 @@ import java.util.UUID;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
@@ -71,6 +73,8 @@ import org.apache.hadoop.mapreduce.lib.o
* all HFiles being written.
* @see KeyValueSortReducer
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class HFileOutputFormat extends FileOutputFormat {
static Log LOG = LogFactory.getLog(HFileOutputFormat.class);
static final String COMPRESSION_CONF_KEY = "hbase.hfileoutputformat.families.compression";
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java Thu Mar 1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -44,6 +46,8 @@ import org.apache.hadoop.mapreduce.Parti
* @param <KEY>  The type of the key.
* @param <VALUE>  The type of the value.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class HRegionPartitioner
extends Partitioner
implements Configurable {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.mapreduc
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -29,6 +31,8 @@ import org.apache.hadoop.mapreduce.Job;
/**
* Pass the given key and record as-is to the reduce phase.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class IdentityTableMapper
extends TableMapper {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java Thu Mar 1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.OutputFormat;
@@ -48,6 +50,8 @@ import org.apache.hadoop.mapreduce.Outpu
* {@link org.apache.hadoop.hbase.client.Delete Delete} define the
* row and columns implicitly.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class IdentityTableReducer
extends TableReducer {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java Thu Mar 1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -43,6 +45,8 @@ import org.apache.hadoop.util.GenericOpt
/**
* Import data written by {@link Export}.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class Import {
final static String NAME = "import";
final static String CF_RENAME_PROP = "HBASE_IMPORTER_RENAME_CFS";
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java Thu Mar 1 17:53:03 2012
@@ -24,6 +24,8 @@ import org.apache.hadoop.hbase.util.Base
import java.io.IOException;
import java.util.ArrayList;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -50,6 +52,8 @@ import com.google.common.collect.Lists;
*
* @see ImportTsv#usage(String)
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class ImportTsv {
final static String NAME = "importtsv";
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.mapreduc
import java.util.TreeSet;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapreduce.Reducer;
@@ -32,6 +34,8 @@ import org.apache.hadoop.mapreduce.Reduc
* memory sorting.
* @see HFileOutputFormat
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class KeyValueSortReducer extends Reducer {
protected void reduce(ImmutableBytesWritable row, java.lang.Iterable kvs,
org.apache.hadoop.mapreduce.Reducer.Context context)
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java Thu Mar 1 17:53:03 2012
@@ -44,6 +44,8 @@ import java.util.concurrent.atomic.Atomi
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileStatus;
@@ -86,6 +88,8 @@ import com.google.common.util.concurrent
* Tool to load the output of HFileOutputFormat into an existing table.
* @see #usage()
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class LoadIncrementalHFiles extends Configured implements Tool {
private static Log LOG = LogFactory.getLog(LoadIncrementalHFiles.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java Thu Mar 1 17:53:03 2012
@@ -25,6 +25,8 @@ import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
@@ -55,6 +57,8 @@ import org.apache.hadoop.mapreduce.TaskA
* because it is easy to rerun a bulk import).
*
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class MultiTableOutputFormat extends OutputFormat {
/** Set this to {@link #WAL_OFF} to turn off write-ahead logging (HLog) */
public static final String WAL_PROPERTY = "hbase.mapreduce.multitableoutputformat.wal";
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java Thu Mar 1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -37,6 +39,8 @@ import org.apache.hadoop.util.StringUtil
* @see HFileOutputFormat
* @see KeyValueSortReducer
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class PutSortReducer extends
Reducer {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.mapreduc
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
@@ -37,6 +39,8 @@ import org.apache.hadoop.util.GenericOpt
* A job with a just a map phase to count rows. Map outputs table rows IF the
* input row has columns that have content.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class RowCounter {
/** Name of this 'program'. */
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.mapreduc
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -43,6 +45,8 @@ import org.apache.hadoop.mapreduce.Parti
* @see #START
* @see #END
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class SimpleTotalOrderPartitioner extends Partitioner
implements Configurable {
private final static Log LOG = LogFactory.getLog(SimpleTotalOrderPartitioner.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java Thu Mar 1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.KeyValue;
@@ -34,6 +36,8 @@ import org.apache.hadoop.util.StringUtil
/**
* Convert HBase tabular data into a format that is consumable by Map/Reduce.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TableInputFormat extends TableInputFormatBase
implements Configurable {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java Thu Mar 1 17:53:03 2012
@@ -29,6 +29,8 @@ import javax.naming.NamingException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.client.HTable;
@@ -72,6 +74,8 @@ import org.apache.hadoop.net.DNS;
* }
*
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class TableInputFormatBase
extends InputFormat {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java Thu Mar 1 17:53:03 2012
@@ -34,6 +34,8 @@ import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -57,6 +59,8 @@ import org.apache.hadoop.util.StringUtil
* Utility for {@link TableMapper} and {@link TableReducer}
*/
@SuppressWarnings("unchecked")
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TableMapReduceUtil {
static Log LOG = LogFactory.getLog(TableMapReduceUtil.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java Thu Mar 1 17:53:03 2012
@@ -19,6 +19,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapreduce.Mapper;
@@ -31,6 +33,8 @@ import org.apache.hadoop.mapreduce.Mappe
* @param The type of the value.
* @see org.apache.hadoop.mapreduce.Mapper
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class TableMapper
extends Mapper {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.mapreduc
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
@@ -28,6 +30,8 @@ import org.apache.hadoop.mapreduce.TaskA
/**
* Small committer class that does not do anything.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TableOutputCommitter extends OutputCommitter {
@Override
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java Thu Mar 1 17:53:03 2012
@@ -23,6 +23,8 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -45,6 +47,8 @@ import org.apache.hadoop.mapreduce.TaskA
*
* @param The type of the key. Ignored in this class.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TableOutputFormat extends OutputFormat
implements Configurable {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.mapreduc
import java.io.IOException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
@@ -33,6 +35,8 @@ import org.apache.hadoop.mapreduce.TaskA
* Iterate over an HBase table data, return (ImmutableBytesWritable, Result)
* pairs.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TableRecordReader
extends RecordReader {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java Thu Mar 1 17:53:03 2012
@@ -22,6 +22,8 @@ import java.lang.reflect.Method;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
@@ -41,6 +43,8 @@ import org.apache.hadoop.util.StringUtil
* Iterate over an HBase table data, return (ImmutableBytesWritable, Result)
* pairs.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TableRecordReaderImpl {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java Thu Mar 1 17:53:03 2012
@@ -19,6 +19,8 @@
*/
package org.apache.hadoop.hbase.mapreduce;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Reducer;
@@ -39,6 +41,8 @@ import org.apache.hadoop.mapreduce.Reduc
* @param The type of the output key.
* @see org.apache.hadoop.mapreduce.Reducer
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public abstract class TableReducer
extends Reducer {
}
\ No newline at end of file
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java Thu Mar 1 17:53:03 2012
@@ -24,6 +24,8 @@ import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Writable;
@@ -33,6 +35,8 @@ import org.apache.hadoop.mapreduce.Input
* A table split corresponds to a key range (low, high). All references to row
* below refer to the key of the row.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TableSplit extends InputSplit
implements Writable, Comparable {
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java Thu Mar 1 17:53:03 2012
@@ -25,6 +25,8 @@ import org.apache.hadoop.hbase.util.Base
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import java.io.IOException;
@@ -32,6 +34,8 @@ import java.io.IOException;
/**
* Write table content out to files in hdfs.
*/
+@InterfaceAudience.Public
+@InterfaceStability.Stable
public class TsvImporterMapper
extends Mapper
{
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java Thu Mar 1 17:53:03 2012
@@ -23,6 +23,7 @@ import java.util.concurrent.atomic.Atomi
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
@@ -46,6 +47,7 @@ import org.apache.zookeeper.KeeperExcept
* #blockUntilBecomingActiveMaster() is called to wait until becoming
* the active master of the cluster.
*/
+@InterfaceAudience.Private
class ActiveMasterManager extends ZooKeeperListener {
private static final Log LOG = LogFactory.getLog(ActiveMasterManager.class);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/AssignCallable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/AssignCallable.java?rev=1295710&r1=1295709&r2=1295710&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/AssignCallable.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/AssignCallable.java Thu Mar 1 17:53:03 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.master;
import java.util.concurrent.Callable;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HRegionInfo;
/**
@@ -29,6 +30,7 @@ import org.apache.hadoop.hbase.HRegionIn
* Implementing as future callable we are able to act on the timeout
* asynchronously.
*/
+@InterfaceAudience.Private
public class AssignCallable implements Callable