hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ramkris...@apache.org
Subject svn commit: r1525269 [1/8] - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-common/src/main/java/org/apache/hadoop/hbase/ hbase-common/src/main/java/org/apa...
Date Sat, 21 Sep 2013 18:01:35 GMT
Author: ramkrishna
Date: Sat Sep 21 18:01:32 2013
New Revision: 1525269

URL: http://svn.apache.org/r1525269
Log:
HBASE-8496 - Implement tags and the internals of how a tag should look like (Ram)


Added:
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java
    hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithTags.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/ColumnNodeType.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TagUsage.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALCellCodecWithCompression.java
Modified:
    hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
    hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
    hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDecodingContext.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockEncodingContext.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
    hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
    hbase/trunk/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java
    hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java
    hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/column/ColumnNodeReader.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/column/ColumnReader.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/row/RowNodeReader.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnNodeWriter.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowNodeWriter.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
    hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java
    hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
    hbase/trunk/hbase-protocol/src/main/protobuf/Cell.proto
    hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
    hbase/trunk/hbase-server/src/test/resources/mapred-site.xml

Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java Sat Sep 21 18:01:32 2013
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
@@ -96,7 +97,7 @@ public abstract class Mutation extends O
   }
 
   /*
-   * Create a nnnnnnnn with this objects row key and the Put identifier.
+   * Create a KeyValue with this object's row key and the Put identifier.
    *
    * @return a KeyValue with this objects row key and the Put identifier.
    */
@@ -105,6 +106,20 @@ public abstract class Mutation extends O
   }
 
   /**
+   * Create a KeyValue with this object's row key and the Put identifier.
+   * @param family
+   * @param qualifier
+   * @param ts
+   * @param value
+   * @param tags - Specify the Tags as an Array {@link Tag}
+   * @return a KeyValue with this object's row key and the Put identifier.
+   */
+  KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value, Tag[] tags) {
+    KeyValue kvWithTag = new KeyValue(this.row, family, qualifier, ts, value, tags);
+    return kvWithTag;
+  }
+
+  /**
    * Compile the column family (i.e. schema) information
    * into a Map. Useful for parsing and aggregation by debugging,
    * logging, and administration tools.

Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java Sat Sep 21 18:01:32 2013
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -112,6 +113,10 @@ public class Put extends Mutation implem
     return add(family, qualifier, this.ts, value);
   }
 
+  public Put add(byte[] family, byte [] qualifier, byte [] value, Tag[] tag) {
+    return add(family, qualifier, this.ts, value, tag);
+  }
+
   /**
    * Add the specified column and value, with the specified timestamp as
    * its version to this Put operation.
@@ -133,6 +138,18 @@ public class Put extends Mutation implem
   }
 
   /**
+   * Forms a keyvalue with tags
+   */
+  @SuppressWarnings("unchecked")
+  public Put add(byte[] family, byte[] qualifier, long ts, byte[] value, Tag[] tag) {
+    List<Cell> list = getCellList(family);
+    KeyValue kv = createPutKeyValue(family, qualifier, ts, value, tag);
+    list.add(kv);
+    familyMap.put(kv.getFamily(), list);
+    return this;
+  }
+
+  /**
    * Add the specified KeyValue to this Put operation.  Operation assumes that
    * the passed KeyValue is immutable and its backing array will not be modified
    * for the duration of this Put.

Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java Sat Sep 21 18:01:32 2013
@@ -40,7 +40,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
@@ -50,6 +49,8 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
@@ -99,12 +100,12 @@ import org.apache.hadoop.hbase.protobuf.
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
+import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
 import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
-import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
 import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;
@@ -471,7 +472,18 @@ public final class ProtobufUtil {
           if (qv.hasTimestamp()) {
             ts = qv.getTimestamp();
           }
-          put.add(family, qualifier, ts, value);
+          byte[] tags;
+          if (qv.hasTags()) {
+            tags = qv.getTags().toByteArray();
+            Object[] array = Tag.createTags(tags, 0, (short)tags.length).toArray();
+            Tag[] tagArray = new Tag[array.length];
+            for(int i = 0; i< array.length; i++) {
+              tagArray[i] = (Tag)array[i];
+            }
+            put.add(family, qualifier, ts, value, tagArray);
+          } else {
+            put.add(family, qualifier, ts, value);
+          }
         }
       }
     }
@@ -972,6 +984,9 @@ public final class ProtobufUtil {
         valueBuilder.setQualifier(ByteString.copyFrom(kv.getQualifier()));
         valueBuilder.setValue(ByteString.copyFrom(kv.getValue()));
         valueBuilder.setTimestamp(kv.getTimestamp());
+        if(cell.getTagsLength() > 0) {
+          valueBuilder.setTags(ByteString.copyFrom(CellUtil.getTagArray(kv)));
+        }
         if (type == MutationType.DELETE) {
           KeyValue.Type keyValueType = KeyValue.Type.codeToType(kv.getType());
           valueBuilder.setDeleteType(toDeleteType(keyValueType));

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java Sat Sep 21 18:01:32 2013
@@ -53,6 +53,9 @@ public final class CellUtil {
       cell.getQualifierLength());
   }
 
+  public static ByteRange fillTagRange(Cell cell, ByteRange range) {
+    return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
+  }
 
   /***************** get individual arrays for tests ************/
 
@@ -79,6 +82,12 @@ public final class CellUtil {
     copyValueTo(cell, output, 0);
     return output;
   }
+  
+  public static byte[] getTagArray(Cell cell){
+    byte[] output = new byte[cell.getTagsLength()];
+    copyTagTo(cell, output, 0);
+    return output;
+  }
 
 
   /******************** copyTo **********************************/
@@ -103,10 +112,22 @@ public final class CellUtil {
 
   public static int copyValueTo(Cell cell, byte[] destination, int destinationOffset) {
     System.arraycopy(cell.getValueArray(), cell.getValueOffset(), destination, destinationOffset,
-      cell.getValueLength());
+        cell.getValueLength());
     return destinationOffset + cell.getValueLength();
   }
 
+  /**
+   * Copies the tags info into the tag portion of the cell
+   * @param cell
+   * @param destination
+   * @param destinationOffset
+   * @return position after tags
+   */
+  public static int copyTagTo(Cell cell, byte[] destination, int destinationOffset) {
+    System.arraycopy(cell.getTagsArray(), cell.getTagsOffset(), destination, destinationOffset,
+        cell.getTagsLength());
+    return destinationOffset + cell.getTagsLength();
+  }
 
   /********************* misc *************************************/
 
@@ -134,18 +155,23 @@ public final class CellUtil {
     return new KeyValue(row, family, qualifier, timestamp,
       KeyValue.Type.codeToType(type), value);
   }
-  
+
   public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier,
       final long timestamp, final byte type, final byte[] value, final long memstoreTS) {
-    // I need a Cell Factory here. Using KeyValue for now. TODO.
-    // TODO: Make a new Cell implementation that just carries these
-    // byte arrays.
     KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp,
         KeyValue.Type.codeToType(type), value);
     keyValue.setMvccVersion(memstoreTS);
     return keyValue;
   }
 
+  public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier,
+      final long timestamp, final byte type, final byte[] value, byte[] tags, final long memstoreTS) {
+    KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp,
+        KeyValue.Type.codeToType(type), value, tags);
+    keyValue.setMvccVersion(memstoreTS);
+    return keyValue;
+  }
+
   /**
    * @param cellScannerables
    * @return CellScanner interface over <code>cellIterables</code>

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java Sat Sep 21 18:01:32 2013
@@ -27,9 +27,12 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.logging.Log;
@@ -66,7 +69,14 @@ import com.google.common.primitives.Long
  * The <code>rowlength</code> maximum is <code>Short.MAX_SIZE</code>, column family length maximum
  * is <code>Byte.MAX_SIZE</code>, and column qualifier + key length must be <
  * <code>Integer.MAX_SIZE</code>. The column does not contain the family/qualifier delimiter,
- * {@link #COLUMN_FAMILY_DELIMITER}
+ * {@link #COLUMN_FAMILY_DELIMITER}<br>
+ * KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after
+ * the value part. The format for this part is: <code>&lt;tagslength>&lt;tagsbytes></code>.
+ * <code>tagslength</code> maximum is <code>Short.MAX_SIZE</code>. The <code>tagsbytes</code>
+ * contain one or more tags whereas each tag is of the form
+ * <code>&lt;taglength>&lt;tagtype>&lt;tagbytes></code>.  <code>tagtype</code> is one byte and
+ * <code>taglength</code> maximum is <code>Short.MAX_SIZE</code> and it includes 1 byte type length
+ * and actual tag bytes length.
  */
 @InterfaceAudience.Private
 public class KeyValue implements Cell, HeapSize, Cloneable {
@@ -127,6 +137,11 @@ public class KeyValue implements Cell, H
   // Size of the length ints in a KeyValue datastructure.
   public static final int KEYVALUE_INFRASTRUCTURE_SIZE = ROW_OFFSET;
 
+  /** Size of the tags length field in bytes */
+  public static final int TAGS_LENGTH_SIZE = Bytes.SIZEOF_SHORT;
+
+  public static final int KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE = ROW_OFFSET + TAGS_LENGTH_SIZE;
+
   /**
    * Computes the number of bytes that a <code>KeyValue</code> instance with the provided
    * characteristics would take up for its underlying data structure.
@@ -140,8 +155,46 @@ public class KeyValue implements Cell, H
    */
   public static long getKeyValueDataStructureSize(int rlength,
       int flength, int qlength, int vlength) {
-    return KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE +
-            getKeyDataStructureSize(rlength, flength, qlength) + vlength;
+    return KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE
+        + getKeyDataStructureSize(rlength, flength, qlength) + vlength;
+  }
+
+  /**
+   * Computes the number of bytes that a <code>KeyValue</code> instance with the provided
+   * characteristics would take up for its underlying data structure.
+   *
+   * @param rlength row length
+   * @param flength family length
+   * @param qlength qualifier length
+   * @param vlength value length
+   * @param tagsLength total length of the tags
+   *
+   * @return the <code>KeyValue</code> data structure length
+   */
+  public static long getKeyValueDataStructureSize(int rlength, int flength, int qlength,
+      int vlength, int tagsLength) {
+    if (tagsLength == 0) {
+      return getKeyValueDataStructureSize(rlength, flength, qlength, vlength);
+    }
+    return KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE
+        + getKeyDataStructureSize(rlength, flength, qlength) + vlength + tagsLength;
+  }
+
+  /**
+   * Computes the number of bytes that a <code>KeyValue</code> instance with the provided
+   * characteristics would take up for its underlying data structure.
+   *
+   * @param klength key length
+   * @param vlength value length
+   * @param tagsLength total length of the tags
+   *
+   * @return the <code>KeyValue</code> data structure length
+   */
+  public static long getKeyValueDataStructureSize(int klength, int vlength, int tagsLength) {
+    if (tagsLength == 0) {
+      return KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + klength + vlength;
+    }
+    return KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE + klength + vlength + tagsLength;
   }
 
   /**
@@ -202,6 +255,38 @@ public class KeyValue implements Cell, H
   }
 
   /**
+   * @return an iterator over the tags in this KeyValue.
+   */
+  public Iterator<Tag> tagsIterator() {
+    // Subtract 1 to point to the end of the complete tag byte[]
+    final int endOffset = this.offset + this.length - 1;
+    return new Iterator<Tag>() {
+      private int pos = getTagsOffset();
+
+      @Override
+      public boolean hasNext() {
+        return this.pos < endOffset;
+      }
+
+      @Override
+      public Tag next() {
+        if (hasNext()) {
+          short curTagLen = Bytes.toShort(bytes, this.pos);
+          Tag tag = new Tag(bytes, pos, (short) (curTagLen + Bytes.SIZEOF_SHORT));
+          this.pos += Bytes.SIZEOF_SHORT + curTagLen;
+          return tag;
+        }
+        return null;
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException();
+      }
+    };
+  }
+
+  /**
    * Lowest possible key.
    * Makes a Key with highest possible Timestamp, empty row and column.  No
    * key can be equal or lower than this one in memstore or in store file.
@@ -371,6 +456,42 @@ public class KeyValue implements Cell, H
    * @param family family name
    * @param qualifier column qualifier
    * @param timestamp version timestamp
+   * @param value column value
+   * @param tags tags
+   * @throws IllegalArgumentException
+   */
+  public KeyValue(final byte[] row, final byte[] family,
+      final byte[] qualifier, final long timestamp, final byte[] value,
+      final Tag[] tags) {
+    this(row, family, qualifier, timestamp, value, Arrays.asList(tags));
+  }
+
+  /**
+   * Constructs KeyValue structure filled with specified values.
+   * @param row row key
+   * @param family family name
+   * @param qualifier column qualifier
+   * @param timestamp version timestamp
+   * @param value column value
+   * @param tags non-empty list of tags or null
+   * @throws IllegalArgumentException
+   */
+  public KeyValue(final byte[] row, final byte[] family,
+      final byte[] qualifier, final long timestamp, final byte[] value,
+      final List<Tag> tags) {
+    this(row, 0, row==null ? 0 : row.length,
+      family, 0, family==null ? 0 : family.length,
+      qualifier, 0, qualifier==null ? 0 : qualifier.length,
+      timestamp, Type.Put,
+      value, 0, value==null ? 0 : value.length, tags);
+  }
+
+  /**
+   * Constructs KeyValue structure filled with specified values.
+   * @param row row key
+   * @param family family name
+   * @param qualifier column qualifier
+   * @param timestamp version timestamp
    * @param type key type
    * @param value column value
    * @throws IllegalArgumentException
@@ -387,6 +508,144 @@ public class KeyValue implements Cell, H
    * <p>
    * Column is split into two fields, family and qualifier.
    * @param row row key
+   * @param family family name
+   * @param qualifier column qualifier
+   * @param timestamp version timestamp
+   * @param type key type
+   * @param value column value
+   * @throws IllegalArgumentException
+   */
  /**
   * Constructs KeyValue structure filled with specified values.
   * @param row row key
   * @param family family name
   * @param qualifier column qualifier
   * @param timestamp version timestamp
   * @param type key type
   * @param value column value
   * @param tags non-empty list of tags, or null for no tags
   * @throws IllegalArgumentException if any component exceeds its maximum
   * serialized size
   */
  public KeyValue(final byte[] row, final byte[] family,
      final byte[] qualifier, final long timestamp, Type type,
      final byte[] value, final List<Tag> tags) {
    // Qualifier and value are used whole (offset 0); null means empty.
    this(row, family, qualifier, 0, qualifier==null ? 0 : qualifier.length,
        timestamp, type, value, 0, value==null ? 0 : value.length, tags);
  }
+
  /**
   * Constructs KeyValue structure filled with specified values, taking the
   * tags in already-serialized form.
   * @param row row key
   * @param family family name
   * @param qualifier column qualifier
   * @param timestamp version timestamp
   * @param type key type
   * @param value column value
   * @param tags serialized tag bytes (used whole), or null for no tags
   * @throws IllegalArgumentException if any component exceeds its maximum
   * serialized size
   */
  public KeyValue(final byte[] row, final byte[] family,
      final byte[] qualifier, final long timestamp, Type type,
      final byte[] value, final byte[] tags) {
    // Qualifier and value are used whole (offset 0); null means empty.
    this(row, family, qualifier, 0, qualifier==null ? 0 : qualifier.length,
        timestamp, type, value, 0, value==null ? 0 : value.length, tags);
  }
+
  /**
   * Constructs KeyValue structure filled with specified values, with an
   * explicit qualifier slice.
   * @param row row key
   * @param family family name
   * @param qualifier column qualifier
   * @param qoffset qualifier offset
   * @param qlength qualifier length
   * @param timestamp version timestamp
   * @param type key type
   * @param value column value
   * @param voffset value offset
   * @param vlength value length
   * @param tags non-empty list of tags, or null for no tags
   * @throws IllegalArgumentException if any component exceeds its maximum
   * serialized size
   */
  public KeyValue(byte [] row, byte [] family,
      byte [] qualifier, int qoffset, int qlength, long timestamp, Type type,
      byte [] value, int voffset, int vlength, List<Tag> tags) {
    // Row and family are used whole (offset 0); null means empty.
    this(row, 0, row==null ? 0 : row.length,
        family, 0, family==null ? 0 : family.length,
        qualifier, qoffset, qlength, timestamp, type,
        value, voffset, vlength, tags);
  }
+
  /**
   * Constructs KeyValue structure filled with specified values, taking the
   * tags in already-serialized form and an explicit qualifier slice.
   * @param row row key
   * @param family family name
   * @param qualifier column qualifier
   * @param qoffset qualifier offset
   * @param qlength qualifier length
   * @param timestamp version timestamp
   * @param type key type
   * @param value column value
   * @param voffset value offset
   * @param vlength value length
   * @param tags serialized tag bytes (used whole), or null for no tags
   */
  public KeyValue(byte [] row, byte [] family,
      byte [] qualifier, int qoffset, int qlength, long timestamp, Type type,
      byte [] value, int voffset, int vlength, byte[] tags) {
    // Row and family are used whole; a null tags array becomes a zero-length
    // tags area.
    this(row, 0, row==null ? 0 : row.length,
        family, 0, family==null ? 0 : family.length,
        qualifier, qoffset, qlength, timestamp, type,
        value, voffset, vlength, tags, 0, tags==null ? 0 : tags.length);
  }
+
  /**
   * Constructs KeyValue structure filled with specified values, without tags.
   * <p>
   * Column is split into two fields, family and qualifier.
   * @param row row key
   * @param roffset row offset
   * @param rlength row length
   * @param family family name
   * @param foffset family offset
   * @param flength family length
   * @param qualifier column qualifier
   * @param qoffset qualifier offset
   * @param qlength qualifier length
   * @param timestamp version timestamp
   * @param type key type
   * @param value column value
   * @param voffset value offset
   * @param vlength value length
   * @throws IllegalArgumentException if any component exceeds its maximum
   * serialized size
   */
  public KeyValue(final byte [] row, final int roffset, final int rlength,
      final byte [] family, final int foffset, final int flength,
      final byte [] qualifier, final int qoffset, final int qlength,
      final long timestamp, final Type type,
      final byte [] value, final int voffset, final int vlength) {
    // Delegates to the tag-aware constructor with a null (empty) tag list.
    this(row, roffset, rlength, family, foffset, flength, qualifier, qoffset,
      qlength, timestamp, type, value, voffset, vlength, null);
  }
+  
+  /**
+   * Constructs KeyValue structure filled with specified values. Uses the provided buffer as the
+   * data buffer.
+   * <p>
+   * Column is split into two fields, family and qualifier.
+   *
+   * @param buffer the bytes buffer to use
+   * @param boffset buffer offset
+   * @param row row key
+   * @param roffset row offset
+   * @param rlength row length
+   * @param family family name
+   * @param foffset family offset
+   * @param flength family length
+   * @param qualifier column qualifier
+   * @param qoffset qualifier offset
+   * @param qlength qualifier length
+   * @param timestamp version timestamp
+   * @param type key type
+   * @param value column value
+   * @param voffset value offset
+   * @param vlength value length
+   * @param tags non-empty list of tags or null
+   * @throws IllegalArgumentException an illegal value was passed or there is insufficient space
+   * remaining in the buffer
+   */
+  public KeyValue(byte [] buffer, final int boffset,
+      final byte [] row, final int roffset, final int rlength,
+      final byte [] family, final int foffset, final int flength,
+      final byte [] qualifier, final int qoffset, final int qlength,
+      final long timestamp, final Type type,
+      final byte [] value, final int voffset, final int vlength,
+      final Tag[] tags) {
+     this.bytes  = buffer;
+     this.length = writeByteArray(buffer, boffset,
+         row, roffset, rlength,
+         family, foffset, flength, qualifier, qoffset, qlength,
+        timestamp, type, value, voffset, vlength, tags);
+     this.offset = boffset;
+   }
+
+  /**
+   * Constructs KeyValue structure filled with specified values.
+   * <p>
+   * Column is split into two fields, family and qualifier.
+   * @param row row key
    * @param roffset row offset
    * @param rlength row length
    * @param family family name
@@ -400,16 +659,48 @@ public class KeyValue implements Cell, H
    * @param value column value
    * @param voffset value offset
    * @param vlength value length
+   * @param tags tags
    * @throws IllegalArgumentException
    */
  public KeyValue(final byte [] row, final int roffset, final int rlength,
      final byte [] family, final int foffset, final int flength,
      final byte [] qualifier, final int qoffset, final int qlength,
      final long timestamp, final Type type,
      final byte [] value, final int voffset, final int vlength,
      final List<Tag> tags) {
    // Serialize all components (plus tags, when non-null and non-empty) into
    // a freshly allocated backing array owned by this KeyValue.
    this.bytes = createByteArray(row, roffset, rlength,
        family, foffset, flength, qualifier, qoffset, qlength,
        timestamp, type, value, voffset, vlength, tags);
    this.length = bytes.length;
    this.offset = 0;
  }
+
  /**
   * Constructs KeyValue structure filled with specified values, taking the
   * tags as an already-serialized byte slice.
   * @param row row key
   * @param roffset row offset
   * @param rlength row length
   * @param family family name
   * @param foffset family offset
   * @param flength family length
   * @param qualifier column qualifier
   * @param qoffset qualifier offset
   * @param qlength qualifier length
   * @param timestamp version timestamp
   * @param type key type
   * @param value column value
   * @param voffset value offset
   * @param vlength value length
   * @param tags serialized tag bytes, or null for no tags
   * @param tagsOffset offset into {@code tags} where the tag bytes start
   * @param tagsLength number of tag bytes to copy
   */
  public KeyValue(final byte [] row, final int roffset, final int rlength,
      final byte [] family, final int foffset, final int flength,
      final byte [] qualifier, final int qoffset, final int qlength,
      final long timestamp, final Type type,
      final byte [] value, final int voffset, final int vlength,
      final byte[] tags, final int tagsOffset, final int tagsLength) {
    // Serialize all components into a freshly allocated backing array owned
    // by this KeyValue.
    this.bytes = createByteArray(row, roffset, rlength,
        family, foffset, flength, qualifier, qoffset, qlength,
        timestamp, type, value, voffset, vlength, tags, tagsOffset, tagsLength);
    this.length = bytes.length;
    this.offset = 0;
  }
@@ -432,9 +723,30 @@ public class KeyValue implements Cell, H
       final int qlength,
       final long timestamp, final Type type,
       final int vlength) {
-    this.bytes = createEmptyByteArray(rlength,
-        flength, qlength,
-        timestamp, type, vlength);
+    this(rlength, flength, qlength, timestamp, type, vlength, 0);
+  }
+
  /**
   * Constructs an empty KeyValue structure, with specified sizes.
   * This can be used to partially fill up KeyValues.
   * <p>
   * Column is split into two fields, family and qualifier.
   * @param rlength row length
   * @param flength family length
   * @param qlength qualifier length
   * @param timestamp version timestamp
   * @param type key type
   * @param vlength value length
   * @param tagsLength total serialized length of the tags area, or 0 for no
   * tags (when &gt; 0, space is reserved for the tags plus their length field)
   * @throws IllegalArgumentException if any length exceeds its maximum
   */
  public KeyValue(final int rlength,
      final int flength,
      final int qlength,
      final long timestamp, final Type type,
      final int vlength, final int tagsLength) {
    // Allocate a right-sized array with only the length/type markers written;
    // the caller fills in the actual row/family/qualifier/value bytes.
    this.bytes = createEmptyByteArray(rlength, flength, qlength, timestamp, type, vlength,
        tagsLength);
    this.length = bytes.length;
    this.offset = 0;
  }
@@ -459,7 +771,7 @@ public class KeyValue implements Cell, H
    * @return The newly created byte array.
    */
   private static byte[] createEmptyByteArray(final int rlength, int flength,
-      int qlength, final long timestamp, final Type type, int vlength) {
+      int qlength, final long timestamp, final Type type, int vlength, int tagsLength) {
     if (rlength > Short.MAX_VALUE) {
       throw new IllegalArgumentException("Row > " + Short.MAX_VALUE);
     }
@@ -470,6 +782,7 @@ public class KeyValue implements Cell, H
     if (qlength > Integer.MAX_VALUE - rlength - flength) {
       throw new IllegalArgumentException("Qualifier > " + Integer.MAX_VALUE);
     }
+    checkForTagsLength(tagsLength);
     // Key length
     long longkeylength = getKeyDataStructureSize(rlength, flength, qlength);
     if (longkeylength > Integer.MAX_VALUE) {
@@ -484,8 +797,8 @@ public class KeyValue implements Cell, H
     }
 
     // Allocate right-sized byte array.
-    byte [] bytes =
-        new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength)];
+    byte[] bytes= new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength,
+        tagsLength)];
     // Write the correct size markers
     int pos = 0;
     pos = Bytes.putInt(bytes, pos, keylength);
@@ -496,6 +809,10 @@ public class KeyValue implements Cell, H
     pos += flength + qlength;
     pos = Bytes.putLong(bytes, pos, timestamp);
     pos = Bytes.putByte(bytes, pos, type.getCode());
+    pos += keylength + vlength;
+    if (tagsLength > 0) {
+      pos = Bytes.putShort(bytes, pos, (short)(tagsLength & 0x0000ffff));
+    }
     return bytes;
   }
 
@@ -518,7 +835,6 @@ public class KeyValue implements Cell, H
       final byte [] qualifier, int qlength,
       final byte [] value, int vlength)
           throws IllegalArgumentException {
-
     if (rlength > Short.MAX_VALUE) {
       throw new IllegalArgumentException("Row > " + Short.MAX_VALUE);
     }
@@ -579,12 +895,21 @@ public class KeyValue implements Cell, H
       final byte [] family, final int foffset, int flength,
       final byte [] qualifier, final int qoffset, int qlength,
       final long timestamp, final Type type,
-      final byte [] value, final int voffset, int vlength) {
+      final byte [] value, final int voffset, int vlength, Tag[] tags) {
 
     checkParameters(row, rlength, family, flength, qualifier, qlength, value, vlength);
 
+    // Calculate length of tags area
+    int tagsLength = 0;
+    if (tags != null && tags.length > 0) {
+      for (Tag t: tags) {
+        tagsLength += t.getLength();
+      }
+    }
+    checkForTagsLength(tagsLength);
     int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength);
-    int keyValueLength = (int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength);
+    int keyValueLength = (int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength,
+        tagsLength);
     if (keyValueLength > buffer.length - boffset) {
       throw new IllegalArgumentException("Buffer size " + (buffer.length - boffset) + " < " +
           keyValueLength);
@@ -608,13 +933,24 @@ public class KeyValue implements Cell, H
     if (value != null && value.length > 0) {
       pos = Bytes.putBytes(buffer, pos, value, voffset, vlength);
     }
-
+    // Write the number of tags. If it is 0 then it means there are no tags.
+    if (tagsLength > 0) {
+      pos = Bytes.putShort(buffer, pos, (short) tagsLength);
+      for (Tag t : tags) {
+        pos = Bytes.putBytes(buffer, pos, t.getBuffer(), t.getOffset(), t.getLength());
+      }
+    }
     return keyValueLength;
   }
 
  /**
   * Sanity-checks that a serialized tags area fits in the two-byte
   * tags-length field used by the KeyValue wire format.
   * @param tagsLength total serialized length of all tags, in bytes
   * @throws IllegalArgumentException if the length exceeds Short.MAX_VALUE
   */
  private static void checkForTagsLength(int tagsLength) {
    if (tagsLength > Short.MAX_VALUE) {
      throw new IllegalArgumentException("tagslength "+ tagsLength + " > " + Short.MAX_VALUE);
    }
  }
+
   /**
    * Write KeyValue format into a byte array.
-   *
    * @param row row key
    * @param roffset row offset
    * @param rlength row length
@@ -635,17 +971,66 @@ public class KeyValue implements Cell, H
       final int rlength, final byte [] family, final int foffset, int flength,
       final byte [] qualifier, final int qoffset, int qlength,
       final long timestamp, final Type type,
-      final byte [] value, final int voffset, int vlength) {
+      final byte [] value, final int voffset, 
+      int vlength, byte[] tags, int tagsOffset, int tagsLength) {
 
     checkParameters(row, rlength, family, flength, qualifier, qlength, value, vlength);
-
+    checkForTagsLength(tagsLength);
     // Allocate right-sized byte array.
     int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength);
     byte [] bytes =
-        new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength)];
+        new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength, tagsLength)];
+    // Write key, value and key row length.
+    int pos = 0;
+    pos = Bytes.putInt(bytes, pos, keyLength);
+    pos = Bytes.putInt(bytes, pos, vlength);
+    pos = Bytes.putShort(bytes, pos, (short)(rlength & 0x0000ffff));
+    pos = Bytes.putBytes(bytes, pos, row, roffset, rlength);
+    pos = Bytes.putByte(bytes, pos, (byte)(flength & 0x0000ff));
+    if(flength != 0) {
+      pos = Bytes.putBytes(bytes, pos, family, foffset, flength);
+    }
+    if(qlength != 0) {
+      pos = Bytes.putBytes(bytes, pos, qualifier, qoffset, qlength);
+    }
+    pos = Bytes.putLong(bytes, pos, timestamp);
+    pos = Bytes.putByte(bytes, pos, type.getCode());
+    if (value != null && value.length > 0) {
+      pos = Bytes.putBytes(bytes, pos, value, voffset, vlength);
+    }
+    // Add the tags after the value part
+    if (tagsLength > 0) {
+      pos = Bytes.putShort(bytes, pos, (short) (tagsLength));
+      pos = Bytes.putBytes(bytes, pos, tags, tagsOffset, tagsLength);
+    }
+    return bytes;
+  }
+  
+  private static byte [] createByteArray(final byte [] row, final int roffset,
+      final int rlength, final byte [] family, final int foffset, int flength,
+      final byte [] qualifier, final int qoffset, int qlength,
+      final long timestamp, final Type type,
+      final byte [] value, final int voffset, int vlength, List<Tag> tags) {
+
+    checkParameters(row, rlength, family, flength, qualifier, qlength, value, vlength);
+
+    // Calculate length of tags area
+    int tagsLength = 0;
+    if (tags != null && !tags.isEmpty()) {
+      for (Tag t : tags) {
+        tagsLength += t.getLength();
+      }
+    }
+    checkForTagsLength(tagsLength);
+    // Allocate right-sized byte array.
+    int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength);
+    byte[] bytes = new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength,
+        tagsLength)];
+
     // Write key, value and key row length.
     int pos = 0;
     pos = Bytes.putInt(bytes, pos, keyLength);
+
     pos = Bytes.putInt(bytes, pos, vlength);
     pos = Bytes.putShort(bytes, pos, (short)(rlength & 0x0000ffff));
     pos = Bytes.putBytes(bytes, pos, row, roffset, rlength);
@@ -661,9 +1046,17 @@ public class KeyValue implements Cell, H
     if (value != null && value.length > 0) {
       pos = Bytes.putBytes(bytes, pos, value, voffset, vlength);
     }
+    // Add the tags after the value part
+    if (tagsLength > 0) {
+      pos = Bytes.putShort(bytes, pos, (short) (tagsLength));
+      for (Tag t : tags) {
+        pos = Bytes.putBytes(bytes, pos, t.getBuffer(), t.getOffset(), t.getLength());
+      }
+    }
     return bytes;
   }
 
+
   /**
    * Needed doing 'contains' on List.  Only compares the key portion, not the value.
    */
@@ -744,13 +1137,6 @@ public class KeyValue implements Cell, H
   }
 
   /**
-   * Use for logging.
-   * @param b Key portion of a KeyValue.
-   * @param o Offset to start of key
-   * @param l Length of key.
-   * @return Key as a String.
-   */
-  /**
    * Produces a string map for this key/value pair. Useful for programmatic use
    * and manipulation of the data stored in an HLogKey, for example, printing
    * as JSON. Values are left out due to their tendency to be large. If needed,
@@ -765,9 +1151,24 @@ public class KeyValue implements Cell, H
     stringMap.put("qualifier", Bytes.toStringBinary(getQualifier()));
     stringMap.put("timestamp", getTimestamp());
     stringMap.put("vlen", getValueLength());
+    List<Tag> tags = getTags();
+    if (tags != null) {
+      List<String> tagsString = new ArrayList<String>();
+      for (Tag t : tags) {
+        tagsString.add((t.getType()) + ":" +Bytes.toStringBinary(t.getValue()));
+      }
+      stringMap.put("tag", tagsString);
+    }
     return stringMap;
   }
 
+  /**
+   * Use for logging.
+   * @param b Key portion of a KeyValue.
+   * @param o Offset to start of key
+   * @param l Length of key.
+   * @return Key as a String.
+   */
   public static String keyToString(final byte [] b, final int o, final int l) {
     if (b == null) return "";
     int rowlength = Bytes.toShort(b, o);
@@ -839,9 +1240,9 @@ public class KeyValue implements Cell, H
    * @return length of entire KeyValue, in bytes
    */
   private static int getLength(byte [] bytes, int offset) {
-    return ROW_OFFSET +
-        Bytes.toInt(bytes, offset) +
-        Bytes.toInt(bytes, offset + Bytes.SIZEOF_INT);
+    int klength = ROW_OFFSET + Bytes.toInt(bytes, offset);
+    int vlength = Bytes.toInt(bytes, offset + Bytes.SIZEOF_INT);
+    return klength + vlength;
   }
 
   /**
@@ -876,11 +1277,12 @@ public class KeyValue implements Cell, H
   }
 
   /**
-   * @return Value offset
+   * @return the value offset
    */
   @Override
   public int getValueOffset() {
-    return getKeyOffset() + getKeyLength();
+    int voffset = getKeyOffset() + getKeyLength();
+    return voffset;
   }
 
   /**
@@ -888,7 +1290,8 @@ public class KeyValue implements Cell, H
    */
   @Override
   public int getValueLength() {
-    return Bytes.toInt(this.bytes, this.offset + Bytes.SIZEOF_INT);
+    int vlength = Bytes.toInt(this.bytes, this.offset + Bytes.SIZEOF_INT);
+    return vlength;
   }
 
   /**
@@ -1185,6 +1588,55 @@ public class KeyValue implements Cell, H
     return CellUtil.cloneQualifier(this);
   }
 
+  /**
+   * This returns the offset where the tag actually starts.
+   */
+  @Override
+  public int getTagsOffset() {
+    short tagsLen = getTagsLength();
+    if (tagsLen == 0) {
+      return this.offset + this.length;
+    }
+    return this.offset + this.length - tagsLen;
+  }
+
  /**
   * Returns the length in bytes of the tag bytes only; the two-byte
   * tags-length field that precedes them is not counted.
   */
  @Override
  public short getTagsLength() {
    // Whatever remains after the key, the value and the two leading length
    // ints (KEYVALUE_INFRASTRUCTURE_SIZE) is the optional tags area.
    int tagsLen = this.length - (getKeyLength() + getValueLength() + KEYVALUE_INFRASTRUCTURE_SIZE);
    if (tagsLen > 0) {
      // There are some Tag bytes in the byte[]. So reduce 2 bytes which is added to denote the tags
      // length
      tagsLen -= TAGS_LENGTH_SIZE;
    }
    return (short) tagsLen;
  }
+
  /**
   * Returns any tags embedded in the KeyValue, or an empty list when the
   * KeyValue carries no tags.
   * <p>
   * NOTE(review): this reads the tag area directly from the backing array
   * rather than via a CellUtil tag iterator, because the key offset/length
   * accessors it would need are not part of the Cell interface.
   * @return The tags
   */
  public List<Tag> getTags() {
    short tagsLength = getTagsLength();
    if (tagsLength == 0) {
      return new ArrayList<Tag>();
    }
    return Tag.createTags(getBuffer(), getTagsOffset(), tagsLength);
  }
+
+  /**
+   * @return the backing array of the entire KeyValue (all KeyValue fields are in a single array)
+   */
+  @Override
+  public byte[] getTagsArray() {
+    return bytes;
+  }
+
   //---------------------------------------------------------------------------
   //
   //  Compare specified fields against those contained in this KeyValue
@@ -2169,7 +2621,7 @@ public class KeyValue implements Cell, H
 
     int len = writeByteArray(buffer, boffset, row, roffset, rlength, family, foffset, flength,
         qualifier, qoffset, qlength, HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum,
-        null, 0, 0);
+        null, 0, 0, null);
     return new KeyValue(buffer, boffset, len);
   }
 
@@ -2424,22 +2876,4 @@ public class KeyValue implements Cell, H
     sum += Bytes.SIZEOF_LONG;// memstoreTS
     return ClassSize.align(sum);
   }
-
-  // -----
-  // KV tags stubs
-  @Override
-  public int getTagsOffset() {
-    throw new UnsupportedOperationException("Not implememnted");
-  }
-
-  @Override
-  public short getTagsLength() {
-    throw new UnsupportedOperationException("Not implememnted");
-  }
-
-  @Override
-  public byte[] getTagsArray() {
-    throw new UnsupportedOperationException("Not implememnted");
-  }
-
 }

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java Sat Sep 21 18:01:32 2013
@@ -93,12 +93,12 @@ public class KeyValueTestUtil {
   }
   
   public static List<KeyValue> rewindThenToList(final ByteBuffer bb,
-      final boolean includesMemstoreTS) {
+      final boolean includesMemstoreTS, final boolean useTags) {
     bb.rewind();
     List<KeyValue> kvs = Lists.newArrayList();
     KeyValue kv = null;
     while (true) {
-      kv = KeyValueUtil.nextShallowCopy(bb, includesMemstoreTS);
+      kv = KeyValueUtil.nextShallowCopy(bb, includesMemstoreTS, useTags);
       if (kv == null) {
         break;
       }

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java Sat Sep 21 18:01:32 2013
@@ -24,9 +24,9 @@ import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
-import org.apache.hadoop.hbase.util.SimpleByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.IterableUtils;
+import org.apache.hadoop.hbase.util.SimpleByteRange;
 import org.apache.hadoop.io.WritableUtils;
 
 import com.google.common.base.Function;
@@ -41,8 +41,9 @@ public class KeyValueUtil {
   /**************** length *********************/
 
   public static int length(final Cell cell) {
-    return (int)KeyValue.getKeyValueDataStructureSize(cell.getRowLength(), cell.getFamilyLength(),
-      cell.getQualifierLength(), cell.getValueLength());
+    return (int) (KeyValue.getKeyValueDataStructureSize(cell.getRowLength(),
+        cell.getFamilyLength(), cell.getQualifierLength(), cell.getValueLength(),
+        cell.getTagsLength()));
   }
 
   protected static int keyLength(final Cell cell) {
@@ -71,7 +72,8 @@ public class KeyValueUtil {
   /**************** copy key only *********************/
 
   public static KeyValue copyToNewKeyValue(final Cell cell) {
-    KeyValue kvCell = new KeyValue(copyToNewByteArray(cell));
+    byte[] bytes = copyToNewByteArray(cell);
+    KeyValue kvCell = new KeyValue(bytes, 0, bytes.length);
     kvCell.setMvccVersion(cell.getMvccVersion());
     return kvCell;
   }
@@ -112,8 +114,12 @@ public class KeyValueUtil {
     pos = Bytes.putInt(output, pos, keyLength(cell));
     pos = Bytes.putInt(output, pos, cell.getValueLength());
     pos = appendKeyToByteArrayWithoutValue(cell, output, pos);
-    CellUtil.copyValueTo(cell, output, pos);
-    return pos + cell.getValueLength();
+    pos = CellUtil.copyValueTo(cell, output, pos);
+    if ((cell.getTagsLength() > 0)) {
+      pos = Bytes.putShort(output, pos, cell.getTagsLength());
+      pos = CellUtil.copyTagTo(cell, output, pos);
+    }
+    return pos;
   }
 
   public static ByteBuffer copyToNewByteBuffer(final Cell cell) {
@@ -142,20 +148,30 @@ public class KeyValueUtil {
   /**
    * Creates a new KeyValue object positioned in the supplied ByteBuffer and sets the ByteBuffer's
    * position to the start of the next KeyValue. Does not allocate a new array or copy data.
+   * @param bb
+   * @param includesMvccVersion
+   * @param includesTags 
    */
-  public static KeyValue nextShallowCopy(final ByteBuffer bb, final boolean includesMvccVersion) {
+  public static KeyValue nextShallowCopy(final ByteBuffer bb, final boolean includesMvccVersion,
+      boolean includesTags) {
     if (bb.isDirect()) {
       throw new IllegalArgumentException("only supports heap buffers");
     }
     if (bb.remaining() < 1) {
       return null;
     }
+    KeyValue keyValue = null;
     int underlyingArrayOffset = bb.arrayOffset() + bb.position();
     int keyLength = bb.getInt();
     int valueLength = bb.getInt();
-    int kvLength = KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + keyLength + valueLength;
-    KeyValue keyValue = new KeyValue(bb.array(), underlyingArrayOffset, kvLength);
     ByteBufferUtils.skip(bb, keyLength + valueLength);
+    short tagsLength = 0;
+    if (includesTags) {
+      tagsLength = bb.getShort();
+      ByteBufferUtils.skip(bb, tagsLength);
+    }
+    int kvLength = (int) KeyValue.getKeyValueDataStructureSize(keyLength, valueLength, tagsLength);
+    keyValue = new KeyValue(bb.array(), underlyingArrayOffset, kvLength);
     if (includesMvccVersion) {
       long mvccVersion = ByteBufferUtils.readVLong(bb);
       keyValue.setMvccVersion(mvccVersion);

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java?rev=1525269&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java Sat Sep 21 18:01:32 2013
@@ -0,0 +1,174 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
+
/**
 * Represents one tag in its serialized form:
 * <code>&lt;taglength>&lt;tagtype>&lt;tagbytes></code>. <code>tagtype</code> is
 * one byte and <code>taglength</code> maximum is <code>Short.MAX_VALUE</code>.
 * It includes 1 byte type length and actual tag bytes length.
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class Tag {
  // Serialized-form bookkeeping: 1 byte for the type code, 2 bytes for the
  // length field, and their sum (everything surrounding the tag payload).
  public final static int TYPE_LENGTH_SIZE = Bytes.SIZEOF_BYTE;
  public final static int TAG_LENGTH_SIZE = Bytes.SIZEOF_SHORT;
  public final static int INFRASTRUCTURE_SIZE = TYPE_LENGTH_SIZE + TAG_LENGTH_SIZE;

  private byte type;      // one-byte tag type code
  private byte[] bytes;   // backing buffer holding the serialized tag
  private int offset = 0; // offset of the tag (its length field) in 'bytes'
  private short length = 0; // total serialized length, including length field

  // The special tag will write the length of each tag and that will be
  // followed by the type and then the actual tag.
  // So every time the length part is parsed we need to add + 1 byte to it to
  // get the type and then get the actual tag.
  public Tag(byte tagType, String tag) {
    this(tagType, Bytes.toBytes(tag));
  }

  /**
   * Serializes a new tag of the given type around a copy of the given value.
   * @param tagType one-byte tag type code
   * @param tag the tag payload bytes (copied into a new backing array)
   */
  public Tag(byte tagType, byte[] tag) {
    // <length of tag - 2 bytes><type code - 1 byte><tag>
    short tagLength = (short) ((tag.length & 0x0000ffff) + TYPE_LENGTH_SIZE);
    length = (short) (TAG_LENGTH_SIZE + tagLength);
    bytes = new byte[length];
    int pos = Bytes.putShort(bytes, 0, tagLength);
    pos = Bytes.putByte(bytes, pos, tagType);
    Bytes.putBytes(bytes, pos, tag, 0, tag.length);
    this.type = tagType;
  }

  /**
   * Creates a Tag from the specified byte array and offset. Presumes
   * <code>bytes</code> content starting at <code>offset</code> is formatted as
   * a Tag blob.
   * The bytes to include the tag type, tag length and actual tag bytes.
   * @param bytes
   *          byte array
   * @param offset
   *          offset to start of Tag
   */
  public Tag(byte[] bytes, int offset) {
    this(bytes, offset, getLength(bytes, offset));
  }

  // Total serialized length of the tag at 'offset': the value of its length
  // field plus the size of the length field itself.
  private static short getLength(byte[] bytes, int offset) {
    return (short) (TAG_LENGTH_SIZE + Bytes.toShort(bytes, offset));
  }

  /**
   * Creates a Tag from the specified byte array, starting at offset, and for
   * length <code>length</code>. Presumes <code>bytes</code> content starting at
   * <code>offset</code> is formatted as a Tag blob.
   * Shares the given array; no copy is made.
   * @param bytes
   *          byte array
   * @param offset
   *          offset to start of the Tag
   * @param length
   *          length of the Tag
   */
  public Tag(byte[] bytes, int offset, short length) {
    this.bytes = bytes;
    this.offset = offset;
    this.length = length;
    // The type code sits immediately after the 2-byte length field.
    this.type = bytes[offset + TAG_LENGTH_SIZE];
  }

  /**
   * @return The byte array backing this Tag.
   */
  public byte[] getBuffer() {
    return this.bytes;
  }

  /**
   * @return the one-byte tag type code
   */
  public byte getType() {
    return this.type;
  }

  /**
   * @return Length of actual tag bytes within the backed buffer
   */
  public int getTagLength() {
    return this.length - INFRASTRUCTURE_SIZE;
  }

  /**
   * @return Offset of actual tag bytes within the backed buffer
   */
  public int getTagOffset() {
    return this.offset + INFRASTRUCTURE_SIZE;
  }

  /**
   * @return a fresh copy of the tag payload bytes (without length/type fields)
   */
  public byte[] getValue() {
    int tagLength = getTagLength();
    byte[] tag = new byte[tagLength];
    Bytes.putBytes(tag, 0, bytes, getTagOffset(), tagLength);
    return tag;
  }

  /**
   * Creates the list of tags from the byte array b. Expected that b is in the
   * expected tag format. The returned Tags share the array; no copies are made.
   * @param b byte array containing consecutive serialized tags
   * @param offset offset of the first tag in {@code b}
   * @param length total length of the tags area to parse
   * @return List of tags
   */
  public static List<Tag> createTags(byte[] b, int offset, short length) {
    List<Tag> tags = new ArrayList<Tag>();
    int pos = offset;
    while (pos < offset + length) {
      // Each tag is prefixed by its own 2-byte length (which excludes the
      // length field itself).
      short tagLen = Bytes.toShort(b, pos);
      tags.add(new Tag(b, pos, (short) (tagLen + TAG_LENGTH_SIZE)));
      pos += TAG_LENGTH_SIZE + tagLen;
    }
    return tags;
  }

  /**
   * Returns the total length of the entire tag entity
   * @return serialized length including the length field and type byte
   */
  short getLength() {
    return this.length;
  }

  /**
   * Returns the offset of the entire tag entity
   * @return offset of the tag's length field in the backing array
   */
  int getOffset() {
    return this.offset;
  }
}
\ No newline at end of file

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java Sat Sep 21 18:01:32 2013
@@ -53,6 +53,8 @@ public class CellCodec implements Codec 
       this.out.write(cell.getTypeByte());
       // Value
       write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
+      // Write tags
+      write(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
       // MvccVersion
       this.out.write(Bytes.toBytes(cell.getMvccVersion()));
     }
@@ -85,11 +87,12 @@ public class CellCodec implements Codec 
       long timestamp = Bytes.toLong(longArray);
       byte type = (byte) this.in.read();
       byte [] value = readByteArray(in);
+      byte[] tags = readByteArray(in);
       // Read memstore version
       byte[] memstoreTSArray = new byte[Bytes.SIZEOF_LONG];
       IOUtils.readFully(this.in, memstoreTSArray);
       long memstoreTS = Bytes.toLong(memstoreTSArray);
-      return CellUtil.createCell(row, family, qualifier, timestamp, type, value, memstoreTS);
+      return CellUtil.createCell(row, family, qualifier, timestamp, type, value, tags, memstoreTS);
     }
 
     /**

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java Sat Sep 21 18:01:32 2013
@@ -26,8 +26,8 @@ import org.apache.hadoop.hbase.HConstant
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.KeyValue.SamePrefixComparator;
-import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.WritableUtils;
@@ -42,8 +42,15 @@ abstract class BufferedDataBlockEncoder 
 
   @Override
   public ByteBuffer decodeKeyValues(DataInputStream source,
-      boolean includesMemstoreTS) throws IOException {
-    return decodeKeyValues(source, 0, 0, includesMemstoreTS);
+      HFileBlockDecodingContext blkDecodingCtx) throws IOException {
+    if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {
+      throw new IOException(this.getClass().getName() + " only accepts "
+          + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");
+    }
+
+    HFileBlockDefaultDecodingContext decodingCtx =
+        (HFileBlockDefaultDecodingContext) blkDecodingCtx;
+    return internalDecodeKeyValues(source, 0, 0, decodingCtx);
   }
 
   protected static class SeekerState {
@@ -51,6 +58,8 @@ abstract class BufferedDataBlockEncoder 
     protected int keyLength;
     protected int valueLength;
     protected int lastCommonPrefix;
+    protected int tagLength = 0;
+    protected int tagOffset = -1;
 
     /** We need to store a copy of the key. */
     protected byte[] keyBuffer = new byte[INITIAL_KEY_BUFFER_SIZE];
@@ -112,21 +121,30 @@ abstract class BufferedDataBlockEncoder 
   protected abstract static class
       BufferedEncodedSeeker<STATE extends SeekerState>
       implements EncodedSeeker {
-
+    protected HFileBlockDecodingContext decodingCtx;
     protected final KVComparator comparator;
     protected final SamePrefixComparator<byte[]> samePrefixComparator;
     protected ByteBuffer currentBuffer;
     protected STATE current = createSeekerState(); // always valid
     protected STATE previous = createSeekerState(); // may not be valid
 
-    @SuppressWarnings("unchecked")
-    public BufferedEncodedSeeker(KVComparator comparator) {
+    public BufferedEncodedSeeker(KVComparator comparator,
+        HFileBlockDecodingContext decodingCtx) {
       this.comparator = comparator;
       if (comparator instanceof SamePrefixComparator) {
         this.samePrefixComparator = (SamePrefixComparator<byte[]>) comparator;
       } else {
         this.samePrefixComparator = null;
       }
+      this.decodingCtx = decodingCtx;
+    }
+    
+    protected boolean includesMvcc() {
+      return this.decodingCtx.getHFileContext().shouldIncludeMvcc();
+    }
+
+    protected boolean includesTags() {
+      return this.decodingCtx.getHFileContext().shouldIncludeTags();
     }
 
     @Override
@@ -152,21 +170,33 @@ abstract class BufferedDataBlockEncoder 
 
     @Override
     public ByteBuffer getKeyValueBuffer() {
-      ByteBuffer kvBuffer = ByteBuffer.allocate(
-          2 * Bytes.SIZEOF_INT + current.keyLength + current.valueLength);
+      ByteBuffer kvBuffer = createKVBuffer();
       kvBuffer.putInt(current.keyLength);
       kvBuffer.putInt(current.valueLength);
       kvBuffer.put(current.keyBuffer, 0, current.keyLength);
       kvBuffer.put(currentBuffer.array(),
           currentBuffer.arrayOffset() + current.valueOffset,
           current.valueLength);
+      if (current.tagLength > 0) {
+        kvBuffer.putShort((short) current.tagLength);
+        kvBuffer.put(currentBuffer.array(), currentBuffer.arrayOffset() + current.tagOffset,
+            current.tagLength);
+      }
+      return kvBuffer;
+    }
+
+    protected ByteBuffer createKVBuffer() {
+      int kvBufSize = (int) KeyValue.getKeyValueDataStructureSize(current.keyLength,
+          current.valueLength, current.tagLength);
+      ByteBuffer kvBuffer = ByteBuffer.allocate(kvBufSize);
       return kvBuffer;
     }
 
     @Override
     public KeyValue getKeyValue() {
       ByteBuffer kvBuf = getKeyValueBuffer();
-      KeyValue kv = new KeyValue(kvBuf.array(), kvBuf.arrayOffset());
+      KeyValue kv = new KeyValue(kvBuf.array(), kvBuf.arrayOffset(), kvBuf.array().length
+          - kvBuf.arrayOffset());
       kv.setMvccVersion(current.memstoreTS);
       return kv;
     }
@@ -188,6 +218,12 @@ abstract class BufferedDataBlockEncoder 
       return true;
     }
 
+    // Reads the tag section at the current buffer position: a compressed-int
+    // length followed by that many tag bytes. Records length/offset in the
+    // seeker state and advances the buffer past the tag bytes.
+    public void decodeTags() {
+      current.tagLength = ByteBufferUtils.readCompressedInt(currentBuffer);
+      current.tagOffset = currentBuffer.position();
+      ByteBufferUtils.skip(currentBuffer, current.tagLength);
+    }
+
     @Override
     public int seekToKeyInBlock(byte[] key, int offset, int length,
         boolean seekBefore) {
@@ -276,8 +312,13 @@ abstract class BufferedDataBlockEncoder 
   }
 
   protected final void afterEncodingKeyValue(ByteBuffer in,
-      DataOutputStream out, boolean includesMemstoreTS) {
-    if (includesMemstoreTS) {
+      DataOutputStream out, HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
+    if (encodingCtx.getHFileContext().shouldIncludeTags()) {
+      int tagsLength = in.getShort();
+      ByteBufferUtils.putCompressedInt(out, tagsLength);
+      ByteBufferUtils.moveBufferToStream(out, in, tagsLength);
+    }
+    if (encodingCtx.getHFileContext().shouldIncludeMvcc()) {
       // Copy memstore timestamp from the byte buffer to the output stream.
       long memstoreTS = -1;
       try {
@@ -291,8 +332,13 @@ abstract class BufferedDataBlockEncoder 
   }
 
   protected final void afterDecodingKeyValue(DataInputStream source,
-      ByteBuffer dest, boolean includesMemstoreTS) {
-    if (includesMemstoreTS) {
+      ByteBuffer dest, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
+    if (decodingCtx.getHFileContext().shouldIncludeTags()) {
+      int tagsLength = ByteBufferUtils.readCompressedInt(source);
+      dest.putShort((short)tagsLength);
+      ByteBufferUtils.copyFromStreamToBuffer(dest, source, tagsLength);
+    }
+    if (decodingCtx.getHFileContext().shouldIncludeMvcc()) {
       long memstoreTS = -1;
       try {
         // Copy memstore timestamp from the data input stream to the byte
@@ -307,33 +353,32 @@ abstract class BufferedDataBlockEncoder 
   }
 
   @Override
-  public HFileBlockEncodingContext newDataBlockEncodingContext(
-      Algorithm compressionAlgorithm,
-      DataBlockEncoding encoding, byte[] header) {
-    return new HFileBlockDefaultEncodingContext(
-        compressionAlgorithm, encoding, header);
+  public HFileBlockEncodingContext newDataBlockEncodingContext(DataBlockEncoding encoding,
+      byte[] header, HFileContext meta) {
+    return new HFileBlockDefaultEncodingContext(encoding, header, meta);
   }
 
   @Override
-  public HFileBlockDecodingContext newDataBlockDecodingContext(
-      Algorithm compressionAlgorithm) {
-    return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
+  public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
+    return new HFileBlockDefaultDecodingContext(meta);
   }
 
   /**
    * Compress KeyValues and write them to output buffer.
    * @param out Where to write compressed data.
    * @param in Source of KeyValue for compression.
-   * @param includesMemstoreTS true if including memstore timestamp after every
-   *          key-value pair
+   * @param encodingCtx use the Encoding ctx associated with the current block
    * @throws IOException If there is an error writing to output stream.
    */
   public abstract void internalEncodeKeyValues(DataOutputStream out,
-      ByteBuffer in, boolean includesMemstoreTS) throws IOException;
+      ByteBuffer in, HFileBlockDefaultEncodingContext encodingCtx) throws IOException;
+
+  public abstract ByteBuffer internalDecodeKeyValues(DataInputStream source,
+      int allocateHeaderLength, int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx)
+      throws IOException;
 
   @Override
   public void encodeKeyValues(ByteBuffer in,
-      boolean includesMemstoreTS,
       HFileBlockEncodingContext blkEncodingCtx) throws IOException {
     if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
       throw new IOException (this.getClass().getName() + " only accepts "
@@ -347,7 +392,7 @@ abstract class BufferedDataBlockEncoder 
     DataOutputStream dataOut =
         ((HFileBlockDefaultEncodingContext) encodingCtx)
         .getOutputStreamForEncoder();
-    internalEncodeKeyValues(dataOut, in, includesMemstoreTS);
+    internalEncodeKeyValues(dataOut, in, encodingCtx);
     if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
       encodingCtx.postEncoding(BlockType.ENCODED_DATA);
     } else {

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java Sat Sep 21 18:01:32 2013
@@ -34,24 +34,12 @@ import org.apache.hadoop.hbase.util.Byte
 public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder {
   @Override
   public void internalEncodeKeyValues(DataOutputStream out,
-      ByteBuffer in, boolean includesMemstoreTS) throws IOException {
+      ByteBuffer in, HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
     in.rewind();
     ByteBufferUtils.putInt(out, in.limit());
     ByteBufferUtils.moveBufferToStream(out, in, in.limit());
   }
 
-  @Override
-  public ByteBuffer decodeKeyValues(DataInputStream source,
-      int preserveHeaderLength, int skipLastBytes, boolean includesMemstoreTS)
-      throws IOException {
-    int decompressedSize = source.readInt();
-    ByteBuffer buffer = ByteBuffer.allocate(decompressedSize +
-        preserveHeaderLength);
-    buffer.position(preserveHeaderLength);
-    ByteBufferUtils.copyFromStreamToBuffer(buffer, source, decompressedSize);
-
-    return buffer;
-  }
 
   @Override
   public ByteBuffer getFirstKeyInBlock(ByteBuffer block) {
@@ -68,8 +56,8 @@ public class CopyKeyDataBlockEncoder ext
 
   @Override
   public EncodedSeeker createSeeker(KVComparator comparator,
-      final boolean includesMemstoreTS) {
-    return new BufferedEncodedSeeker<SeekerState>(comparator) {
+      final HFileBlockDecodingContext decodingCtx) {
+    return new BufferedEncodedSeeker<SeekerState>(comparator, decodingCtx) {
       @Override
       protected void decodeNext() {
         current.keyLength = currentBuffer.getInt();
@@ -78,7 +66,11 @@ public class CopyKeyDataBlockEncoder ext
         currentBuffer.get(current.keyBuffer, 0, current.keyLength);
         current.valueOffset = currentBuffer.position();
         ByteBufferUtils.skip(currentBuffer, current.valueLength);
-        if (includesMemstoreTS) {
+        if (includesTags()) {
+          current.tagLength = currentBuffer.getShort();
+          ByteBufferUtils.skip(currentBuffer, current.tagLength);
+        }
+        if (includesMvcc()) {
           current.memstoreTS = ByteBufferUtils.readVLong(currentBuffer);
         } else {
           current.memstoreTS = 0;
@@ -95,4 +87,16 @@ public class CopyKeyDataBlockEncoder ext
     };
   }
 
+  @Override
+  public ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength,
+      int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
+    // "Copy key" encoding stores the block verbatim: read the recorded size,
+    // reserve allocateHeaderLength bytes at the front, and copy the raw bytes.
+    int decompressedSize = source.readInt();
+    ByteBuffer buffer = ByteBuffer.allocate(decompressedSize +
+        allocateHeaderLength);
+    buffer.position(allocateHeaderLength);
+    ByteBufferUtils.copyFromStreamToBuffer(buffer, source, decompressedSize);
+
+    return buffer;
+  }
+
 }

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java Sat Sep 21 18:01:32 2013
@@ -23,8 +23,7 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
-import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
 
 /**
  * Encoding of KeyValue. It aims to be fast and efficient using assumptions:
@@ -38,7 +37,7 @@ import org.apache.hadoop.io.RawComparato
  *
  * After encoding, it also optionally compresses the encoded data if a
  * compression algorithm is specified in HFileBlockEncodingContext argument of
- * {@link #encodeKeyValues(ByteBuffer, boolean, HFileBlockEncodingContext)}.
+ * {@link #encodeKeyValues(ByteBuffer, HFileBlockEncodingContext)}.
  */
 @InterfaceAudience.Private
 public interface DataBlockEncoder {
@@ -49,44 +48,23 @@ public interface DataBlockEncoder {
    *
    * @param in
    *          Source of KeyValue for compression.
-   * @param includesMemstoreTS
-   *          true if including memstore timestamp after every key-value pair
-   * @param encodingContext
+   * @param encodingCtx
    *          the encoding context which will contain encoded uncompressed bytes
    *          as well as compressed encoded bytes if compression is enabled, and
    *          also it will reuse resources across multiple calls.
    * @throws IOException
    *           If there is an error writing to output stream.
    */
-  void encodeKeyValues(
-    ByteBuffer in, boolean includesMemstoreTS, HFileBlockEncodingContext encodingContext
-  ) throws IOException;
+  void encodeKeyValues(ByteBuffer in, HFileBlockEncodingContext encodingCtx) throws IOException;
 
   /**
    * Decode.
    * @param source Compressed stream of KeyValues.
-   * @param includesMemstoreTS true if including memstore timestamp after every
-   *          key-value pair
+   * @param decodingCtx the decoding context carrying the HFile meta data
    * @return Uncompressed block of KeyValues.
    * @throws IOException If there is an error in source.
    */
-  ByteBuffer decodeKeyValues(
-    DataInputStream source, boolean includesMemstoreTS
-  ) throws IOException;
-
-  /**
-   * Uncompress.
-   * @param source encoded stream of KeyValues.
-   * @param allocateHeaderLength allocate this many bytes for the header.
-   * @param skipLastBytes Do not copy n last bytes.
-   * @param includesMemstoreTS true if including memstore timestamp after every
-   *          key-value pair
-   * @return Uncompressed block of KeyValues.
-   * @throws IOException If there is an error in source.
-   */
-  ByteBuffer decodeKeyValues(
-    DataInputStream source, int allocateHeaderLength, int skipLastBytes, boolean includesMemstoreTS
-  )
+  ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx)
       throws IOException;
 
   /**
@@ -102,42 +80,36 @@ public interface DataBlockEncoder {
   /**
    * Create a HFileBlock seeker which find KeyValues within a block.
    * @param comparator what kind of comparison should be used
-   * @param includesMemstoreTS true if including memstore timestamp after every
-   *          key-value pair
+   * @param decodingCtx the decoding context the seeker should use
    * @return A newly created seeker.
    */
-  EncodedSeeker createSeeker(
-    KVComparator comparator, boolean includesMemstoreTS
-  );
+  EncodedSeeker createSeeker(KVComparator comparator, 
+      HFileBlockDecodingContext decodingCtx);
 
   /**
    * Creates a encoder specific encoding context
    *
-   * @param compressionAlgorithm
-   *          compression algorithm used if the final data needs to be
-   *          compressed
    * @param encoding
    *          encoding strategy used
    * @param headerBytes
    *          header bytes to be written, put a dummy header here if the header
    *          is unknown
+   * @param meta
+   *          HFile meta data
    * @return a newly created encoding context
    */
   HFileBlockEncodingContext newDataBlockEncodingContext(
-    Algorithm compressionAlgorithm, DataBlockEncoding encoding, byte[] headerBytes
-  );
+      DataBlockEncoding encoding, byte[] headerBytes, HFileContext meta);
 
   /**
    * Creates an encoder specific decoding context, which will prepare the data
    * before actual decoding
    *
-   * @param compressionAlgorithm
-   *          compression algorithm used if the data needs to be decompressed
+   * @param meta
+   *          HFile meta data        
    * @return a newly created decoding context
    */
-  HFileBlockDecodingContext newDataBlockDecodingContext(
-    Algorithm compressionAlgorithm
-  );
+  HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta);
 
   /**
    * An interface which enable to seek while underlying data is encoded.

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java Sat Sep 21 18:01:32 2013
@@ -318,7 +318,7 @@ public class DiffKeyDeltaEncoder extends
 
   @Override
   public void internalEncodeKeyValues(DataOutputStream out,
-      ByteBuffer in, boolean includesMemstoreTS) throws IOException {
+      ByteBuffer in, HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
     in.rewind();
     ByteBufferUtils.putInt(out, in.limit());
     DiffCompressionState previousState = new DiffCompressionState();
@@ -326,7 +326,7 @@ public class DiffKeyDeltaEncoder extends
     while (in.hasRemaining()) {
       compressSingleKeyValue(previousState, currentState,
           out, in);
-      afterEncodingKeyValue(in, out, includesMemstoreTS);
+      afterEncodingKeyValue(in, out, encodingCtx);
 
       // swap previousState <-> currentState
       DiffCompressionState tmp = previousState;
@@ -335,26 +335,6 @@ public class DiffKeyDeltaEncoder extends
     }
   }
 
-  @Override
-  public ByteBuffer decodeKeyValues(DataInputStream source,
-      int allocHeaderLength, int skipLastBytes, boolean includesMemstoreTS)
-      throws IOException {
-    int decompressedSize = source.readInt();
-    ByteBuffer buffer = ByteBuffer.allocate(decompressedSize +
-        allocHeaderLength);
-    buffer.position(allocHeaderLength);
-    DiffCompressionState state = new DiffCompressionState();
-    while (source.available() > skipLastBytes) {
-      uncompressSingleKeyValue(source, buffer, state);
-      afterDecodingKeyValue(source, buffer, includesMemstoreTS);
-    }
-
-    if (source.available() != skipLastBytes) {
-      throw new IllegalStateException("Read too much bytes.");
-    }
-
-    return buffer;
-  }
 
   @Override
   public ByteBuffer getFirstKeyInBlock(ByteBuffer block) {
@@ -424,8 +404,8 @@ public class DiffKeyDeltaEncoder extends
 
   @Override
   public EncodedSeeker createSeeker(KVComparator comparator,
-      final boolean includesMemstoreTS) {
-    return new BufferedEncodedSeeker<DiffSeekerState>(comparator) {
+      HFileBlockDecodingContext decodingCtx) {
+    return new BufferedEncodedSeeker<DiffSeekerState>(comparator, decodingCtx) {
       private byte[] familyNameWithSize;
       private static final int TIMESTAMP_WITH_TYPE_LENGTH =
           Bytes.SIZEOF_LONG + Bytes.SIZEOF_BYTE;
@@ -517,7 +497,10 @@ public class DiffKeyDeltaEncoder extends
         current.valueOffset = currentBuffer.position();
         ByteBufferUtils.skip(currentBuffer, current.valueLength);
 
-        if (includesMemstoreTS) {
+        if (includesTags()) {
+          decodeTags();
+        }
+        if (includesMvcc()) {
           current.memstoreTS = ByteBufferUtils.readVLong(currentBuffer);
         } else {
           current.memstoreTS = 0;
@@ -549,4 +532,24 @@ public class DiffKeyDeltaEncoder extends
       }
     };
   }
+
+  @Override
+  public ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength,
+      int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
+    // Allocate the output buffer with allocateHeaderLength spare bytes at the
+    // front, then uncompress KVs one at a time (diff decoding is stateful, so
+    // state carries the previous key between iterations).
+    int decompressedSize = source.readInt();
+    ByteBuffer buffer = ByteBuffer.allocate(decompressedSize +
+        allocateHeaderLength);
+    buffer.position(allocateHeaderLength);
+    DiffCompressionState state = new DiffCompressionState();
+    // Stop when only the skipLastBytes trailer remains in the stream.
+    while (source.available() > skipLastBytes) {
+      uncompressSingleKeyValue(source, buffer, state);
+      // Also consume the per-KV tag/mvcc suffix, depending on the HFile context.
+      afterDecodingKeyValue(source, buffer, decodingCtx);
+    }
+
+    if (source.available() != skipLastBytes) {
+      throw new IllegalStateException("Read too much bytes.");
+    }
+
+    return buffer;
+  }
 }



Mime
View raw message