hbase-commits mailing list archives

From: ramkris...@apache.org
Subject: svn commit: r1525269 [3/8] - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ hbase-common/src/main/java/org/apache/hadoop/hbase/ hbase-common/src/main/java/org/apa...
Date: Sat, 21 Sep 2013 18:01:35 GMT
Modified: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java (original)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java Sat Sep 21 18:01:32 2013
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.KeyValueU
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.column.ColumnSectionWriter;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.other.CellTypeEncoder;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.row.RowSectionWriter;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
@@ -42,7 +43,6 @@ import org.apache.hadoop.hbase.util.byte
 import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet;
 import org.apache.hadoop.hbase.util.vint.UFIntTool;
 import org.apache.hadoop.io.WritableUtils;
-
 /**
  * This is the primary class for converting a CellOutputStream into an encoded byte[]. As Cells are
  * added they are completely copied into the various encoding structures. This is important because
@@ -86,6 +86,7 @@ public class PrefixTreeEncoder implement
   protected ByteRange rowRange;
   protected ByteRange familyRange;
   protected ByteRange qualifierRange;
+  protected ByteRange tagsRange;
 
   /*
    * incoming Cell fields are copied into these arrays
@@ -94,7 +95,9 @@ public class PrefixTreeEncoder implement
   protected long[] mvccVersions;
   protected byte[] typeBytes;
   protected int[] valueOffsets;
+  protected int[] tagsOffsets;
   protected byte[] values;
+  protected byte[] tags;
 
   protected PrefixTreeBlockMeta blockMeta;
 
@@ -114,7 +117,7 @@ public class PrefixTreeEncoder implement
    */
   protected ByteRangeSet familyDeduplicator;
   protected ByteRangeSet qualifierDeduplicator;
-
+  protected ByteRangeSet tagsDeduplicator;
   /*
    * Feed sorted byte[]s into these tokenizers which will convert the byte[]s to an in-memory
    * trie structure with nodes connected by memory pointers (not serializable yet).
@@ -122,6 +125,7 @@ public class PrefixTreeEncoder implement
   protected Tokenizer rowTokenizer;
   protected Tokenizer familyTokenizer;
   protected Tokenizer qualifierTokenizer;
+  protected Tokenizer tagsTokenizer;
 
   /*
    * Writers take an in-memory trie, sort the nodes, calculate offsets and lengths, and write
@@ -130,6 +134,7 @@ public class PrefixTreeEncoder implement
   protected RowSectionWriter rowWriter;
   protected ColumnSectionWriter familyWriter;
   protected ColumnSectionWriter qualifierWriter;
+  protected ColumnSectionWriter tagsWriter;
 
   /*
    * Integers used for counting cells and bytes.  We keep track of the size of the Cells as if they
@@ -138,7 +143,9 @@ public class PrefixTreeEncoder implement
   protected int totalCells = 0;
   protected int totalUnencodedBytes = 0;//numBytes if the cells were KeyValues
   protected int totalValueBytes = 0;
+  protected int totalTagBytes = 0;
   protected int maxValueLength = 0;
+  protected int maxTagLength = 0;
   protected int totalBytes = 0;//
 
 
@@ -170,6 +177,7 @@ public class PrefixTreeEncoder implement
     this.rowWriter = new RowSectionWriter();
     this.familyWriter = new ColumnSectionWriter();
     this.qualifierWriter = new ColumnSectionWriter();
+    initializeTagHelpers();
 
     reset(outputStream, includeMvccVersion);
   }
@@ -179,9 +187,11 @@ public class PrefixTreeEncoder implement
     this.includeMvccVersion = includeMvccVersion;
     this.outputStream = outputStream;
     valueOffsets[0] = 0;
-
     familyDeduplicator.reset();
     qualifierDeduplicator.reset();
+    tagsDeduplicator.reset();
+    tagsWriter.reset();
+    tagsTokenizer.reset();
     rowTokenizer.reset();
     timestampEncoder.reset();
     mvccVersionEncoder.reset();
@@ -199,6 +209,14 @@ public class PrefixTreeEncoder implement
     totalBytes = 0;
   }
 
+  protected void initializeTagHelpers() {
+    this.tagsRange = new SimpleByteRange();
+    this.tagsDeduplicator = USE_HASH_COLUMN_SORTER ? new ByteRangeHashSet()
+    : new ByteRangeTreeSet();
+    this.tagsTokenizer = new Tokenizer();
+    this.tagsWriter = new ColumnSectionWriter();
+  }
+
   /**
    * Check that the arrays used to hold cell fragments are large enough for the cell that is being
    * added. Since the PrefixTreeEncoder is cached between uses, these arrays may grow during the
@@ -259,10 +277,16 @@ public class PrefixTreeEncoder implement
     rowTokenizer.addSorted(CellUtil.fillRowRange(cell, rowRange));
     addFamilyPart(cell);
     addQualifierPart(cell);
+    addTagPart(cell);
     addAfterRowFamilyQualifier(cell);
   }
 
 
+  private void addTagPart(Cell cell) {
+    CellUtil.fillTagRange(cell, tagsRange);
+    tagsDeduplicator.add(tagsRange);
+  }
+
   /***************** internal add methods ************************/
 
   private void addAfterRowFamilyQualifier(Cell cell){
@@ -333,6 +357,7 @@ public class PrefixTreeEncoder implement
     rowWriter.writeBytes(outputStream);
     familyWriter.writeBytes(outputStream);
     qualifierWriter.writeBytes(outputStream);
+    tagsWriter.writeBytes(outputStream);
     timestampEncoder.writeBytes(outputStream);
     mvccVersionEncoder.writeBytes(outputStream);
     //CellType bytes are in the row nodes.  there is no additional type section
@@ -349,12 +374,13 @@ public class PrefixTreeEncoder implement
     blockMeta.setValueOffsetWidth(UFIntTool.numBytes(lastValueOffset));
     blockMeta.setValueLengthWidth(UFIntTool.numBytes(maxValueLength));
     blockMeta.setNumValueBytes(totalValueBytes);
-    totalBytes += totalValueBytes;
+    totalBytes += totalTagBytes + totalValueBytes;
 
     //these compile methods will add to totalBytes
     compileTypes();
     compileMvccVersions();
     compileTimestamps();
+    compileTags();
     compileQualifiers();
     compileFamilies();
     compileRows();
@@ -397,7 +423,7 @@ public class PrefixTreeEncoder implement
     blockMeta.setNumUniqueQualifiers(qualifierDeduplicator.size());
     qualifierDeduplicator.compile();
     qualifierTokenizer.addAll(qualifierDeduplicator.getSortedRanges());
-    qualifierWriter.reconstruct(blockMeta, qualifierTokenizer, false);
+    qualifierWriter.reconstruct(blockMeta, qualifierTokenizer, ColumnNodeType.QUALIFIER);
     qualifierWriter.compile();
     int numQualifierBytes = qualifierWriter.getNumBytes();
     blockMeta.setNumQualifierBytes(numQualifierBytes);
@@ -408,13 +434,24 @@ public class PrefixTreeEncoder implement
     blockMeta.setNumUniqueFamilies(familyDeduplicator.size());
     familyDeduplicator.compile();
     familyTokenizer.addAll(familyDeduplicator.getSortedRanges());
-    familyWriter.reconstruct(blockMeta, familyTokenizer, true);
+    familyWriter.reconstruct(blockMeta, familyTokenizer, ColumnNodeType.FAMILY);
     familyWriter.compile();
     int numFamilyBytes = familyWriter.getNumBytes();
     blockMeta.setNumFamilyBytes(numFamilyBytes);
     totalBytes += numFamilyBytes;
   }
 
+  protected void compileTags() {
+    blockMeta.setNumUniqueTags(tagsDeduplicator.size());
+    tagsDeduplicator.compile();
+    tagsTokenizer.addAll(tagsDeduplicator.getSortedRanges());
+    tagsWriter.reconstruct(blockMeta, tagsTokenizer, ColumnNodeType.TAGS);
+    tagsWriter.compile();
+    int numTagBytes = tagsWriter.getNumBytes();
+    blockMeta.setNumTagsBytes(numTagBytes);
+    totalBytes += numTagBytes;
+  }
+
   protected void compileRows() {
     rowWriter.reconstruct(this);
     rowWriter.compile();
@@ -476,6 +513,10 @@ public class PrefixTreeEncoder implement
     return qualifierDeduplicator;
   }
 
+  public ByteRangeSet getTagSorter() {
+    return tagsDeduplicator;
+  }
+
   public ColumnSectionWriter getFamilyWriter() {
     return familyWriter;
   }
@@ -484,6 +525,10 @@ public class PrefixTreeEncoder implement
     return qualifierWriter;
   }
 
+  public ColumnSectionWriter getTagWriter() {
+    return tagsWriter;
+  }
+
   public RowSectionWriter getRowWriter() {
     return rowWriter;
   }

Modified: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnNodeWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnNodeWriter.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnNodeWriter.java (original)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnNodeWriter.java Sat Sep 21 18:01:32 2013
@@ -23,6 +23,7 @@ import java.io.OutputStream;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -48,20 +49,19 @@ public class ColumnNodeWriter{
   protected TokenizerNode builderNode;
   protected PrefixTreeBlockMeta blockMeta;
 
-  protected boolean familyVsQualifier;
-
   protected int tokenLength;
   protected byte[] token;
   protected int parentStartPosition;
+  protected ColumnNodeType nodeType;
 
 
   /*************** construct **************************/
 
   public ColumnNodeWriter(PrefixTreeBlockMeta blockMeta, TokenizerNode builderNode,
-      boolean familyVsQualifier) {
+      ColumnNodeType nodeType) {
     this.blockMeta = blockMeta;
     this.builderNode = builderNode;
-    this.familyVsQualifier = familyVsQualifier;
+    this.nodeType = nodeType;
     calculateTokenLength();
   }
 
@@ -93,10 +93,12 @@ public class ColumnNodeWriter{
 
   public void writeBytes(OutputStream os) throws IOException {
     int parentOffsetWidth;
-    if (familyVsQualifier) {
+    if (this.nodeType == ColumnNodeType.FAMILY) {
       parentOffsetWidth = blockMeta.getFamilyOffsetWidth();
-    } else {
+    } else if (this.nodeType == ColumnNodeType.QUALIFIER) {
       parentOffsetWidth = blockMeta.getQualifierOffsetWidth();
+    } else {
+      parentOffsetWidth = blockMeta.getTagsOffsetWidth();
     }
     UVIntTool.writeBytes(tokenLength, os);
     os.write(token);

Modified: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java (original)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java Sat Sep 21 18:01:32 2013
@@ -25,6 +25,7 @@ import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
 import org.apache.hadoop.hbase.util.CollectionUtils;
@@ -60,7 +61,7 @@ public class ColumnSectionWriter {
 
   private PrefixTreeBlockMeta blockMeta;
 
-  private boolean familyVsQualifier;
+  private ColumnNodeType nodeType;
   private Tokenizer tokenizer;
   private int numBytes = 0;
   private ArrayList<TokenizerNode> nonLeaves;
@@ -79,16 +80,16 @@ public class ColumnSectionWriter {
   }
 
   public ColumnSectionWriter(PrefixTreeBlockMeta blockMeta, Tokenizer builder,
-      boolean familyVsQualifier) {
+      ColumnNodeType nodeType) {
     this();// init collections
-    reconstruct(blockMeta, builder, familyVsQualifier);
+    reconstruct(blockMeta, builder, nodeType);
   }
 
   public void reconstruct(PrefixTreeBlockMeta blockMeta, Tokenizer builder,
-      boolean familyVsQualifier) {
+      ColumnNodeType nodeType) {
     this.blockMeta = blockMeta;
     this.tokenizer = builder;
-    this.familyVsQualifier = familyVsQualifier;
+    this.nodeType = nodeType;
   }
 
   public void reset() {
@@ -102,14 +103,19 @@ public class ColumnSectionWriter {
 	/****************** methods *******************************/
 
   public ColumnSectionWriter compile() {
-    if (familyVsQualifier) {
+    if (this.nodeType == ColumnNodeType.FAMILY) {
       // do nothing. max family length fixed at Byte.MAX_VALUE
-    } else {
+    } else if (this.nodeType == ColumnNodeType.QUALIFIER) {
       blockMeta.setMaxQualifierLength(tokenizer.getMaxElementLength());
+    } else {
+      blockMeta.setMaxTagsLength(tokenizer.getMaxElementLength());
     }
+    compilerInternals();
+    return this;
+  }
 
+  protected void compilerInternals() {
     tokenizer.setNodeFirstInsertionIndexes();
-
     tokenizer.appendNodes(nonLeaves, true, false);
 
     tokenizer.appendNodes(leaves, false, true);
@@ -121,7 +127,7 @@ public class ColumnSectionWriter {
     columnNodeWriters = Lists.newArrayListWithCapacity(CollectionUtils.nullSafeSize(allNodes));
     for (int i = 0; i < allNodes.size(); ++i) {
       TokenizerNode node = allNodes.get(i);
-      columnNodeWriters.add(new ColumnNodeWriter(blockMeta, node, familyVsQualifier));
+      columnNodeWriters.add(new ColumnNodeWriter(blockMeta, node, this.nodeType));
     }
 
     // leaf widths are known at this point, so add them up
@@ -142,10 +148,12 @@ public class ColumnSectionWriter {
         break;
       }// it fits
     }
-    if (familyVsQualifier) {
+    if (this.nodeType == ColumnNodeType.FAMILY) {
       blockMeta.setFamilyOffsetWidth(parentOffsetWidth);
-    } else {
+    } else if (this.nodeType == ColumnNodeType.QUALIFIER) {
       blockMeta.setQualifierOffsetWidth(parentOffsetWidth);
+    } else {
+      blockMeta.setTagsOffsetWidth(parentOffsetWidth);
     }
 
     int forwardIndex = 0;
@@ -165,8 +173,6 @@ public class ColumnSectionWriter {
     }
 
     tokenizer.appendOutputArrayOffsets(outputArrayOffsets);
-
-    return this;
   }
 
   public void writeBytes(OutputStream os) throws IOException {

Added: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/ColumnNodeType.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/ColumnNodeType.java?rev=1525269&view=auto
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/ColumnNodeType.java (added)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/ColumnNodeType.java Sat Sep 21 18:01:32 2013
@@ -0,0 +1,28 @@
+package org.apache.hadoop.hbase.codec.prefixtree.encode.other;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Specifies the type of columnnode writer.
+ */
+@InterfaceAudience.Private
+public enum ColumnNodeType {
+  FAMILY, QUALIFIER, TAGS;
+}
\ No newline at end of file
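
The enum above replaces the familyVsQualifier boolean that previously selected between the two column section types; tags needed a third case. A minimal usage sketch follows (the wrapper class and method names are hypothetical, added only for illustration; the no-arg ColumnSectionWriter constructor, reconstruct/compile calls, and ColumnNodeType constants all come from this patch):

    import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
    import org.apache.hadoop.hbase.codec.prefixtree.encode.column.ColumnSectionWriter;
    import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType;
    import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;

    public class ColumnNodeTypeSketch {
      // Illustrative only: the three-way type where the old boolean allowed two.
      static ColumnSectionWriter tagsWriter(PrefixTreeBlockMeta blockMeta, Tokenizer tagsTokenizer) {
        ColumnSectionWriter writer = new ColumnSectionWriter();
        // before this patch: writer.reconstruct(blockMeta, tokenizer, true /* family */ or false /* qualifier */)
        writer.reconstruct(blockMeta, tagsTokenizer, ColumnNodeType.TAGS);
        writer.compile();  // for TAGS this also records the tags offset width in blockMeta
        return writer;
      }
    }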

Modified: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowNodeWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowNodeWriter.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowNodeWriter.java (original)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowNodeWriter.java Sat Sep 21 18:01:32 2013
@@ -105,6 +105,7 @@ public class RowNodeWriter{
     if(tokenizerNode.hasOccurrences()){
       int fixedBytesPerCell = blockMeta.getFamilyOffsetWidth()
         + blockMeta.getQualifierOffsetWidth()
+        + blockMeta.getTagsOffsetWidth()
         + blockMeta.getTimestampIndexWidth()
         + blockMeta.getMvccVersionIndexWidth()
         + blockMeta.getKeyValueTypeWidth()
@@ -132,12 +133,12 @@ public class RowNodeWriter{
     //UFInt indexes and offsets for each cell in the row (if nub or leaf)
     writeFamilyNodeOffsets(os);
     writeQualifierNodeOffsets(os);
+    writeTagNodeOffsets(os);
     writeTimestampIndexes(os);
     writeMvccVersionIndexes(os);
     writeCellTypes(os);
     writeValueOffsets(os);
     writeValueLengths(os);
-
     //offsets to the children of this row trie node (if branch or nub)
     writeNextRowTrieNodeOffsets(os);
   }
@@ -220,6 +221,20 @@ public class RowNodeWriter{
     }
   }
 
+  protected void writeTagNodeOffsets(OutputStream os) throws IOException {
+    if (blockMeta.getTagsOffsetWidth() <= 0) {
+      return;
+    }
+    for (int i = 0; i < numCells; ++i) {
+      int cellInsertionIndex = tokenizerNode.getFirstInsertionIndex() + i;
+      int sortedIndex = prefixTreeEncoder.getTagSorter().getSortedIndexForInsertionId(
+        cellInsertionIndex);
+      int indexedTagOffset = prefixTreeEncoder.getTagWriter().getOutputArrayOffset(
+        sortedIndex);
+      UFIntTool.writeBytes(blockMeta.getTagsOffsetWidth(), indexedTagOffset, os);
+    }
+  }
+
   protected void writeTimestampIndexes(OutputStream os) throws IOException {
     if (blockMeta.getTimestampIndexWidth() <= 0) {
       return;
@@ -270,7 +285,6 @@ public class RowNodeWriter{
     }
   }
 
-
   /**
    * If a branch or a nub, the last thing we append are the UFInt offsets to the child row nodes.
    */

Modified: hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java (original)
+++ hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/keyvalue/TestKeyValueTool.java Sat Sep 21 18:01:32 2013
@@ -25,6 +25,8 @@ import java.util.List;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
 import org.apache.hadoop.hbase.codec.prefixtree.row.TestRowData;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataRandomKeyValuesWithTags;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataTrivialWithTags;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -47,9 +49,12 @@ public class TestKeyValueTool {
 
   @Test
   public void testRoundTripToBytes() {
+    if(rows instanceof TestRowDataTrivialWithTags || rows instanceof TestRowDataRandomKeyValuesWithTags) {
+      return;
+    }
     List<KeyValue> kvs = rows.getInputs();
     ByteBuffer bb = KeyValueTestUtil.toByteBufferAndRewind(kvs, false);
-    List<KeyValue> roundTrippedKvs = KeyValueTestUtil.rewindThenToList(bb, false);
+    List<KeyValue> roundTrippedKvs = KeyValueTestUtil.rewindThenToList(bb, false, false);
     Assert.assertArrayEquals(kvs.toArray(), roundTrippedKvs.toArray());
   }
 }

Modified: hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java (original)
+++ hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java Sat Sep 21 18:01:32 2013
@@ -26,6 +26,7 @@ import java.util.List;
 import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.column.ColumnReader;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.column.ColumnSectionWriter;
+import org.apache.hadoop.hbase.codec.prefixtree.encode.other.ColumnNodeType;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
 import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
 import org.apache.hadoop.hbase.util.ByteRange;
@@ -92,12 +93,12 @@ public class TestColumnBuilder {
     }
     Assert.assertEquals(sortedUniqueColumns.size(), builderOutputArrays.size());
 
-    writer = new ColumnSectionWriter(blockMeta, builder, false);
+    writer = new ColumnSectionWriter(blockMeta, builder, ColumnNodeType.QUALIFIER);
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     writer.compile().writeBytes(baos);
     bytes = baos.toByteArray();
     buffer = new byte[blockMeta.getMaxQualifierLength()];
-    reader = new ColumnReader(buffer, false);
+    reader = new ColumnReader(buffer, ColumnNodeType.QUALIFIER);
     reader.initOnBlock(blockMeta, bytes);
 
     List<TokenizerNode> builderNodes = Lists.newArrayList();

Modified: hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java (original)
+++ hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java Sat Sep 21 18:01:32 2013
@@ -32,10 +32,12 @@ import org.apache.hadoop.hbase.codec.pre
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataNumberStrings;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataQualifierByteOrdering;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataRandomKeyValues;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataRandomKeyValuesWithTags;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataSearcherRowMiss;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataSimple;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataSingleQualifier;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataTrivial;
+import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataTrivialWithTags;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataUrls;
 import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataUrlsExample;
 import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
@@ -65,6 +67,7 @@ public interface TestRowData {
       //simple
       all.add(new TestRowDataEmpty());
       all.add(new TestRowDataTrivial());
+      all.add(new TestRowDataTrivialWithTags());
       all.add(new TestRowDataSimple());
       all.add(new TestRowDataDeeper());
 
@@ -83,6 +86,7 @@ public interface TestRowData {
       all.add(new TestRowDataUrlsExample());
       all.add(new TestRowDataExerciseFInts());
       all.add(new TestRowDataRandomKeyValues());
+      all.add(new TestRowDataRandomKeyValuesWithTags());
       return all;
     }
 

Modified: hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java (original)
+++ hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java Sat Sep 21 18:01:32 2013
@@ -75,6 +75,7 @@ public class TestRowEncoder {
 
 	@Before
   public void compile() throws IOException {
+    // Always run with tags. But should also ensure that KVs without tags work fine
     os = new ByteArrayOutputStream(1 << 20);
     encoder = new PrefixTreeEncoder(os, includeMemstoreTS);
 
@@ -92,7 +93,8 @@ public class TestRowEncoder {
     blockMetaReader = new PrefixTreeBlockMeta(buffer);
 
     searcher = new PrefixTreeArraySearcher(blockMetaReader, blockMetaReader.getRowTreeDepth(),
-        blockMetaReader.getMaxRowLength(), blockMetaReader.getMaxQualifierLength());
+        blockMetaReader.getMaxRowLength(), blockMetaReader.getMaxQualifierLength(),
+        blockMetaReader.getMaxTagsLength());
     searcher.initOnBlock(blockMetaReader, outputBytes, includeMemstoreTS);
   }
 

Added: hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java?rev=1525269&view=auto
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java (added)
+++ hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java Sat Sep 21 18:01:32 2013
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
+
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
+
+import com.google.common.collect.Lists;
+/**
+ * Generated KVs with tags 
+ */
+public class TestRowDataRandomKeyValuesWithTags extends BaseTestRowData {
+  static List<KeyValue> d = Lists.newArrayList();
+  static RedundantKVGenerator generator = new RedundantKVGenerator();
+  static {
+    d = generator.generateTestKeyValues(1 << 10, true);
+  }
+
+  @Override
+  public List<KeyValue> getInputs() {
+    return d;
+  }
+}

Added: hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java?rev=1525269&view=auto
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java (added)
+++ hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java Sat Sep 21 18:01:32 2013
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.codec.prefixtree.row.data;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
+import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Assert;
+
+import com.google.common.collect.Lists;
+
+public class TestRowDataTrivialWithTags extends BaseTestRowData{
+  static byte[] rA = Bytes.toBytes("rA"), rB = Bytes.toBytes("rB"),// turn "r"
+                                                                   // into a
+                                                                   // branch for
+                                                                   // the
+                                                                   // Searcher
+                                                                   // tests
+      cf = Bytes.toBytes("fam"), cq0 = Bytes.toBytes("q0"), v0 = Bytes.toBytes("v0");
+
+  static long ts = 55L;
+
+  static List<KeyValue> d = Lists.newArrayList();
+  static {
+    List<Tag> tagList = new ArrayList<Tag>();
+    Tag t = new Tag((byte) 1, "visisbility");
+    tagList.add(t);
+    t = new Tag((byte) 2, "ACL");
+    tagList.add(t);
+    d.add(new KeyValue(rA, cf, cq0, ts, v0, tagList));
+    d.add(new KeyValue(rB, cf, cq0, ts, v0, tagList));
+  }
+
+  @Override
+  public List<KeyValue> getInputs() {
+    return d;
+  }
+
+  @Override
+  public void individualBlockMetaAssertions(PrefixTreeBlockMeta blockMeta) {
+    // node[0] -> root[r]
+    // node[1] -> leaf[A], etc
+    Assert.assertEquals(2, blockMeta.getRowTreeDepth());
+  }
+
+  @Override
+  public void individualSearcherAssertions(CellSearcher searcher) {
+    /**
+     * The searcher should get a token mismatch on the "r" branch. Assert that
+     * it skips not only rA, but rB as well.
+     */
+    KeyValue afterLast = KeyValue.createFirstOnRow(Bytes.toBytes("zzz"));
+    CellScannerPosition position = searcher.positionAtOrAfter(afterLast);
+    Assert.assertEquals(CellScannerPosition.AFTER_LAST, position);
+    Assert.assertNull(searcher.current());
+  }
+}
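
For context, a small standalone sketch of building tagged test KeyValues the way the two new fixtures do, using the Tag(byte, String) and tag-aware KeyValue constructors plus RedundantKVGenerator.generateTestKeyValues(int, boolean) seen above (class and variable names here are illustrative only):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;

    public class TaggedKeyValueSketch {
      public static void main(String[] args) {
        // Hand-built KeyValue carrying two tags, as in TestRowDataTrivialWithTags.
        List<Tag> tags = new ArrayList<Tag>();
        tags.add(new Tag((byte) 1, "visibility"));
        tags.add(new Tag((byte) 2, "ACL"));
        KeyValue kv = new KeyValue(Bytes.toBytes("rA"), Bytes.toBytes("fam"),
            Bytes.toBytes("q0"), 55L, Bytes.toBytes("v0"), tags);

        // Bulk generation with tags enabled, as in TestRowDataRandomKeyValuesWithTags.
        List<KeyValue> generated = new RedundantKVGenerator().generateTestKeyValues(1 << 10, true);
        System.out.println(kv + " plus " + generated.size() + " generated KVs");
      }
    }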

Modified: hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java (original)
+++ hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java Sat Sep 21 18:01:32 2013
@@ -201,6 +201,16 @@ public final class CellProtos {
      * <code>optional bytes value = 6;</code>
      */
     com.google.protobuf.ByteString getValue();
+
+    // optional bytes tags = 7;
+    /**
+     * <code>optional bytes tags = 7;</code>
+     */
+    boolean hasTags();
+    /**
+     * <code>optional bytes tags = 7;</code>
+     */
+    com.google.protobuf.ByteString getTags();
   }
   /**
    * Protobuf type {@code Cell}
@@ -294,6 +304,11 @@ public final class CellProtos {
               value_ = input.readBytes();
               break;
             }
+            case 58: {
+              bitField0_ |= 0x00000040;
+              tags_ = input.readBytes();
+              break;
+            }
           }
         }
       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -430,6 +445,22 @@ public final class CellProtos {
       return value_;
     }
 
+    // optional bytes tags = 7;
+    public static final int TAGS_FIELD_NUMBER = 7;
+    private com.google.protobuf.ByteString tags_;
+    /**
+     * <code>optional bytes tags = 7;</code>
+     */
+    public boolean hasTags() {
+      return ((bitField0_ & 0x00000040) == 0x00000040);
+    }
+    /**
+     * <code>optional bytes tags = 7;</code>
+     */
+    public com.google.protobuf.ByteString getTags() {
+      return tags_;
+    }
+
     private void initFields() {
       row_ = com.google.protobuf.ByteString.EMPTY;
       family_ = com.google.protobuf.ByteString.EMPTY;
@@ -437,6 +468,7 @@ public final class CellProtos {
       timestamp_ = 0L;
       cellType_ = org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType.MINIMUM;
       value_ = com.google.protobuf.ByteString.EMPTY;
+      tags_ = com.google.protobuf.ByteString.EMPTY;
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -468,6 +500,9 @@ public final class CellProtos {
       if (((bitField0_ & 0x00000020) == 0x00000020)) {
         output.writeBytes(6, value_);
       }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        output.writeBytes(7, tags_);
+      }
       getUnknownFields().writeTo(output);
     }
 
@@ -501,6 +536,10 @@ public final class CellProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeBytesSize(6, value_);
       }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(7, tags_);
+      }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
       return size;
@@ -554,6 +593,11 @@ public final class CellProtos {
         result = result && getValue()
             .equals(other.getValue());
       }
+      result = result && (hasTags() == other.hasTags());
+      if (hasTags()) {
+        result = result && getTags()
+            .equals(other.getTags());
+      }
       result = result &&
           getUnknownFields().equals(other.getUnknownFields());
       return result;
@@ -591,6 +635,10 @@ public final class CellProtos {
         hash = (37 * hash) + VALUE_FIELD_NUMBER;
         hash = (53 * hash) + getValue().hashCode();
       }
+      if (hasTags()) {
+        hash = (37 * hash) + TAGS_FIELD_NUMBER;
+        hash = (53 * hash) + getTags().hashCode();
+      }
       hash = (29 * hash) + getUnknownFields().hashCode();
       memoizedHashCode = hash;
       return hash;
@@ -717,6 +765,8 @@ public final class CellProtos {
         bitField0_ = (bitField0_ & ~0x00000010);
         value_ = com.google.protobuf.ByteString.EMPTY;
         bitField0_ = (bitField0_ & ~0x00000020);
+        tags_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000040);
         return this;
       }
 
@@ -769,6 +819,10 @@ public final class CellProtos {
           to_bitField0_ |= 0x00000020;
         }
         result.value_ = value_;
+        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+          to_bitField0_ |= 0x00000040;
+        }
+        result.tags_ = tags_;
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
@@ -803,6 +857,9 @@ public final class CellProtos {
         if (other.hasValue()) {
           setValue(other.getValue());
         }
+        if (other.hasTags()) {
+          setTags(other.getTags());
+        }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -1043,6 +1100,42 @@ public final class CellProtos {
         return this;
       }
 
+      // optional bytes tags = 7;
+      private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes tags = 7;</code>
+       */
+      public boolean hasTags() {
+        return ((bitField0_ & 0x00000040) == 0x00000040);
+      }
+      /**
+       * <code>optional bytes tags = 7;</code>
+       */
+      public com.google.protobuf.ByteString getTags() {
+        return tags_;
+      }
+      /**
+       * <code>optional bytes tags = 7;</code>
+       */
+      public Builder setTags(com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000040;
+        tags_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional bytes tags = 7;</code>
+       */
+      public Builder clearTags() {
+        bitField0_ = (bitField0_ & ~0x00000040);
+        tags_ = getDefaultInstance().getTags();
+        onChanged();
+        return this;
+      }
+
       // @@protoc_insertion_point(builder_scope:Cell)
     }
 
@@ -1116,6 +1209,16 @@ public final class CellProtos {
      * <code>optional bytes value = 6;</code>
      */
     com.google.protobuf.ByteString getValue();
+
+    // optional bytes tags = 7;
+    /**
+     * <code>optional bytes tags = 7;</code>
+     */
+    boolean hasTags();
+    /**
+     * <code>optional bytes tags = 7;</code>
+     */
+    com.google.protobuf.ByteString getTags();
   }
   /**
    * Protobuf type {@code KeyValue}
@@ -1210,6 +1313,11 @@ public final class CellProtos {
               value_ = input.readBytes();
               break;
             }
+            case 58: {
+              bitField0_ |= 0x00000040;
+              tags_ = input.readBytes();
+              break;
+            }
           }
         }
       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -1346,6 +1454,22 @@ public final class CellProtos {
       return value_;
     }
 
+    // optional bytes tags = 7;
+    public static final int TAGS_FIELD_NUMBER = 7;
+    private com.google.protobuf.ByteString tags_;
+    /**
+     * <code>optional bytes tags = 7;</code>
+     */
+    public boolean hasTags() {
+      return ((bitField0_ & 0x00000040) == 0x00000040);
+    }
+    /**
+     * <code>optional bytes tags = 7;</code>
+     */
+    public com.google.protobuf.ByteString getTags() {
+      return tags_;
+    }
+
     private void initFields() {
       row_ = com.google.protobuf.ByteString.EMPTY;
       family_ = com.google.protobuf.ByteString.EMPTY;
@@ -1353,6 +1477,7 @@ public final class CellProtos {
       timestamp_ = 0L;
       keyType_ = org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType.MINIMUM;
       value_ = com.google.protobuf.ByteString.EMPTY;
+      tags_ = com.google.protobuf.ByteString.EMPTY;
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -1396,6 +1521,9 @@ public final class CellProtos {
       if (((bitField0_ & 0x00000020) == 0x00000020)) {
         output.writeBytes(6, value_);
       }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        output.writeBytes(7, tags_);
+      }
       getUnknownFields().writeTo(output);
     }
 
@@ -1429,6 +1557,10 @@ public final class CellProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeBytesSize(6, value_);
       }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(7, tags_);
+      }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
       return size;
@@ -1482,6 +1614,11 @@ public final class CellProtos {
         result = result && getValue()
             .equals(other.getValue());
       }
+      result = result && (hasTags() == other.hasTags());
+      if (hasTags()) {
+        result = result && getTags()
+            .equals(other.getTags());
+      }
       result = result &&
           getUnknownFields().equals(other.getUnknownFields());
       return result;
@@ -1519,6 +1656,10 @@ public final class CellProtos {
         hash = (37 * hash) + VALUE_FIELD_NUMBER;
         hash = (53 * hash) + getValue().hashCode();
       }
+      if (hasTags()) {
+        hash = (37 * hash) + TAGS_FIELD_NUMBER;
+        hash = (53 * hash) + getTags().hashCode();
+      }
       hash = (29 * hash) + getUnknownFields().hashCode();
       memoizedHashCode = hash;
       return hash;
@@ -1646,6 +1787,8 @@ public final class CellProtos {
         bitField0_ = (bitField0_ & ~0x00000010);
         value_ = com.google.protobuf.ByteString.EMPTY;
         bitField0_ = (bitField0_ & ~0x00000020);
+        tags_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00000040);
         return this;
       }
 
@@ -1698,6 +1841,10 @@ public final class CellProtos {
           to_bitField0_ |= 0x00000020;
         }
         result.value_ = value_;
+        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+          to_bitField0_ |= 0x00000040;
+        }
+        result.tags_ = tags_;
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
@@ -1732,6 +1879,9 @@ public final class CellProtos {
         if (other.hasValue()) {
           setValue(other.getValue());
         }
+        if (other.hasTags()) {
+          setTags(other.getTags());
+        }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -1984,6 +2134,42 @@ public final class CellProtos {
         return this;
       }
 
+      // optional bytes tags = 7;
+      private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes tags = 7;</code>
+       */
+      public boolean hasTags() {
+        return ((bitField0_ & 0x00000040) == 0x00000040);
+      }
+      /**
+       * <code>optional bytes tags = 7;</code>
+       */
+      public com.google.protobuf.ByteString getTags() {
+        return tags_;
+      }
+      /**
+       * <code>optional bytes tags = 7;</code>
+       */
+      public Builder setTags(com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000040;
+        tags_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional bytes tags = 7;</code>
+       */
+      public Builder clearTags() {
+        bitField0_ = (bitField0_ & ~0x00000040);
+        tags_ = getDefaultInstance().getTags();
+        onChanged();
+        return this;
+      }
+
       // @@protoc_insertion_point(builder_scope:KeyValue)
     }
 
@@ -2014,17 +2200,18 @@ public final class CellProtos {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\nCell.proto\"v\n\004Cell\022\013\n\003row\030\001 \001(\014\022\016\n\006fam" +
-      "ily\030\002 \001(\014\022\021\n\tqualifier\030\003 \001(\014\022\021\n\ttimestam" +
-      "p\030\004 \001(\004\022\034\n\tcell_type\030\005 \001(\0162\t.CellType\022\r\n" +
-      "\005value\030\006 \001(\014\"y\n\010KeyValue\022\013\n\003row\030\001 \002(\014\022\016\n" +
-      "\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttime" +
-      "stamp\030\004 \001(\004\022\033\n\010key_type\030\005 \001(\0162\t.CellType" +
-      "\022\r\n\005value\030\006 \001(\014*`\n\010CellType\022\013\n\007MINIMUM\020\000" +
-      "\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELETE_COLUMN\020\014" +
-      "\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMUM\020\377\001B=\n*org" +
-      ".apache.hadoop.hbase.protobuf.generatedB",
-      "\nCellProtosH\001\240\001\001"
+      "\n\nCell.proto\"\204\001\n\004Cell\022\013\n\003row\030\001 \001(\014\022\016\n\006fa" +
+      "mily\030\002 \001(\014\022\021\n\tqualifier\030\003 \001(\014\022\021\n\ttimesta" +
+      "mp\030\004 \001(\004\022\034\n\tcell_type\030\005 \001(\0162\t.CellType\022\r" +
+      "\n\005value\030\006 \001(\014\022\014\n\004tags\030\007 \001(\014\"\207\001\n\010KeyValue" +
+      "\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tqualifi" +
+      "er\030\003 \002(\014\022\021\n\ttimestamp\030\004 \001(\004\022\033\n\010key_type\030" +
+      "\005 \001(\0162\t.CellType\022\r\n\005value\030\006 \001(\014\022\014\n\004tags\030" +
+      "\007 \001(\014*`\n\010CellType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022" +
+      "\n\n\006DELETE\020\010\022\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE" +
+      "_FAMILY\020\016\022\014\n\007MAXIMUM\020\377\001B=\n*org.apache.ha",
+      "doop.hbase.protobuf.generatedB\nCellProto" +
+      "sH\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -2036,13 +2223,13 @@ public final class CellProtos {
           internal_static_Cell_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_Cell_descriptor,
-              new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "CellType", "Value", });
+              new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "CellType", "Value", "Tags", });
           internal_static_KeyValue_descriptor =
             getDescriptor().getMessageTypes().get(1);
           internal_static_KeyValue_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_KeyValue_descriptor,
-              new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", });
+              new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", "Tags", });
           return null;
         }
       };
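
The regenerated CellProtos above add an optional "tags" field (field number 7) to both the Cell and KeyValue messages. A hedged sketch of setting it from client code, assuming only the builder accessors visible in the generated code (hasTags/getTags/setTags) plus the standard setters for the other bytes fields; the tag payload is shown as an opaque placeholder, not a real serialized tag:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.CellProtos;

    public class CellTagsProtoSketch {
      public static void main(String[] args) {
        CellProtos.Cell cell = CellProtos.Cell.newBuilder()
            .setRow(ByteString.copyFromUtf8("rA"))
            .setFamily(ByteString.copyFromUtf8("fam"))
            .setQualifier(ByteString.copyFromUtf8("q0"))
            .setValue(ByteString.copyFromUtf8("v0"))
            .setTags(ByteString.copyFromUtf8("opaque-tag-bytes"))  // placeholder payload
            .build();
        System.out.println(cell.hasTags() + " / " + cell.getTags().size() + " tag bytes");
      }
    }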

Modified: hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java (original)
+++ hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java Sat Sep 21 18:01:32 2013
@@ -9288,6 +9288,16 @@ public final class ClientProtos {
          * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
          */
         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType();
+
+        // optional bytes tags = 5;
+        /**
+         * <code>optional bytes tags = 5;</code>
+         */
+        boolean hasTags();
+        /**
+         * <code>optional bytes tags = 5;</code>
+         */
+        com.google.protobuf.ByteString getTags();
       }
       /**
        * Protobuf type {@code MutationProto.ColumnValue.QualifierValue}
@@ -9366,6 +9376,11 @@ public final class ClientProtos {
                   }
                   break;
                 }
+                case 42: {
+                  bitField0_ |= 0x00000010;
+                  tags_ = input.readBytes();
+                  break;
+                }
               }
             }
           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -9470,11 +9485,28 @@ public final class ClientProtos {
           return deleteType_;
         }
 
+        // optional bytes tags = 5;
+        public static final int TAGS_FIELD_NUMBER = 5;
+        private com.google.protobuf.ByteString tags_;
+        /**
+         * <code>optional bytes tags = 5;</code>
+         */
+        public boolean hasTags() {
+          return ((bitField0_ & 0x00000010) == 0x00000010);
+        }
+        /**
+         * <code>optional bytes tags = 5;</code>
+         */
+        public com.google.protobuf.ByteString getTags() {
+          return tags_;
+        }
+
         private void initFields() {
           qualifier_ = com.google.protobuf.ByteString.EMPTY;
           value_ = com.google.protobuf.ByteString.EMPTY;
           timestamp_ = 0L;
           deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
+          tags_ = com.google.protobuf.ByteString.EMPTY;
         }
         private byte memoizedIsInitialized = -1;
         public final boolean isInitialized() {
@@ -9500,6 +9532,9 @@ public final class ClientProtos {
           if (((bitField0_ & 0x00000008) == 0x00000008)) {
             output.writeEnum(4, deleteType_.getNumber());
           }
+          if (((bitField0_ & 0x00000010) == 0x00000010)) {
+            output.writeBytes(5, tags_);
+          }
           getUnknownFields().writeTo(output);
         }
 
@@ -9525,6 +9560,10 @@ public final class ClientProtos {
             size += com.google.protobuf.CodedOutputStream
               .computeEnumSize(4, deleteType_.getNumber());
           }
+          if (((bitField0_ & 0x00000010) == 0x00000010)) {
+            size += com.google.protobuf.CodedOutputStream
+              .computeBytesSize(5, tags_);
+          }
           size += getUnknownFields().getSerializedSize();
           memoizedSerializedSize = size;
           return size;
@@ -9568,6 +9607,11 @@ public final class ClientProtos {
             result = result &&
                 (getDeleteType() == other.getDeleteType());
           }
+          result = result && (hasTags() == other.hasTags());
+          if (hasTags()) {
+            result = result && getTags()
+                .equals(other.getTags());
+          }
           result = result &&
               getUnknownFields().equals(other.getUnknownFields());
           return result;
@@ -9597,6 +9641,10 @@ public final class ClientProtos {
             hash = (37 * hash) + DELETE_TYPE_FIELD_NUMBER;
             hash = (53 * hash) + hashEnum(getDeleteType());
           }
+          if (hasTags()) {
+            hash = (37 * hash) + TAGS_FIELD_NUMBER;
+            hash = (53 * hash) + getTags().hashCode();
+          }
           hash = (29 * hash) + getUnknownFields().hashCode();
           memoizedHashCode = hash;
           return hash;
@@ -9714,6 +9762,8 @@ public final class ClientProtos {
             bitField0_ = (bitField0_ & ~0x00000004);
             deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
             bitField0_ = (bitField0_ & ~0x00000008);
+            tags_ = com.google.protobuf.ByteString.EMPTY;
+            bitField0_ = (bitField0_ & ~0x00000010);
             return this;
           }
 
@@ -9758,6 +9808,10 @@ public final class ClientProtos {
               to_bitField0_ |= 0x00000008;
             }
             result.deleteType_ = deleteType_;
+            if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+              to_bitField0_ |= 0x00000010;
+            }
+            result.tags_ = tags_;
             result.bitField0_ = to_bitField0_;
             onBuilt();
             return result;
@@ -9786,6 +9840,9 @@ public final class ClientProtos {
             if (other.hasDeleteType()) {
               setDeleteType(other.getDeleteType());
             }
+            if (other.hasTags()) {
+              setTags(other.getTags());
+            }
             this.mergeUnknownFields(other.getUnknownFields());
             return this;
           }
@@ -9954,6 +10011,42 @@ public final class ClientProtos {
             return this;
           }
 
+          // optional bytes tags = 5;
+          private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY;
+          /**
+           * <code>optional bytes tags = 5;</code>
+           */
+          public boolean hasTags() {
+            return ((bitField0_ & 0x00000010) == 0x00000010);
+          }
+          /**
+           * <code>optional bytes tags = 5;</code>
+           */
+          public com.google.protobuf.ByteString getTags() {
+            return tags_;
+          }
+          /**
+           * <code>optional bytes tags = 5;</code>
+           */
+          public Builder setTags(com.google.protobuf.ByteString value) {
+            if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000010;
+            tags_ = value;
+            onChanged();
+            return this;
+          }
+          /**
+           * <code>optional bytes tags = 5;</code>
+           */
+          public Builder clearTags() {
+            bitField0_ = (bitField0_ & ~0x00000010);
+            tags_ = getDefaultInstance().getTags();
+            onChanged();
+            return this;
+          }
+
           // @@protoc_insertion_point(builder_scope:MutationProto.ColumnValue.QualifierValue)
         }
 
@@ -27723,7 +27816,7 @@ public final class ClientProtos {
       "exists\030\002 \003(\010\"\200\001\n\tCondition\022\013\n\003row\030\001 \002(\014\022",
       "\016\n\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\"\n\014co" +
       "mpare_type\030\004 \002(\0162\014.CompareType\022\037\n\ncompar" +
-      "ator\030\005 \002(\0132\013.Comparator\"\227\006\n\rMutationProt" +
+      "ator\030\005 \002(\0132\013.Comparator\"\246\006\n\rMutationProt" +
       "o\022\013\n\003row\030\001 \001(\014\0220\n\013mutate_type\030\002 \001(\0162\033.Mu" +
       "tationProto.MutationType\0220\n\014column_value" +
       "\030\003 \003(\0132\032.MutationProto.ColumnValue\022\021\n\tti" +
@@ -27731,70 +27824,71 @@ public final class ClientProtos {
       "ytesPair\022:\n\ndurability\030\006 \001(\0162\031.MutationP" +
       "roto.Durability:\013USE_DEFAULT\022\036\n\ntime_ran" +
       "ge\030\007 \001(\0132\n.TimeRange\022\035\n\025associated_cell_",
-      "count\030\010 \001(\005\032\330\001\n\013ColumnValue\022\016\n\006family\030\001 " +
+      "count\030\010 \001(\005\032\347\001\n\013ColumnValue\022\016\n\006family\030\001 " +
       "\002(\014\022B\n\017qualifier_value\030\002 \003(\0132).MutationP" +
-      "roto.ColumnValue.QualifierValue\032u\n\016Quali" +
-      "fierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005value\030\002 " +
-      "\001(\014\022\021\n\ttimestamp\030\003 \001(\004\022.\n\013delete_type\030\004 " +
-      "\001(\0162\031.MutationProto.DeleteType\"W\n\nDurabi" +
-      "lity\022\017\n\013USE_DEFAULT\020\000\022\014\n\010SKIP_WAL\020\001\022\r\n\tA" +
-      "SYNC_WAL\020\002\022\014\n\010SYNC_WAL\020\003\022\r\n\tFSYNC_WAL\020\004\"" +
-      ">\n\014MutationType\022\n\n\006APPEND\020\000\022\r\n\tINCREMENT" +
-      "\020\001\022\007\n\003PUT\020\002\022\n\n\006DELETE\020\003\"p\n\nDeleteType\022\026\n",
-      "\022DELETE_ONE_VERSION\020\000\022\034\n\030DELETE_MULTIPLE" +
-      "_VERSIONS\020\001\022\021\n\rDELETE_FAMILY\020\002\022\031\n\025DELETE" +
-      "_FAMILY_VERSION\020\003\"r\n\rMutateRequest\022 \n\006re" +
-      "gion\030\001 \002(\0132\020.RegionSpecifier\022 \n\010mutation" +
-      "\030\002 \002(\0132\016.MutationProto\022\035\n\tcondition\030\003 \001(" +
-      "\0132\n.Condition\"<\n\016MutateResponse\022\027\n\006resul" +
-      "t\030\001 \001(\0132\007.Result\022\021\n\tprocessed\030\002 \001(\010\"\344\002\n\004" +
-      "Scan\022\027\n\006column\030\001 \003(\0132\007.Column\022!\n\tattribu" +
-      "te\030\002 \003(\0132\016.NameBytesPair\022\021\n\tstart_row\030\003 " +
-      "\001(\014\022\020\n\010stop_row\030\004 \001(\014\022\027\n\006filter\030\005 \001(\0132\007.",
-      "Filter\022\036\n\ntime_range\030\006 \001(\0132\n.TimeRange\022\027" +
-      "\n\014max_versions\030\007 \001(\r:\0011\022\032\n\014cache_blocks\030" +
-      "\010 \001(\010:\004true\022\022\n\nbatch_size\030\t \001(\r\022\027\n\017max_r" +
-      "esult_size\030\n \001(\004\022\023\n\013store_limit\030\013 \001(\r\022\024\n" +
-      "\014store_offset\030\014 \001(\r\022&\n\036load_column_famil" +
-      "ies_on_demand\030\r \001(\010\022\r\n\005small\030\016 \001(\010\"\236\001\n\013S" +
-      "canRequest\022 \n\006region\030\001 \001(\0132\020.RegionSpeci" +
-      "fier\022\023\n\004scan\030\002 \001(\0132\005.Scan\022\022\n\nscanner_id\030" +
-      "\003 \001(\004\022\026\n\016number_of_rows\030\004 \001(\r\022\025\n\rclose_s" +
-      "canner\030\005 \001(\010\022\025\n\rnext_call_seq\030\006 \001(\004\"y\n\014S",
-      "canResponse\022\030\n\020cells_per_result\030\001 \003(\r\022\022\n" +
-      "\nscanner_id\030\002 \001(\004\022\024\n\014more_results\030\003 \001(\010\022" +
-      "\013\n\003ttl\030\004 \001(\r\022\030\n\007results\030\005 \003(\0132\007.Result\"\263" +
-      "\001\n\024BulkLoadHFileRequest\022 \n\006region\030\001 \002(\0132" +
-      "\020.RegionSpecifier\0225\n\013family_path\030\002 \003(\0132 " +
-      ".BulkLoadHFileRequest.FamilyPath\022\026\n\016assi" +
-      "gn_seq_num\030\003 \001(\010\032*\n\nFamilyPath\022\016\n\006family" +
-      "\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLoadHFileRes" +
-      "ponse\022\016\n\006loaded\030\001 \002(\010\"a\n\026CoprocessorServ" +
-      "iceCall\022\013\n\003row\030\001 \002(\014\022\024\n\014service_name\030\002 \002",
-      "(\t\022\023\n\013method_name\030\003 \002(\t\022\017\n\007request\030\004 \002(\014" +
-      "\"d\n\031CoprocessorServiceRequest\022 \n\006region\030" +
-      "\001 \002(\0132\020.RegionSpecifier\022%\n\004call\030\002 \002(\0132\027." +
-      "CoprocessorServiceCall\"]\n\032CoprocessorSer" +
-      "viceResponse\022 \n\006region\030\001 \002(\0132\020.RegionSpe" +
-      "cifier\022\035\n\005value\030\002 \002(\0132\016.NameBytesPair\"B\n" +
-      "\013MultiAction\022 \n\010mutation\030\001 \001(\0132\016.Mutatio" +
-      "nProto\022\021\n\003get\030\002 \001(\0132\004.Get\"I\n\014ActionResul" +
-      "t\022\026\n\005value\030\001 \001(\0132\007.Result\022!\n\texception\030\002" +
-      " \001(\0132\016.NameBytesPair\"^\n\014MultiRequest\022 \n\006",
-      "region\030\001 \002(\0132\020.RegionSpecifier\022\034\n\006action" +
-      "\030\002 \003(\0132\014.MultiAction\022\016\n\006atomic\030\003 \001(\010\".\n\r" +
-      "MultiResponse\022\035\n\006result\030\001 \003(\0132\r.ActionRe" +
-      "sult2\342\002\n\rClientService\022 \n\003Get\022\013.GetReque" +
-      "st\032\014.GetResponse\022/\n\010MultiGet\022\020.MultiGetR" +
-      "equest\032\021.MultiGetResponse\022)\n\006Mutate\022\016.Mu" +
-      "tateRequest\032\017.MutateResponse\022#\n\004Scan\022\014.S" +
-      "canRequest\032\r.ScanResponse\022>\n\rBulkLoadHFi" +
-      "le\022\025.BulkLoadHFileRequest\032\026.BulkLoadHFil" +
-      "eResponse\022F\n\013ExecService\022\032.CoprocessorSe",
-      "rviceRequest\032\033.CoprocessorServiceRespons" +
-      "e\022&\n\005Multi\022\r.MultiRequest\032\016.MultiRespons" +
-      "eBB\n*org.apache.hadoop.hbase.protobuf.ge" +
-      "neratedB\014ClientProtosH\001\210\001\001\240\001\001"
+      "roto.ColumnValue.QualifierValue\032\203\001\n\016Qual" +
+      "ifierValue\022\021\n\tqualifier\030\001 \001(\014\022\r\n\005value\030\002" +
+      " \001(\014\022\021\n\ttimestamp\030\003 \001(\004\022.\n\013delete_type\030\004" +
+      " \001(\0162\031.MutationProto.DeleteType\022\014\n\004tags\030" +
+      "\005 \001(\014\"W\n\nDurability\022\017\n\013USE_DEFAULT\020\000\022\014\n\010" +
+      "SKIP_WAL\020\001\022\r\n\tASYNC_WAL\020\002\022\014\n\010SYNC_WAL\020\003\022" +
+      "\r\n\tFSYNC_WAL\020\004\">\n\014MutationType\022\n\n\006APPEND" +
+      "\020\000\022\r\n\tINCREMENT\020\001\022\007\n\003PUT\020\002\022\n\n\006DELETE\020\003\"p",
+      "\n\nDeleteType\022\026\n\022DELETE_ONE_VERSION\020\000\022\034\n\030" +
+      "DELETE_MULTIPLE_VERSIONS\020\001\022\021\n\rDELETE_FAM" +
+      "ILY\020\002\022\031\n\025DELETE_FAMILY_VERSION\020\003\"r\n\rMuta" +
+      "teRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecif" +
+      "ier\022 \n\010mutation\030\002 \002(\0132\016.MutationProto\022\035\n" +
+      "\tcondition\030\003 \001(\0132\n.Condition\"<\n\016MutateRe" +
+      "sponse\022\027\n\006result\030\001 \001(\0132\007.Result\022\021\n\tproce" +
+      "ssed\030\002 \001(\010\"\344\002\n\004Scan\022\027\n\006column\030\001 \003(\0132\007.Co" +
+      "lumn\022!\n\tattribute\030\002 \003(\0132\016.NameBytesPair\022" +
+      "\021\n\tstart_row\030\003 \001(\014\022\020\n\010stop_row\030\004 \001(\014\022\027\n\006",
+      "filter\030\005 \001(\0132\007.Filter\022\036\n\ntime_range\030\006 \001(" +
+      "\0132\n.TimeRange\022\027\n\014max_versions\030\007 \001(\r:\0011\022\032" +
+      "\n\014cache_blocks\030\010 \001(\010:\004true\022\022\n\nbatch_size" +
+      "\030\t \001(\r\022\027\n\017max_result_size\030\n \001(\004\022\023\n\013store" +
+      "_limit\030\013 \001(\r\022\024\n\014store_offset\030\014 \001(\r\022&\n\036lo" +
+      "ad_column_families_on_demand\030\r \001(\010\022\r\n\005sm" +
+      "all\030\016 \001(\010\"\236\001\n\013ScanRequest\022 \n\006region\030\001 \001(" +
+      "\0132\020.RegionSpecifier\022\023\n\004scan\030\002 \001(\0132\005.Scan" +
+      "\022\022\n\nscanner_id\030\003 \001(\004\022\026\n\016number_of_rows\030\004" +
+      " \001(\r\022\025\n\rclose_scanner\030\005 \001(\010\022\025\n\rnext_call",
+      "_seq\030\006 \001(\004\"y\n\014ScanResponse\022\030\n\020cells_per_" +
+      "result\030\001 \003(\r\022\022\n\nscanner_id\030\002 \001(\004\022\024\n\014more" +
+      "_results\030\003 \001(\010\022\013\n\003ttl\030\004 \001(\r\022\030\n\007results\030\005" +
+      " \003(\0132\007.Result\"\263\001\n\024BulkLoadHFileRequest\022 " +
+      "\n\006region\030\001 \002(\0132\020.RegionSpecifier\0225\n\013fami" +
+      "ly_path\030\002 \003(\0132 .BulkLoadHFileRequest.Fam" +
+      "ilyPath\022\026\n\016assign_seq_num\030\003 \001(\010\032*\n\nFamil" +
+      "yPath\022\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025B" +
+      "ulkLoadHFileResponse\022\016\n\006loaded\030\001 \002(\010\"a\n\026" +
+      "CoprocessorServiceCall\022\013\n\003row\030\001 \002(\014\022\024\n\014s",
+      "ervice_name\030\002 \002(\t\022\023\n\013method_name\030\003 \002(\t\022\017" +
+      "\n\007request\030\004 \002(\014\"d\n\031CoprocessorServiceReq" +
+      "uest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022%" +
+      "\n\004call\030\002 \002(\0132\027.CoprocessorServiceCall\"]\n" +
+      "\032CoprocessorServiceResponse\022 \n\006region\030\001 " +
+      "\002(\0132\020.RegionSpecifier\022\035\n\005value\030\002 \002(\0132\016.N" +
+      "ameBytesPair\"B\n\013MultiAction\022 \n\010mutation\030" +
+      "\001 \001(\0132\016.MutationProto\022\021\n\003get\030\002 \001(\0132\004.Get" +
+      "\"I\n\014ActionResult\022\026\n\005value\030\001 \001(\0132\007.Result" +
+      "\022!\n\texception\030\002 \001(\0132\016.NameBytesPair\"^\n\014M",
+      "ultiRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpec" +
+      "ifier\022\034\n\006action\030\002 \003(\0132\014.MultiAction\022\016\n\006a" +
+      "tomic\030\003 \001(\010\".\n\rMultiResponse\022\035\n\006result\030\001" +
+      " \003(\0132\r.ActionResult2\342\002\n\rClientService\022 \n" +
+      "\003Get\022\013.GetRequest\032\014.GetResponse\022/\n\010Multi" +
+      "Get\022\020.MultiGetRequest\032\021.MultiGetResponse" +
+      "\022)\n\006Mutate\022\016.MutateRequest\032\017.MutateRespo" +
+      "nse\022#\n\004Scan\022\014.ScanRequest\032\r.ScanResponse" +
+      "\022>\n\rBulkLoadHFile\022\025.BulkLoadHFileRequest" +
+      "\032\026.BulkLoadHFileResponse\022F\n\013ExecService\022",
+      "\032.CoprocessorServiceRequest\032\033.Coprocesso" +
+      "rServiceResponse\022&\n\005Multi\022\r.MultiRequest" +
+      "\032\016.MultiResponseBB\n*org.apache.hadoop.hb" +
+      "ase.protobuf.generatedB\014ClientProtosH\001\210\001" +
+      "\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -27866,7 +27960,7 @@ public final class ClientProtos {
           internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_MutationProto_ColumnValue_QualifierValue_descriptor,
-              new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", });
+              new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", "Tags", });
           internal_static_MutateRequest_descriptor =
             getDescriptor().getMessageTypes().get(9);
           internal_static_MutateRequest_fieldAccessorTable = new

Modified: hbase/trunk/hbase-protocol/src/main/protobuf/Cell.proto
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/protobuf/Cell.proto?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/protobuf/Cell.proto (original)
+++ hbase/trunk/hbase-protocol/src/main/protobuf/Cell.proto Sat Sep 21 18:01:32 2013
@@ -48,6 +48,7 @@ message Cell {
   optional uint64 timestamp = 4;
   optional CellType cell_type = 5;
   optional bytes value = 6;
+  optional bytes tags = 7;
 }
 
 /**
@@ -61,4 +62,5 @@ message KeyValue {
   optional uint64 timestamp = 4;
   optional CellType key_type = 5;
   optional bytes value = 6;
+  optional bytes tags = 7;
 }

Modified: hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto (original)
+++ hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto Sat Sep 21 18:01:32 2013
@@ -187,6 +187,7 @@ message MutationProto {
       optional bytes value = 2;
       optional uint64 timestamp = 3;
       optional DeleteType delete_type = 4;
+      optional bytes tags = 5;
     }
   }
 }
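
As a usage sketch of the new field, the fragment below populates the optional tags bytes (field 5) through the regenerated QualifierValue builder. setTags/hasTags/getTags come from the generated code earlier in this patch; the qualifier, value, timestamp and tag bytes are placeholder data, and real callers would pass in the serialized tag encoding produced elsewhere in HBase.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;

public class QualifierValueTagsExample {
  public static void main(String[] args) {
    byte[] tagBytes = new byte[] { 1, 2, 3 };            // placeholder tag payload

    QualifierValue qv = QualifierValue.newBuilder()
        .setQualifier(ByteString.copyFromUtf8("q1"))     // field 1
        .setValue(ByteString.copyFromUtf8("v1"))         // field 2
        .setTimestamp(1379786492000L)                    // field 3, placeholder timestamp
        .setTags(ByteString.copyFrom(tagBytes))          // new optional field 5
        .build();

    // The message-level hasTags()/getTags() mirror the builder accessors shown above.
    System.out.println("hasTags=" + qv.hasTags() + " tagsLen=" + qv.getTags().size());
  }
}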

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java Sat Sep 21 18:01:32 2013
@@ -329,4 +329,6 @@ public abstract class AbstractHFileReade
   public DataBlockEncoding getEncodingOnDisk() {
     return dataBlockEncoder.getEncodingOnDisk();
   }
+
+  public abstract int getMajorVersion();
 }
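
A quick illustration of how callers can use the new accessor (the helper itself is not part of the patch): HFile v3, introduced by this change set, is the first major version that carries per-cell tags, so branching on getMajorVersion() is a natural way to detect tag support.

import org.apache.hadoop.hbase.io.hfile.AbstractHFileReader;

final class TagSupportCheck {
  // Illustrative helper only: getMajorVersion() is the new abstract accessor
  // added above; the v3-implies-tags relationship reflects this change set.
  static String describe(AbstractHFileReader reader) {
    int major = reader.getMajorVersion();   // expected to be 2 or 3 for current readers
    return major >= 3
        ? "HFile v" + major + ": per-cell tags supported"
        : "HFile v" + major + ": no tag support";
  }
}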

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java Sat Sep 21 18:01:32 2013
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HConstant
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -61,9 +62,6 @@ public abstract class AbstractHFileWrite
   /** A "file info" block: a key-value map of file-wide metadata. */
   protected FileInfo fileInfo = new HFile.FileInfo();
 
-  /** Number of uncompressed bytes we allow per block. */
-  protected final int blockSize;
-
   /** Total # of key/value entries, i.e. how many times add() was called. */
   protected long entryCount = 0;
 
@@ -85,15 +83,6 @@ public abstract class AbstractHFileWrite
   /** {@link Writable}s representing meta block data. */
   protected List<Writable> metaData = new ArrayList<Writable>();
 
-  /** The compression algorithm used. NONE if no compression. */
-  protected final Compression.Algorithm compressAlgo;
-  
-  /**
-   * The data block encoding which will be used.
-   * {@link NoOpDataBlockEncoder#INSTANCE} if there is no encoding.
-   */
-  protected final HFileDataBlockEncoder blockEncoder;
-
   /** First key in a block. */
   protected byte[] firstKeyInBlock = null;
 
@@ -110,19 +99,28 @@ public abstract class AbstractHFileWrite
    */
   protected final String name;
 
+  /**
+   * The data block encoding which will be used.
+   * {@link NoOpDataBlockEncoder#INSTANCE} if there is no encoding.
+   */
+  protected final HFileDataBlockEncoder blockEncoder;
+  
+  protected final HFileContext hFileContext;
+
   public AbstractHFileWriter(CacheConfig cacheConf,
-      FSDataOutputStream outputStream, Path path, int blockSize,
-      Compression.Algorithm compressAlgo,
-      HFileDataBlockEncoder dataBlockEncoder,
-      KVComparator comparator) {
+      FSDataOutputStream outputStream, Path path, 
+      KVComparator comparator, HFileContext fileContext) {
     this.outputStream = outputStream;
     this.path = path;
     this.name = path != null ? path.getName() : outputStream.toString();
-    this.blockSize = blockSize;
-    this.compressAlgo = compressAlgo == null
-        ? HFile.DEFAULT_COMPRESSION_ALGORITHM : compressAlgo;
-    this.blockEncoder = dataBlockEncoder != null
-        ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
+    this.hFileContext = fileContext;
+    if (hFileContext.getEncodingOnDisk() != DataBlockEncoding.NONE
+        || hFileContext.getEncodingInCache() != DataBlockEncoding.NONE) {
+      this.blockEncoder = new HFileDataBlockEncoderImpl(hFileContext.getEncodingOnDisk(),
+          hFileContext.getEncodingInCache());
+    } else {
+      this.blockEncoder = NoOpDataBlockEncoder.INSTANCE;
+    }
     this.comparator = comparator != null ? comparator
         : KeyValue.COMPARATOR;
 
@@ -234,7 +232,7 @@ public abstract class AbstractHFileWrite
   @Override
   public String toString() {
     return "writer=" + (path != null ? path.toString() : null) + ", name="
-        + name + ", compression=" + compressAlgo.getName();
+        + name + ", compression=" + hFileContext.getCompression().getName();
   }
 
   /**
@@ -245,7 +243,7 @@ public abstract class AbstractHFileWrite
     trailer.setMetaIndexCount(metaNames.size());
     trailer.setTotalUncompressedBytes(totalUncompressedBytes+ trailer.getTrailerSize());
     trailer.setEntryCount(entryCount);
-    trailer.setCompressionCodec(compressAlgo);
+    trailer.setCompressionCodec(hFileContext.getCompression());
 
     trailer.serialize(outputStream);
 

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java Sat Sep 21 18:01:32 2013
@@ -23,10 +23,7 @@ import java.nio.ByteBuffer;
 import java.util.zip.Checksum;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ChecksumFactory;
 import org.apache.hadoop.hbase.util.ChecksumType;
 
 /**
@@ -107,7 +104,7 @@ public class ChecksumUtil {
     // when the minorVersion is 0, thus this is a defensive check for a
     // cannot-happen case. Since this is a cannot-happen case, it is
     // better to return false to indicate a checksum validation failure.
-    if (block.getMinorVersion() < HFileBlock.MINOR_VERSION_WITH_CHECKSUM) {
+    if (!block.getHFileContext().shouldUseHBaseChecksum()) {
       return false;
     }
 

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java Sat Sep 21 18:01:32 2013
@@ -54,11 +54,6 @@ import com.google.common.io.NullOutputSt
 @InterfaceAudience.Private
 public class FixedFileTrailer {
 
-  private static final Log LOG = LogFactory.getLog(FixedFileTrailer.class);
-
-  /** HFile minor version that introduced pbuf filetrailer */
-  private static final int PBUF_TRAILER_MINOR_VERSION = 2;
-
   /**
    * We store the comparator class name as a fixed-length field in the trailer.
    */
@@ -131,18 +126,13 @@ public class FixedFileTrailer {
 
   private static int[] computeTrailerSizeByVersion() {
     int versionToSize[] = new int[HFile.MAX_FORMAT_VERSION + 1];
-    for (int version = HFile.MIN_FORMAT_VERSION;
-         version <= HFile.MAX_FORMAT_VERSION;
-         ++version) {
-      FixedFileTrailer fft = new FixedFileTrailer(version, HFileBlock.MINOR_VERSION_NO_CHECKSUM);
-      DataOutputStream dos = new DataOutputStream(new NullOutputStream());
-      try {
-        fft.serialize(dos);
-      } catch (IOException ex) {
-        // The above has no reason to fail.
-        throw new RuntimeException(ex);
-      }
-      versionToSize[version] = dos.size();
+    // We support only two major versions now, i.e. V2 and V3.
+    versionToSize[2] = 212;
+    for (int version = 3; version <= HFile.MAX_FORMAT_VERSION; version++) {
+      // The max FixedFileTrailer size for V3 and above is fixed at 1KB to leave
+      // room for future additions; this holds as long as the trailer never
+      // grows beyond 1024 bytes.
+      versionToSize[version] = 1024;
     }
     return versionToSize;
   }
@@ -184,11 +174,7 @@ public class FixedFileTrailer {
     DataOutputStream baosDos = new DataOutputStream(baos);
 
     BlockType.TRAILER.write(baosDos);
-    if (majorVersion > 2 || (majorVersion == 2 && minorVersion >= PBUF_TRAILER_MINOR_VERSION)) {
-      serializeAsPB(baosDos);
-    } else {
-      serializeAsWritable(baosDos);
-    }
+    serializeAsPB(baosDos);
 
     // The last 4 bytes of the file encode the major and minor version universally
     baosDos.writeInt(materializeVersion(majorVersion, minorVersion));
@@ -234,29 +220,6 @@ public class FixedFileTrailer {
   }
 
   /**
-   * Write trailer data as writable
-   * @param outputStream
-   * @throws IOException
-   */
-  void serializeAsWritable(DataOutputStream output) throws IOException {
-    output.writeLong(fileInfoOffset);
-    output.writeLong(loadOnOpenDataOffset);
-    output.writeInt(dataIndexCount);
-
-    output.writeLong(uncompressedDataIndexSize);
-
-    output.writeInt(metaIndexCount);
-    output.writeLong(totalUncompressedBytes);
-    output.writeLong(entryCount);
-    output.writeInt(compressionCodec.ordinal());
-
-    output.writeInt(numDataIndexLevels);
-    output.writeLong(firstDataBlockOffset);
-    output.writeLong(lastDataBlockOffset);
-    Bytes.writeStringFixedSize(output, comparatorClassName, MAX_COMPARATOR_NAME_LENGTH);
-  }
-
-  /**
    * Deserialize the fixed file trailer from the given stream. The version needs
    * to already be specified. Make sure this is consistent with
    * {@link #serialize(DataOutputStream)}.
@@ -269,7 +232,8 @@ public class FixedFileTrailer {
 
     BlockType.TRAILER.readAndCheck(inputStream);
 
-    if (majorVersion > 2 || (majorVersion == 2 && minorVersion >= PBUF_TRAILER_MINOR_VERSION)) {
+    if (majorVersion > 2
+        || (majorVersion == 2 && minorVersion >= HFileReaderV2.PBUF_TRAILER_MINOR_VERSION)) {
       deserializeFromPB(inputStream);
     } else {
       deserializeFromWritable(inputStream);
@@ -655,7 +619,7 @@ public class FixedFileTrailer {
    * Create a 4 byte serialized version number by combining the
    * minor and major version numbers.
    */
-  private static int materializeVersion(int majorVersion, int minorVersion) {
+  static int materializeVersion(int majorVersion, int minorVersion) {
     return ((majorVersion & 0x00ffffff) | (minorVersion << 24));
   }
 }
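
With the trailer now sized per major version (212 bytes for v2 and a fixed 1KB ceiling for v3 and later), materializeVersion(major, minor) remains the routine that packs both numbers into the 4-byte version word at the end of the file: the major version keeps the low 24 bits and the minor version the top 8. Below is a small worked example of that packing; the unpacking shown is simply the inverse, added here for illustration and not quoted from the patch.

public class TrailerVersionExample {
  public static void main(String[] args) {
    int major = 3, minor = 0;                              // HFile v3, minor 0
    int packed = (major & 0x00ffffff) | (minor << 24);     // same expression as materializeVersion
    System.out.printf("packed=0x%08x%n", packed);          // -> packed=0x00000003

    int extractedMajor = packed & 0x00ffffff;              // 3
    int extractedMinor = packed >>> 24;                     // 0
    System.out.println(extractedMajor + "." + extractedMinor);
  }
}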

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=1525269&r1=1525268&r2=1525269&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java Sat Sep 21 18:01:32 2013
@@ -50,23 +50,21 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
 import org.apache.hadoop.hbase.protobuf.generated.HFileProtos;
-import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.util.BloomFilterWriter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Writable;
 
 import com.google.common.base.Preconditions;
@@ -156,7 +154,7 @@ public class HFile {
 
   /** Maximum supported HFile format version
    */
-  public static final int MAX_FORMAT_VERSION = 2;
+  public static final int MAX_FORMAT_VERSION = 3;
 
   /** Default compression name: none. */
   public final static String DEFAULT_COMPRESSION =
@@ -292,6 +290,8 @@ public class HFile {
 
     void append(byte[] key, byte[] value) throws IOException;
 
+    void append(byte[] key, byte[] value, byte[] tag) throws IOException;
+
     /** @return the path to this {@link HFile} */
     Path getPath();
 
@@ -332,15 +332,9 @@ public class HFile {
     protected FileSystem fs;
     protected Path path;
     protected FSDataOutputStream ostream;
-    protected int blockSize = HColumnDescriptor.DEFAULT_BLOCKSIZE;
-    protected Compression.Algorithm compression =
-        HFile.DEFAULT_COMPRESSION_ALGORITHM;
-    protected HFileDataBlockEncoder encoder = NoOpDataBlockEncoder.INSTANCE;
     protected KVComparator comparator = KeyValue.COMPARATOR;
     protected InetSocketAddress[] favoredNodes;
-    protected ChecksumType checksumType = HFile.DEFAULT_CHECKSUM_TYPE;
-    protected int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM;
-    protected boolean includeMVCCReadpoint = true;
+    private HFileContext fileContext;
 
     WriterFactory(Configuration conf, CacheConfig cacheConf) {
       this.conf = conf;
@@ -361,29 +355,6 @@ public class HFile {
       return this;
     }
 
-    public WriterFactory withBlockSize(int blockSize) {
-      this.blockSize = blockSize;
-      return this;
-    }
-
-    public WriterFactory withCompression(Compression.Algorithm compression) {
-      Preconditions.checkNotNull(compression);
-      this.compression = compression;
-      return this;
-    }
-
-    public WriterFactory withCompression(String compressAlgo) {
-      Preconditions.checkNotNull(compression);
-      this.compression = AbstractHFileWriter.compressionByName(compressAlgo);
-      return this;
-    }
-
-    public WriterFactory withDataBlockEncoder(HFileDataBlockEncoder encoder) {
-      Preconditions.checkNotNull(encoder);
-      this.encoder = encoder;
-      return this;
-    }
-
     public WriterFactory withComparator(KVComparator comparator) {
       Preconditions.checkNotNull(comparator);
       this.comparator = comparator;
@@ -396,23 +367,8 @@ public class HFile {
       return this;
     }
 
-    public WriterFactory withChecksumType(ChecksumType checksumType) {
-      Preconditions.checkNotNull(checksumType);
-      this.checksumType = checksumType;
-      return this;
-    }
-
-    public WriterFactory withBytesPerChecksum(int bytesPerChecksum) {
-      this.bytesPerChecksum = bytesPerChecksum;
-      return this;
-    }
-
-    /**
-     * @param includeMVCCReadpoint whether to write the mvcc readpoint to the file for each KV
-     * @return this (for chained invocation)
-     */
-    public WriterFactory includeMVCCReadpoint(boolean includeMVCCReadpoint) {
-      this.includeMVCCReadpoint = includeMVCCReadpoint;
+    public WriterFactory withFileContext(HFileContext fileContext) {
+      this.fileContext = fileContext;
       return this;
     }
 
@@ -424,16 +380,12 @@ public class HFile {
       if (path != null) {
         ostream = AbstractHFileWriter.createOutputStream(conf, fs, path, favoredNodes);
       }
-      return createWriter(fs, path, ostream, blockSize,
-          compression, encoder, comparator, checksumType, bytesPerChecksum, includeMVCCReadpoint);
+      return createWriter(fs, path, ostream, 
+                   comparator, fileContext);
     }
 
-    protected abstract Writer createWriter(FileSystem fs, Path path,
-        FSDataOutputStream ostream, int blockSize,
-        Compression.Algorithm compress,
-        HFileDataBlockEncoder dataBlockEncoder,
-        KVComparator comparator, ChecksumType checksumType,
-        int bytesPerChecksum, boolean includeMVCCReadpoint) throws IOException;
+    protected abstract Writer createWriter(FileSystem fs, Path path, FSDataOutputStream ostream,
+        KVComparator comparator, HFileContext fileContext) throws IOException;
   }
 
   /** The configuration key for HFile version to use for new files */
@@ -466,6 +418,8 @@ public class HFile {
     switch (version) {
     case 2:
       return new HFileWriterV2.WriterFactoryV2(conf, cacheConf);
+    case 3:
+      return new HFileWriterV3.WriterFactoryV3(conf, cacheConf);
     default:
       throw new IllegalArgumentException("Cannot create writer for HFile " +
           "format version " + version);
@@ -573,6 +527,9 @@ public class HFile {
     case 2:
       return new HFileReaderV2(
           path, trailer, fsdis, size, cacheConf, preferredEncodingInCache, hfs);
+    case 3:
+      return new HFileReaderV3(
+          path, trailer, fsdis, size, cacheConf, preferredEncodingInCache, hfs);
     default:
       throw new CorruptHFileException("Invalid HFile version " + trailer.getMajorVersion());
     }
@@ -589,7 +546,6 @@ public class HFile {
   public static Reader createReaderWithEncoding(
       FileSystem fs, Path path, CacheConfig cacheConf,
       DataBlockEncoding preferredEncodingInCache) throws IOException {
-    final boolean closeIStream = true;
     FSDataInputStreamWrapper stream = new FSDataInputStreamWrapper(fs, path);
     return pickReaderVersion(path, stream, fs.getFileStatus(path).getLen(),
         cacheConf, preferredEncodingInCache, stream.getHfs());
@@ -648,15 +604,16 @@ public class HFile {
   }
 
   /**
-   * Metadata for this file.  Conjured by the writer.  Read in by the reader.
+   * Metadata for this file. Conjured by the writer. Read in by the reader.
    */
-  static class FileInfo implements SortedMap<byte [], byte []> {
+  public static class FileInfo implements SortedMap<byte[], byte[]> {
     static final String RESERVED_PREFIX = "hfile.";
     static final byte[] RESERVED_PREFIX_BYTES = Bytes.toBytes(RESERVED_PREFIX);
     static final byte [] LASTKEY = Bytes.toBytes(RESERVED_PREFIX + "LASTKEY");
     static final byte [] AVG_KEY_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_KEY_LEN");
     static final byte [] AVG_VALUE_LEN = Bytes.toBytes(RESERVED_PREFIX + "AVG_VALUE_LEN");
     static final byte [] COMPARATOR = Bytes.toBytes(RESERVED_PREFIX + "COMPARATOR");
+    public static final byte [] MAX_TAGS_LEN = Bytes.toBytes(RESERVED_PREFIX + "MAX_TAGS_LEN");
     private final SortedMap<byte [], byte []> map = new TreeMap<byte [], byte []>(Bytes.BYTES_COMPARATOR);
 
     public FileInfo() {

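Taken together, the WriterFactory changes above fold the old per-writer knobs (block size, compression, data block encoder, checksum type, bytes per checksum, MVCC readpoint flag) into a single HFileContext handed to the new withFileContext(...) call. The fragment below is a minimal sketch of the resulting call pattern under stated assumptions: getWriterFactory() and withPath() are assumed pre-existing entry points, and the HFileContext constructor and setIncludesTags setter are hypothetical placeholders for the real context-building API, which is not part of this hunk.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;

final class V3WriterSketch {
  // Only withComparator(), withFileContext() and create() come from this patch;
  // getWriterFactory() and withPath() are assumed pre-existing entry points, and
  // the HFileContext construction below is a hypothetical stand-in for the real
  // context API.
  static HFile.Writer openWriter(Configuration conf, CacheConfig cacheConf,
      FileSystem fs, Path path) throws IOException {
    HFileContext context = new HFileContext();    // hypothetical constructor
    context.setIncludesTags(true);                // hypothetical setter (v3 tag support)

    return HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, path)
        .withComparator(KeyValue.COMPARATOR)
        .withFileContext(context)                 // replaces withBlockSize/withCompression/withChecksumType/...
        .create();
  }
}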

