hbase-commits mailing list archives

From anoopsamj...@apache.org
Subject git commit: HBASE-11437 Modify cell tag handling code to treat the length as unsigned. (Anoop)
Date Sun, 13 Jul 2014 06:44:49 GMT
Repository: hbase
Updated Branches:
  refs/heads/0.98 cedd53a75 -> f2d16e0a8


HBASE-11437 Modify cell tag handling code to treat the length as unsigned. (Anoop)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f2d16e0a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f2d16e0a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f2d16e0a

Branch: refs/heads/0.98
Commit: f2d16e0a8cf6fc349216ffcc9c2b6eabd3a99c0d
Parents: cedd53a
Author: anoopsjohn <anoopsamjohn@gmail.com>
Authored: Sun Jul 13 12:14:21 2014 +0530
Committer: anoopsjohn <anoopsamjohn@gmail.com>
Committed: Sun Jul 13 12:14:21 2014 +0530

----------------------------------------------------------------------
 .../hadoop/hbase/protobuf/ProtobufUtil.java     |   8 +-
 .../ipc/TestPayloadCarryingRpcController.java   |   6 ++
 .../main/java/org/apache/hadoop/hbase/Cell.java |  11 +-
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  12 +--
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  38 ++++---
 .../org/apache/hadoop/hbase/KeyValueUtil.java   |  11 +-
 .../main/java/org/apache/hadoop/hbase/Tag.java  |  54 +++++++---
 .../hadoop/hbase/codec/CellCodecWithTags.java   |   2 +-
 .../hadoop/hbase/io/TagCompressionContext.java  |  28 ++---
 .../io/encoding/BufferedDataBlockEncoder.java   |  13 ++-
 .../io/encoding/CopyKeyDataBlockEncoder.java    |   3 +-
 .../hbase/io/encoding/EncodedDataBlock.java     |   4 +-
 .../org/apache/hadoop/hbase/util/Bytes.java     |  45 ++++++++
 .../org/apache/hadoop/hbase/TestKeyValue.java   |   7 +-
 .../hbase/codec/TestCellCodecWithTags.java      |   6 +-
 .../hbase/codec/TestKeyValueCodecWithTags.java  |   6 +-
 .../hbase/io/TestTagCompressionContext.java     |   8 +-
 .../decode/PrefixTreeArrayScanner.java          |   2 +-
 .../codec/prefixtree/decode/PrefixTreeCell.java |  12 ++-
 .../hadoop/hbase/io/hfile/HFileReaderV3.java    |   6 +-
 .../hadoop/hbase/io/hfile/HFileWriterV3.java    |   4 +-
 .../hadoop/hbase/regionserver/HRegion.java      |  11 +-
 .../hbase/regionserver/wal/HLogSplitter.java    |   6 +-
 .../regionserver/wal/KeyValueCompression.java   |   2 +-
 .../regionserver/wal/SecureWALCellCodec.java    |   2 +-
 .../hbase/regionserver/wal/WALCellCodec.java    |   6 +-
 .../security/access/AccessControlLists.java     |   2 +-
 .../hbase/security/access/AccessController.java |   8 +-
 .../visibility/VisibilityController.java        |   8 +-
 .../visibility/VisibilityLabelFilter.java       |   2 +-
 .../visibility/VisibilityScanDeleteTracker.java |   2 +-
 .../security/visibility/VisibilityUtils.java    |  10 +-
 .../io/encoding/TestPrefixTreeEncoding.java     |  12 +--
 .../hadoop/hbase/io/hfile/TestHFileBlock.java   |   4 +-
 .../hbase/io/hfile/TestHFileWriterV3.java       |   8 +-
 .../regionserver/DataBlockEncodingTool.java     |   2 +-
 .../hadoop/hbase/regionserver/TestTags.java     | 102 ++++++++++---------
 .../replication/TestReplicationWithTags.java    |   3 +-
 .../hadoop/hbase/thrift2/ThriftUtilities.java   |   2 +-
 39 files changed, 298 insertions(+), 180 deletions(-)
----------------------------------------------------------------------
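
For context on the change as a whole: the serialized tags-length field stays two bytes on the wire, but it is now interpreted as an unsigned value, raising the usable maximum from Short.MAX_VALUE (32767) to 65535. A minimal standalone sketch of that unsigned round trip (plain Java, not code from this patch):

  public class UnsignedTagLengthDemo {
    public static void main(String[] args) {
      int tagsLength = 40000;                    // larger than any signed short can hold
      byte[] buf = new byte[2];
      buf[0] = (byte) (tagsLength >> 8);         // high byte first, as the patch's Bytes.putAsShort writes
      buf[1] = (byte) tagsLength;                // low byte
      int restored = ((buf[0] & 0xff) << 8) ^ (buf[1] & 0xff);  // as the patch's Bytes.readAsInt reads
      System.out.println(restored);              // prints 40000
    }
  }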


http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index add8f3f..a063fd0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -1045,9 +1045,9 @@ public final class ProtobufUtil {
               kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
           valueBuilder.setValue(ByteStringer.wrap(
               kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
-          if (kv.getTagsLength() > 0) {
+          if (kv.getTagsLengthUnsigned() > 0) {
             valueBuilder.setTags(ByteStringer.wrap(kv.getTagsArray(),
-                kv.getTagsOffset(), kv.getTagsLength()));
+                kv.getTagsOffset(), kv.getTagsLengthUnsigned()));
           }
           columnBuilder.addQualifierValue(valueBuilder.build());
         }
@@ -1108,9 +1108,9 @@ public final class ProtobufUtil {
         valueBuilder.setValue(ByteStringer.wrap(
             kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
         valueBuilder.setTimestamp(kv.getTimestamp());
-        if(cell.getTagsLength() > 0) {
+        if(cell.getTagsLengthUnsigned() > 0) {
           valueBuilder.setTags(ByteStringer.wrap(kv.getTagsArray(), kv.getTagsOffset(),
-              kv.getTagsLength()));
+              kv.getTagsLengthUnsigned()));
         }
         if (type == MutationType.DELETE) {
           KeyValue.Type keyValueType = KeyValue.Type.codeToType(kv.getType());

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java
index 249cc42..dbd3505 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java
@@ -165,6 +165,12 @@ public class TestPayloadCarryingRpcController {
               }
 
               @Override
+              public int getTagsLengthUnsigned() {
+                // unused
+                return 0;
+              }
+
+              @Override
               public short getTagsLength() {
                 // unused
                 return 0;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
index 27b9345..deef81b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
@@ -179,11 +179,20 @@ public interface Cell {
    * @return the first offset where the tags start in the Cell
    */
   int getTagsOffset();
-  
+
   /**
    * @return the total length of the tags in the Cell.
+   * @deprecated use {@link #getTagsLengthUnsigned()} which can handle tags length upto 65535.
    */
+  @Deprecated
   short getTagsLength();
+
+  /**
+   * @return the total length of the tags in the Cell.
+   * @deprecated From next major version this will be renamed to getTagsLength() which returns int.
+   */
+  @Deprecated
+  int getTagsLengthUnsigned();
   
   /**
    * WARNING do not use, expensive.  This gets an arraycopy of the cell's value.
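
The interface change above deprecates the short-returning accessor in favour of an int-returning one. A hedged sketch of how a caller on this 0.98 API might migrate (TagLengthCaller and tagsOf are illustrative names, not part of the patch):

  import java.util.Collections;
  import java.util.List;
  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.Tag;

  public class TagLengthCaller {
    static List<Tag> tagsOf(Cell cell) {
      int tagsLength = cell.getTagsLengthUnsigned();   // was: short len = cell.getTagsLength();
      if (tagsLength == 0) {
        return Collections.emptyList();
      }
      return Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), tagsLength);
    }
  }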

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index e3ed1ec..4461320 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -54,7 +54,7 @@ public final class CellUtil {
   }
 
   public static ByteRange fillTagRange(Cell cell, ByteRange range) {
-    return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
+    return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLengthUnsigned());
   }
 
   /***************** get individual arrays for tests ************/
@@ -91,7 +91,7 @@ public final class CellUtil {
    * @return tag value in a new byte array.
    */
   public static byte[] getTagArray(Cell cell){
-    byte[] output = new byte[cell.getTagsLength()];
+    byte[] output = new byte[cell.getTagsLengthUnsigned()];
     copyTagTo(cell, output, 0);
     return output;
   }
@@ -132,8 +132,8 @@ public final class CellUtil {
    */
   public static int copyTagTo(Cell cell, byte[] destination, int destinationOffset) {
     System.arraycopy(cell.getTagsArray(), cell.getTagsOffset(), destination, destinationOffset,
-        cell.getTagsLength());
-    return destinationOffset + cell.getTagsLength();
+        cell.getTagsLengthUnsigned());
+    return destinationOffset + cell.getTagsLengthUnsigned();
   }
 
   /********************* misc *************************************/
@@ -418,8 +418,8 @@ public final class CellUtil {
       @Override
       public Tag next() {
         if (hasNext()) {
-          short curTagLen = Bytes.toShort(tags, this.pos);
-          Tag tag = new Tag(tags, pos, (short) (curTagLen + Bytes.SIZEOF_SHORT));
+          int curTagLen = Bytes.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE);
+          Tag tag = new Tag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE);
           this.pos += Bytes.SIZEOF_SHORT + curTagLen;
           return tag;
         }
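
The iterator above now decodes each per-tag length with Bytes.readAsInt instead of Bytes.toShort. A hedged sketch of consuming it on this API (TagIterationDemo and printTagTypes are illustrative names):

  import java.util.Iterator;
  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.CellUtil;
  import org.apache.hadoop.hbase.Tag;

  public class TagIterationDemo {
    static void printTagTypes(Cell cell) {
      Iterator<Tag> it = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
          cell.getTagsLengthUnsigned());               // pass the unsigned length
      while (it.hasNext()) {
        System.out.println(it.next().getType());
      }
    }
  }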

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index e5538b7..3222965 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -143,6 +143,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
 
   public static final int KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE = ROW_OFFSET + TAGS_LENGTH_SIZE;
 
+  private static final int MAX_TAGS_LENGTH = (2 * Short.MAX_VALUE) + 1;
 
   /**
    * Computes the number of bytes that a <code>KeyValue</code> instance with the provided
@@ -735,7 +736,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
         c.getFamilyArray(), c.getFamilyOffset(), (int)c.getFamilyLength(), 
         c.getQualifierArray(), c.getQualifierOffset(), (int) c.getQualifierLength(), 
         c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), c.getValueOffset(), 
-        c.getValueLength(), c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+        c.getValueLength(), c.getTagsArray(), c.getTagsOffset(), c.getTagsLengthUnsigned());
   }
 
   /**
@@ -790,7 +791,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
     pos = Bytes.putByte(bytes, pos, type.getCode());
     pos += vlength;
     if (tagsLength > 0) {
-      pos = Bytes.putShort(bytes, pos, (short)(tagsLength & 0x0000ffff));
+      pos = Bytes.putAsShort(bytes, pos, tagsLength);
     }
     return bytes;
   }
@@ -908,7 +909,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
     }
     // Write the number of tags. If it is 0 then it means there are no tags.
     if (tagsLength > 0) {
-      pos = Bytes.putShort(buffer, pos, (short) tagsLength);
+      pos = Bytes.putAsShort(buffer, pos, tagsLength);
       for (Tag t : tags) {
         pos = Bytes.putBytes(buffer, pos, t.getBuffer(), t.getOffset(), t.getLength());
       }
@@ -917,8 +918,8 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
   }
 
   private static void checkForTagsLength(int tagsLength) {
-    if (tagsLength > Short.MAX_VALUE) {
-      throw new IllegalArgumentException("tagslength "+ tagsLength + " > " + Short.MAX_VALUE);
+    if (tagsLength > MAX_TAGS_LENGTH) {
+      throw new IllegalArgumentException("tagslength "+ tagsLength + " > " + MAX_TAGS_LENGTH);
     }
   }
 
@@ -973,7 +974,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
     }
     // Add the tags after the value part
     if (tagsLength > 0) {
-      pos = Bytes.putShort(bytes, pos, (short) (tagsLength));
+      pos = Bytes.putAsShort(bytes, pos, tagsLength);
       pos = Bytes.putBytes(bytes, pos, tags, tagsOffset, tagsLength);
     }
     return bytes;
@@ -1033,7 +1034,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
     }
     // Add the tags after the value part
     if (tagsLength > 0) {
-      pos = Bytes.putShort(bytes, pos, (short) (tagsLength));
+      pos = Bytes.putAsShort(bytes, pos, tagsLength);
       for (Tag t : tags) {
         pos = Bytes.putBytes(bytes, pos, t.getBuffer(), t.getOffset(), t.getLength());
       }
@@ -1577,7 +1578,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
    */
   @Override
   public int getTagsOffset() {
-    short tagsLen = getTagsLength();
+    int tagsLen = getTagsLengthUnsigned();
     if (tagsLen == 0) {
       return this.offset + this.length;
     }
@@ -1588,14 +1589,21 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
    * This returns the total length of the tag bytes
    */
   @Override
-  public short getTagsLength() {
+  @Deprecated
+  public int getTagsLengthUnsigned() {
     int tagsLen = this.length - (getKeyLength() + getValueLength() + KEYVALUE_INFRASTRUCTURE_SIZE);
     if (tagsLen > 0) {
       // There are some Tag bytes in the byte[]. So reduce 2 bytes which is added to denote the tags
       // length
       tagsLen -= TAGS_LENGTH_SIZE;
     }
-    return (short) tagsLen;
+    return tagsLen;
+  }
+
+  @Override
+  @Deprecated
+  public short getTagsLength() {
+    return (short) getTagsLengthUnsigned();
   }
 
   /**
@@ -1603,7 +1611,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
    * @return The tags
    */
   public List<Tag> getTags() {
-    short tagsLength = getTagsLength();
+    int tagsLength = getTagsLengthUnsigned();
     if (tagsLength == 0) {
       return EMPTY_ARRAY_LIST;
     }
@@ -2079,8 +2087,8 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
      * @return Long.MAX_VALUE if there is no LOG_REPLAY_TAG
      */
     private long getReplaySeqNum(final Cell c) {
-      Tag tag = Tag.getTag(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength(), 
-        TagType.LOG_REPLAY_TAG_TYPE);
+      Tag tag = Tag.getTag(c.getTagsArray(), c.getTagsOffset(), c.getTagsLengthUnsigned(),
+          TagType.LOG_REPLAY_TAG_TYPE);
 
       if(tag != null) {
         return Bytes.toLong(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
@@ -2779,8 +2787,8 @@ public class KeyValue implements Cell, HeapSize, Cloneable {
    */
   public static KeyValue cloneAndAddTags(Cell c, List<Tag> newTags) {
     List<Tag> existingTags = null;
-    if(c.getTagsLength() > 0) {
-      existingTags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    if(c.getTagsLengthUnsigned() > 0) {
+      existingTags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLengthUnsigned());
       existingTags.addAll(newTags);
     } else {
       existingTags = newTags;
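
A quick check of the bound introduced at the top of this file: MAX_TAGS_LENGTH = (2 * Short.MAX_VALUE) + 1 = (2 * 32767) + 1 = 65535, exactly the largest value the unsigned two-byte length field can carry, so checkForTagsLength() now rejects only lengths that genuinely cannot be serialized. In code form:

  int maxTagsLength = (2 * Short.MAX_VALUE) + 1;   // (2 * 32767) + 1
  System.out.println(maxTagsLength == 0xFFFF);     // true: 65535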

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
index aa56f74..607a253 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
@@ -43,7 +43,7 @@ public class KeyValueUtil {
   public static int length(final Cell cell) {
     return (int) (KeyValue.getKeyValueDataStructureSize(cell.getRowLength(),
         cell.getFamilyLength(), cell.getQualifierLength(), cell.getValueLength(),
-        cell.getTagsLength()));
+        cell.getTagsLengthUnsigned()));
   }
 
   protected static int keyLength(final Cell cell) {
@@ -115,8 +115,8 @@ public class KeyValueUtil {
     pos = Bytes.putInt(output, pos, cell.getValueLength());
     pos = appendKeyToByteArrayWithoutValue(cell, output, pos);
     pos = CellUtil.copyValueTo(cell, output, pos);
-    if ((cell.getTagsLength() > 0)) {
-      pos = Bytes.putShort(output, pos, cell.getTagsLength());
+    if ((cell.getTagsLengthUnsigned() > 0)) {
+      pos = Bytes.putAsShort(output, pos, cell.getTagsLengthUnsigned());
       pos = CellUtil.copyTagTo(cell, output, pos);
     }
     return pos;
@@ -165,9 +165,10 @@ public class KeyValueUtil {
     int keyLength = bb.getInt();
     int valueLength = bb.getInt();
     ByteBufferUtils.skip(bb, keyLength + valueLength);
-    short tagsLength = 0;
+    int tagsLength = 0;
     if (includesTags) {
-      tagsLength = bb.getShort();
+      // Read short as unsigned, high byte first
+      tagsLength = ((bb.get() & 0xff) << 8) ^ (bb.get() & 0xff);
       ByteBufferUtils.skip(bb, tagsLength);
     }
     int kvLength = (int) KeyValue.getKeyValueDataStructureSize(keyLength, valueLength, tagsLength);
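
The manual two-byte read above replaces bb.getShort() because getShort() sign-extends values above 32767 into negatives. A small standalone illustration (plain Java, not patch code):

  import java.nio.ByteBuffer;

  public class SignedVsUnsignedRead {
    public static void main(String[] args) {
      ByteBuffer bb = ByteBuffer.wrap(new byte[] { (byte) 0x9C, 0x40 }); // 40000, big-endian
      short signed = bb.getShort();                                      // -25536 after sign extension
      bb.rewind();
      int unsigned = ((bb.get() & 0xff) << 8) ^ (bb.get() & 0xff);       // 40000, the patch's read
      System.out.println(signed + " vs " + unsigned);
    }
  }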

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
index 332433f..59ef118 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
@@ -35,11 +35,12 @@ public class Tag {
   public final static int TYPE_LENGTH_SIZE = Bytes.SIZEOF_BYTE;
   public final static int TAG_LENGTH_SIZE = Bytes.SIZEOF_SHORT;
   public final static int INFRASTRUCTURE_SIZE = TYPE_LENGTH_SIZE + TAG_LENGTH_SIZE;
+  private static final int MAX_TAG_LENGTH = (2 * Short.MAX_VALUE) + 1 - TAG_LENGTH_SIZE;
 
   private final byte type;
   private final byte[] bytes;
   private int offset = 0;
-  private short length = 0;
+  private int length = 0;
 
   // The special tag will write the length of each tag and that will be
   // followed by the type and then the actual tag.
@@ -54,13 +55,19 @@ public class Tag {
    * @param tag
    */
   public Tag(byte tagType, byte[] tag) {
-    /** <length of tag - 2 bytes><type code - 1 byte><tag>
-     * taglength maximum is Short.MAX_SIZE.  It includes 1 byte type length and actual tag bytes length.
+    /**
+     * Format for a tag : <length of tag - 2 bytes><type code - 1 byte><tag> taglength is serialized
+     * using 2 bytes only but as this will be unsigned, we can have max taglength of
+     * (Short.MAX_SIZE * 2) +1. It includes 1 byte type length and actual tag bytes length.
      */
-    short tagLength = (short) ((tag.length & 0x0000ffff) + TYPE_LENGTH_SIZE);
-    length = (short) (TAG_LENGTH_SIZE + tagLength);
+    int tagLength = tag.length + TYPE_LENGTH_SIZE;
+    if (tagLength > MAX_TAG_LENGTH) {
+      throw new IllegalArgumentException(
+          "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
+    }
+    length = TAG_LENGTH_SIZE + tagLength;
     bytes = new byte[length];
-    int pos = Bytes.putShort(bytes, 0, tagLength);
+    int pos = Bytes.putAsShort(bytes, 0, tagLength);
     pos = Bytes.putByte(bytes, pos, tagType);
     Bytes.putBytes(bytes, pos, tag, 0, tag.length);
     this.type = tagType;
@@ -80,8 +87,8 @@ public class Tag {
     this(bytes, offset, getLength(bytes, offset));
   }
 
-  private static short getLength(byte[] bytes, int offset) {
-    return (short) (TAG_LENGTH_SIZE + Bytes.toShort(bytes, offset));
+  private static int getLength(byte[] bytes, int offset) {
+    return TAG_LENGTH_SIZE + Bytes.readAsInt(bytes, offset, TAG_LENGTH_SIZE);
   }
 
   /**
@@ -94,8 +101,29 @@ public class Tag {
    *          offset to start of the Tag
    * @param length
    *          length of the Tag
+   * @deprecated Use {@link #Tag(byte[], int, int)}
    */
+  @Deprecated
   public Tag(byte[] bytes, int offset, short length) {
+    this(bytes, offset, (int) length);
+  }
+
+  /**
+   * Creates a Tag from the specified byte array, starting at offset, and for length
+   * <code>length</code>. Presumes <code>bytes</code> content starting at <code>offset</code> is
+   * formatted as a Tag blob.
+   * @param bytes
+   *          byte array
+   * @param offset
+   *          offset to start of the Tag
+   * @param length
+   *          length of the Tag
+   */
+  public Tag(byte[] bytes, int offset, int length) {
+    if (length > MAX_TAG_LENGTH) {
+      throw new IllegalArgumentException(
+          "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
+    }
     this.bytes = bytes;
     this.offset = offset;
     this.length = length;
@@ -156,8 +184,8 @@ public class Tag {
     List<Tag> tags = new ArrayList<Tag>();
     int pos = offset;
     while (pos < offset + length) {
-      short tagLen = Bytes.toShort(b, pos);
-      tags.add(new Tag(b, pos, (short) (tagLen + TAG_LENGTH_SIZE)));
+      int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
+      tags.add(new Tag(b, pos, tagLen + TAG_LENGTH_SIZE));
       pos += TAG_LENGTH_SIZE + tagLen;
     }
     return tags;
@@ -174,9 +202,9 @@ public class Tag {
   public static Tag getTag(byte[] b, int offset, int length, byte type) {
     int pos = offset;
     while (pos < offset + length) {
-      short tagLen = Bytes.toShort(b, pos);
+      int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
       if(b[pos + TAG_LENGTH_SIZE] == type) {
-        return new Tag(b, pos, (short) (tagLen + TAG_LENGTH_SIZE));
+        return new Tag(b, pos, tagLen + TAG_LENGTH_SIZE);
       }
       pos += TAG_LENGTH_SIZE + tagLen;
     }
@@ -186,7 +214,7 @@ public class Tag {
   /**
    * Returns the total length of the entire tag entity
    */
-  short getLength() {
+  int getLength() {
     return this.length;
   }
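
For reference, the tag layout handled throughout this file is <2-byte length><1-byte type><tag bytes>, with the stored length covering the type byte plus the tag bytes. A hedged sketch of what the Tag(byte, byte[]) constructor shown earlier in this file produces (the spelled-out bytes are illustrative):

  import org.apache.hadoop.hbase.Tag;
  import org.apache.hadoop.hbase.util.Bytes;

  public class TagFormatDemo {
    public static void main(String[] args) {
      Tag t = new Tag((byte) 1, Bytes.toBytes("abc"));
      // Backing array: 0x00 0x04 | 0x01 | 'a' 'b' 'c'
      //   0x0004 = unsigned length (1 type byte + 3 tag bytes), written via Bytes.putAsShort
      System.out.println(Bytes.toStringBinary(t.getBuffer()));
    }
  }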
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
index 809a65f..84f251f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
@@ -55,7 +55,7 @@ public class CellCodecWithTags implements Codec {
       // Value
       write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
       // Tags
-      write(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
+      write(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLengthUnsigned());
       // MvccVersion
       this.out.write(Bytes.toBytes(cell.getMvccVersion()));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
index 7e7fe1d..d188bf3 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TagCompressionContext.java
@@ -61,13 +61,13 @@ public class TagCompressionContext {
    * @param length Length of all tag bytes
    * @throws IOException
    */
-  public void compressTags(OutputStream out, byte[] in, int offset, short length)
+  public void compressTags(OutputStream out, byte[] in, int offset, int length)
       throws IOException {
     int pos = offset;
     int endOffset = pos + length;
     assert pos < endOffset;
     while (pos < endOffset) {
-      short tagLen = Bytes.toShort(in, pos);
+      int tagLen = Bytes.readAsInt(in, pos, Tag.TAG_LENGTH_SIZE);
       pos += Tag.TAG_LENGTH_SIZE;
       write(in, pos, tagLen, out);
       pos += tagLen;
@@ -81,7 +81,7 @@ public class TagCompressionContext {
    * @param length Length of all tag bytes
    * @throws IOException
    */
-  public void compressTags(OutputStream out, ByteBuffer in, short length) throws IOException {
+  public void compressTags(OutputStream out, ByteBuffer in, int length) throws IOException {
     if (in.hasArray()) {
       compressTags(out, in.array(), in.arrayOffset() + in.position(), length);
       ByteBufferUtils.skip(in, length);
@@ -100,15 +100,15 @@ public class TagCompressionContext {
    * @param length Length of all tag bytes
    * @throws IOException
    */
-  public void uncompressTags(InputStream src, byte[] dest, int offset, short length)
+  public void uncompressTags(InputStream src, byte[] dest, int offset, int length)
       throws IOException {
     int endOffset = offset + length;
     while (offset < endOffset) {
       byte status = (byte) src.read();
       if (status == Dictionary.NOT_IN_DICTIONARY) {
         // We are writing short as tagLen. So can downcast this without any risk.
-        short tagLen = (short) StreamUtils.readRawVarint32(src);
-        offset = Bytes.putShort(dest, offset, tagLen);
+        int tagLen = StreamUtils.readRawVarint32(src);
+        offset = Bytes.putAsShort(dest, offset, tagLen);
         IOUtils.readFully(src, dest, offset, tagLen);
         tagDict.addEntry(dest, offset, tagLen);
         offset += tagLen;
@@ -118,7 +118,7 @@ public class TagCompressionContext {
         if (entry == null) {
           throw new IOException("Missing dictionary entry for index " + dictIdx);
         }
-        offset = Bytes.putShort(dest, offset, (short) entry.length);
+        offset = Bytes.putAsShort(dest, offset, entry.length);
         System.arraycopy(entry, 0, dest, offset, entry.length);
         offset += entry.length;
       }
@@ -140,11 +140,11 @@ public class TagCompressionContext {
     int endOffset = offset + length;
     while (offset < endOffset) {
       byte status = src.get();
-      short tagLen;
+      int tagLen;
       if (status == Dictionary.NOT_IN_DICTIONARY) {
         // We are writing short as tagLen. So can downcast this without any risk.
-        tagLen = (short) StreamUtils.readRawVarint32(src);
-        offset = Bytes.putShort(dest, offset, tagLen);
+        tagLen = StreamUtils.readRawVarint32(src);
+        offset = Bytes.putAsShort(dest, offset, tagLen);
         src.get(dest, offset, tagLen);
         tagDict.addEntry(dest, offset, tagLen);
         offset += tagLen;
@@ -154,8 +154,8 @@ public class TagCompressionContext {
         if (entry == null) {
           throw new IOException("Missing dictionary entry for index " + dictIdx);
         }
-        tagLen = (short) entry.length;
-        offset = Bytes.putShort(dest, offset, tagLen);
+        tagLen = entry.length;
+        offset = Bytes.putAsShort(dest, offset, tagLen);
         System.arraycopy(entry, 0, dest, offset, tagLen);
         offset += tagLen;
       }
@@ -170,7 +170,7 @@ public class TagCompressionContext {
    * @param length Length of all tag bytes
    * @throws IOException
    */
-  public void uncompressTags(InputStream src, ByteBuffer dest, short length) throws IOException {
+  public void uncompressTags(InputStream src, ByteBuffer dest, int length) throws IOException {
     if (dest.hasArray()) {
       uncompressTags(src, dest.array(), dest.arrayOffset() + dest.position(), length);
     } else {
@@ -180,7 +180,7 @@ public class TagCompressionContext {
     }
   }
 
-  private void write(byte[] data, int offset, short length, OutputStream out) throws IOException {
+  private void write(byte[] data, int offset, int length, OutputStream out) throws IOException {
     short dictIdx = Dictionary.NOT_IN_DICTIONARY;
     if (tagDict != null) {
       dictIdx = tagDict.findEntry(data, offset, length);

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
index d95ee73..d184fb3 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
@@ -227,7 +227,9 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
           currentBuffer.arrayOffset() + current.valueOffset,
           current.valueLength);
       if (current.tagsLength > 0) {
-        kvBuffer.putShort((short) current.tagsLength);
+        // Put short as unsigned
+        kvBuffer.put((byte)(current.tagsLength >> 8 & 0xff));
+        kvBuffer.put((byte)(current.tagsLength & 0xff));
         if (current.tagsOffset != -1) {
           // the offset of the tags bytes in the underlying buffer is marked. So the temp
           // buffer,tagsBuffer was not been used.
@@ -401,7 +403,8 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
   protected final void afterEncodingKeyValue(ByteBuffer in,
       DataOutputStream out, HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
     if (encodingCtx.getHFileContext().isIncludesTags()) {
-      short tagsLength = in.getShort();
+      // Read short as unsigned, high byte first
+      int tagsLength = ((in.get() & 0xff) << 8) ^ (in.get() & 0xff);
       ByteBufferUtils.putCompressedInt(out, tagsLength);
       // There are some tags to be written
       if (tagsLength > 0) {
@@ -431,8 +434,10 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
   protected final void afterDecodingKeyValue(DataInputStream source,
       ByteBuffer dest, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
     if (decodingCtx.getHFileContext().isIncludesTags()) {
-      short tagsLength = (short) ByteBufferUtils.readCompressedInt(source);
-      dest.putShort(tagsLength);
+      int tagsLength = ByteBufferUtils.readCompressedInt(source);
+      // Put as unsigned short
+      dest.put((byte)((tagsLength >> 8) & 0xff));
+      dest.put((byte)(tagsLength & 0xff));
       if (tagsLength > 0) {
         TagCompressionContext tagCompressionContext = decodingCtx.getTagCompressionContext();
         // When tag compression is been used in this file, tagCompressionContext will have a not

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
index 1dc8413..48a6e99 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
@@ -67,7 +67,8 @@ public class CopyKeyDataBlockEncoder extends BufferedDataBlockEncoder {
         current.valueOffset = currentBuffer.position();
         ByteBufferUtils.skip(currentBuffer, current.valueLength);
         if (includesTags()) {
-          current.tagsLength = currentBuffer.getShort();
+          // Read short as unsigned, high byte first
+          current.tagsLength = ((currentBuffer.get() & 0xff) << 8) ^ (currentBuffer.get() & 0xff);
           ByteBufferUtils.skip(currentBuffer, current.tagsLength);
         }
         if (includesMvcc()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
index 9e0497e..aa34f7b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
@@ -110,11 +110,11 @@ public class EncodedDataBlock {
         int offset = decompressedData.position();
         int klen = decompressedData.getInt();
         int vlen = decompressedData.getInt();
-        short tagsLen = 0;
+        int tagsLen = 0;
         ByteBufferUtils.skip(decompressedData, klen + vlen);
         // Read the tag length in case when steam contain tags
         if (meta.isIncludesTags()) {
-          tagsLen = decompressedData.getShort();
+          tagsLen = ((decompressedData.get() & 0xff) << 8) ^ (decompressedData.get() & 0xff);
           ByteBufferUtils.skip(decompressedData, tagsLen);
         }
         KeyValue kv = new KeyValue(decompressedData.array(), offset,

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index 4679194..88f8b0c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -757,6 +757,28 @@ public class Bytes {
   }
 
   /**
+   * Converts a byte array to an int value
+   * @param bytes byte array
+   * @param offset offset into array
+   * @param length how many bytes should be considered for creating int
+   * @return the int value
+   * @throws IllegalArgumentException if there's not enough room in the array at the offset
+   * indicated.
+   */
+  public static int readAsInt(byte[] bytes, int offset, final int length) {
+    if (offset + length > bytes.length) {
+      throw new IllegalArgumentException("offset (" + offset + ") + length (" + length
+          + ") exceed the" + " capacity of the array: " + bytes.length);
+    }
+    int n = 0;
+    for(int i = offset; i < (offset + length); i++) {
+      n <<= 8;
+      n ^= bytes[i] & 0xFF;
+    }
+    return n;
+  }
+
+  /**
    * Put an int value out to the specified byte array position.
    * @param bytes the byte array
    * @param offset position in the array
@@ -864,6 +886,29 @@ public class Bytes {
   }
 
   /**
+   * Put an int value as short out to the specified byte array position. Only the lower 2 bytes of
+   * the short will be put into the array. The caller of the API need to make sure they will not
+   * loose the value by doing so. This is useful to store an unsigned short which is represented as
+   * int in other parts.
+   * @param bytes the byte array
+   * @param offset position in the array
+   * @param val value to write out
+   * @return incremented offset
+   * @throws IllegalArgumentException if the byte array given doesn't have
+   * enough room at the offset specified.
+   */
+  public static int putAsShort(byte[] bytes, int offset, int val) {
+    if (bytes.length - offset < SIZEOF_SHORT) {
+      throw new IllegalArgumentException("Not enough room to put a short at"
+          + " offset " + offset + " in a " + bytes.length + " byte array");
+    }
+    bytes[offset+1] = (byte) val;
+    val >>= 8;
+    bytes[offset] = (byte) val;
+    return offset + SIZEOF_SHORT;
+  }
+
+  /**
    * Convert a BigDecimal value to a byte array
    *
    * @param val
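
A hedged usage sketch of the two helpers added above, showing that a value beyond the signed-short range survives the round trip (PutAsShortDemo is an illustrative name):

  import org.apache.hadoop.hbase.util.Bytes;

  public class PutAsShortDemo {
    public static void main(String[] args) {
      byte[] buf = new byte[Bytes.SIZEOF_SHORT];
      Bytes.putAsShort(buf, 0, 40000);                     // only the low 16 bits are stored
      int back = Bytes.readAsInt(buf, 0, Bytes.SIZEOF_SHORT);
      System.out.println(back);                            // 40000; Bytes.toShort on these bytes would give -25536
    }
  }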

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index f565cd3..51b3aec 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -538,7 +538,7 @@ public class TestKeyValue extends TestCase {
     byte[] metaValue2 = Bytes.toBytes("metaValue2");
     KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, new Tag[] {
         new Tag((byte) 1, metaValue1), new Tag((byte) 2, metaValue2) });
-    assertTrue(kv.getTagsLength() > 0);
+    assertTrue(kv.getTagsLengthUnsigned() > 0);
     assertTrue(Bytes.equals(kv.getRow(), row));
     assertTrue(Bytes.equals(kv.getFamily(), cf));
     assertTrue(Bytes.equals(kv.getQualifier(), q));
@@ -561,7 +561,7 @@ public class TestKeyValue extends TestCase {
     assertTrue(meta1Ok);
     assertTrue(meta2Ok);
     Iterator<Tag> tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(),
-        kv.getTagsLength());
+        kv.getTagsLengthUnsigned());
     //Iterator<Tag> tagItr = kv.tagsIterator();
     assertTrue(tagItr.hasNext());
     Tag next = tagItr.next();
@@ -575,7 +575,8 @@ public class TestKeyValue extends TestCase {
     Bytes.equals(next.getValue(), metaValue2);
     assertFalse(tagItr.hasNext());
 
-    tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(), kv.getTagsLength());
+    tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(),
+        kv.getTagsLengthUnsigned());
     assertTrue(tagItr.hasNext());
     next = tagItr.next();
     assertEquals(10, next.getTagLength());

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
index 1499a91..2808cdf 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
@@ -76,7 +76,7 @@ public class TestCellCodecWithTags {
     assertTrue(decoder.advance());
     Cell c = decoder.current();
     assertTrue(CellComparator.equals(c, cell1));
-    List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLengthUnsigned());
     assertEquals(2, tags.size());
     Tag tag = tags.get(0);
     assertEquals(1, tag.getType());
@@ -87,7 +87,7 @@ public class TestCellCodecWithTags {
     assertTrue(decoder.advance());
     c = decoder.current();
     assertTrue(CellComparator.equals(c, cell2));
-    tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLengthUnsigned());
     assertEquals(1, tags.size());
     tag = tags.get(0);
     assertEquals(1, tag.getType());
@@ -95,7 +95,7 @@ public class TestCellCodecWithTags {
     assertTrue(decoder.advance());
     c = decoder.current();
     assertTrue(CellComparator.equals(c, cell3));
-    tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLengthUnsigned());
     assertEquals(3, tags.size());
     tag = tags.get(0);
     assertEquals(2, tag.getType());

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
index d8dd7fe..9a158a9 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
@@ -76,7 +76,7 @@ public class TestKeyValueCodecWithTags {
     assertTrue(decoder.advance());
     Cell c = decoder.current();
     assertTrue(CellComparator.equals(c, kv1));
-    List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLengthUnsigned());
     assertEquals(2, tags.size());
     Tag tag = tags.get(0);
     assertEquals(1, tag.getType());
@@ -87,7 +87,7 @@ public class TestKeyValueCodecWithTags {
     assertTrue(decoder.advance());
     c = decoder.current();
     assertTrue(CellComparator.equals(c, kv2));
-    tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLengthUnsigned());
     assertEquals(1, tags.size());
     tag = tags.get(0);
     assertEquals(1, tag.getType());
@@ -95,7 +95,7 @@ public class TestKeyValueCodecWithTags {
     assertTrue(decoder.advance());
     c = decoder.current();
     assertTrue(CellComparator.equals(c, kv3));
-    tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLengthUnsigned());
     assertEquals(3, tags.size());
     tag = tags.get(0);
     assertEquals(2, tag.getType());

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
index 82739b9..369f633 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
@@ -47,11 +47,11 @@ public class TestTagCompressionContext {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     TagCompressionContext context = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
     KeyValue kv1 = createKVWithTags(2);
-    short tagsLength1 = kv1.getTagsLength();
+    int tagsLength1 = kv1.getTagsLengthUnsigned();
     ByteBuffer ib = ByteBuffer.wrap(kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1);
     context.compressTags(baos, ib, tagsLength1);
     KeyValue kv2 = createKVWithTags(3);
-    short tagsLength2 = kv2.getTagsLength();
+    int tagsLength2 = kv2.getTagsLengthUnsigned();
     ib = ByteBuffer.wrap(kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2);
     context.compressTags(baos, ib, tagsLength2);
 
@@ -73,10 +73,10 @@ public class TestTagCompressionContext {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     TagCompressionContext context = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
     KeyValue kv1 = createKVWithTags(1);
-    short tagsLength1 = kv1.getTagsLength();
+    int tagsLength1 = kv1.getTagsLengthUnsigned();
     context.compressTags(baos, kv1.getTagsArray(), kv1.getTagsOffset(), tagsLength1);
     KeyValue kv2 = createKVWithTags(3);
-    short tagsLength2 = kv2.getTagsLength();
+    int tagsLength2 = kv2.getTagsLengthUnsigned();
     context.compressTags(baos, kv2.getTagsArray(), kv2.getTagsOffset(), tagsLength2);
 
     context.clear();

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
index 587350b..ef2a832 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
@@ -464,7 +464,7 @@ public class PrefixTreeArrayScanner extends PrefixTreeCell implements CellScanne
   protected void populateTag() {
     int tagTreeIndex = currentRowNode.getTagOffset(currentCellIndex, blockMeta);
     tagsOffset = tagsReader.populateBuffer(tagTreeIndex).getColumnOffset();
-    tagsLength = (short)tagsReader.getColumnLength();
+    tagsLength = tagsReader.getColumnLength();
   }
 
   protected void populateTimestamp() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
index 390e802..dbef6c2 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
@@ -72,7 +72,7 @@ public class PrefixTreeCell implements Cell, Comparable<Cell> {
 
   protected byte[] tagsBuffer;
   protected int tagsOffset;
-  protected short tagsLength;
+  protected int tagsLength;
 
   /********************** Cell methods ******************/
 
@@ -224,13 +224,19 @@ public class PrefixTreeCell implements Cell, Comparable<Cell> {
   }
 
   @Override
-  public short getTagsLength() {
+  @Deprecated
+  public int getTagsLengthUnsigned() {
     return tagsLength;
   }
 
   @Override
+  @Deprecated
+  public short getTagsLength() {
+    return (short) tagsLength;
+  }
+
+  @Override
   public byte[] getTagsArray() {
     return this.tagsBuffer;
   }
-
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java
index 88a67ca..ea0adb0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV3.java
@@ -213,7 +213,8 @@ public class HFileReaderV3 extends HFileReaderV2 {
       }
       ByteBufferUtils.skip(blockBuffer, currKeyLen + currValueLen);
       if (reader.hfileContext.isIncludesTags()) {
-        currTagsLen = blockBuffer.getShort();
+        // Read short as unsigned, high byte first
+        currTagsLen = ((blockBuffer.get() & 0xff) << 8) ^ (blockBuffer.get() & 0xff);
         if (currTagsLen < 0 || currTagsLen > blockBuffer.limit()) {
           throw new IllegalStateException("Invalid currTagsLen " + currTagsLen + ". Block offset: "
               + block.getOffset() + ", block length: " + blockBuffer.limit() + ", position: "
@@ -262,7 +263,8 @@ public class HFileReaderV3 extends HFileReaderV2 {
         }
         ByteBufferUtils.skip(blockBuffer, klen + vlen);
         if (reader.hfileContext.isIncludesTags()) {
-          tlen = blockBuffer.getShort();
+          // Read short as unsigned, high byte first
+          tlen = ((blockBuffer.get() & 0xff) << 8) ^ (blockBuffer.get() & 0xff);
           if (tlen < 0 || tlen > blockBuffer.limit()) {
             throw new IllegalStateException("Invalid tlen " + tlen + ". Block offset: "
                 + block.getOffset() + ", block length: " + blockBuffer.limit() + ", position: "

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
index 3fb6d89..e652d00 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
@@ -88,7 +88,7 @@ public class HFileWriterV3 extends HFileWriterV2 {
     // Currently get the complete arrays
     append(kv.getMvccVersion(), kv.getBuffer(), kv.getKeyOffset(), kv.getKeyLength(),
         kv.getBuffer(), kv.getValueOffset(), kv.getValueLength(), kv.getBuffer(),
-        kv.getTagsOffset(), kv.getTagsLength());
+        kv.getTagsOffset(), kv.getTagsLengthUnsigned());
     this.maxMemstoreTS = Math.max(this.maxMemstoreTS, kv.getMvccVersion());
   }
   
@@ -160,7 +160,7 @@ public class HFileWriterV3 extends HFileWriterV2 {
       out.write(value, voffset, vlength);
       // Write the additional tag into the stream
       if (hFileContext.isIncludesTags()) {
-        out.writeShort((short) tagsLength);
+        out.writeShort(tagsLength);
         if (tagsLength > 0) {
           out.write(tag, tagsOffset, tagsLength);
           if (tagsLength > maxTagsLength) {
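
Dropping the (short) cast in out.writeShort(tagsLength) above is safe because DataOutput.writeShort(int) already writes just the two low-order bytes, high byte first, so an unsigned tags length up to 65535 serializes to the same bytes. A standalone illustration (plain Java, not patch code):

  import java.io.ByteArrayOutputStream;
  import java.io.DataOutputStream;
  import java.io.IOException;

  public class WriteShortDemo {
    public static void main(String[] args) throws IOException {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      DataOutputStream out = new DataOutputStream(bos);
      out.writeShort(40000);                               // writes 0x9C 0x40
      byte[] b = bos.toByteArray();
      System.out.println(((b[0] & 0xff) << 8) ^ (b[1] & 0xff));  // 40000
    }
  }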

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 6af8315..5530920 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -5180,7 +5180,7 @@ public class HRegion implements HeapSize { // , Writable{
                 newKV = new KeyValue(row.length, kv.getFamilyLength(),
                     kv.getQualifierLength(), now, KeyValue.Type.Put,
                     oldKv.getValueLength() + kv.getValueLength(),
-                    oldKv.getTagsLength() + kv.getTagsLength());
+                    oldKv.getTagsLengthUnsigned() + kv.getTagsLengthUnsigned());
                 // copy in the value
                 System.arraycopy(oldKv.getBuffer(), oldKv.getValueOffset(),
                     newKV.getBuffer(), newKV.getValueOffset(),
@@ -5191,9 +5191,10 @@ public class HRegion implements HeapSize { // , Writable{
                     kv.getValueLength());
                 // copy in the tags
                 System.arraycopy(oldKv.getBuffer(), oldKv.getTagsOffset(), newKV.getBuffer(),
-                    newKV.getTagsOffset(), oldKv.getTagsLength());
+                    newKV.getTagsOffset(), oldKv.getTagsLengthUnsigned());
                 System.arraycopy(kv.getBuffer(), kv.getTagsOffset(), newKV.getBuffer(),
-                    newKV.getTagsOffset() + oldKv.getTagsLength(), kv.getTagsLength());
+                    newKV.getTagsOffset() + oldKv.getTagsLengthUnsigned(),
+                    kv.getTagsLengthUnsigned());
                 // copy in row, family, and qualifier
                 System.arraycopy(kv.getBuffer(), kv.getRowOffset(),
                     newKV.getBuffer(), newKV.getRowOffset(), kv.getRowLength());
@@ -5383,8 +5384,8 @@ public class HRegion implements HeapSize { // , Writable{
               // Append new incremented KeyValue to list
               byte[] q = CellUtil.cloneQualifier(kv);
               byte[] val = Bytes.toBytes(amount);
-              int oldCellTagsLen = (c == null) ? 0 : c.getTagsLength();
-              int incCellTagsLen = kv.getTagsLength();
+              int oldCellTagsLen = (c == null) ? 0 : c.getTagsLengthUnsigned();
+              int incCellTagsLen = kv.getTagsLengthUnsigned();
               KeyValue newKV = new KeyValue(row.length, family.getKey().length, q.length, now,
                   KeyValue.Type.Put, val.length, oldCellTagsLen + incCellTagsLen);
               System.arraycopy(row, 0, newKV.getBuffer(), newKV.getRowOffset(), row.length);

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
index 59204b3..4e6e83a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
@@ -1897,9 +1897,9 @@ public class HLogSplitter {
   private static Cell tagReplayLogSequenceNumber(WALEntry entry, Cell cell) {
     // Tag puts with original sequence number if there is no LOG_REPLAY_TAG yet
     boolean needAddRecoveryTag = true;
-    if (cell.getTagsLength() > 0) {
-      Tag tmpTag = Tag.getTag(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(),
-        TagType.LOG_REPLAY_TAG_TYPE);
+    if (cell.getTagsLengthUnsigned() > 0) {
+      Tag tmpTag = Tag.getTag(cell.getTagsArray(), cell.getTagsOffset(),
+          cell.getTagsLengthUnsigned(), TagType.LOG_REPLAY_TAG_TYPE);
       if (tmpTag != null) {
         // found an existing log replay tag so reuse it
         needAddRecoveryTag = false;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java
index 0b22ed3..f2c8576 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java
@@ -106,7 +106,7 @@ class KeyValueCompression {
     // we first write the KeyValue infrastructure as VInts.
     WritableUtils.writeVInt(out, keyVal.getKeyLength());
     WritableUtils.writeVInt(out, keyVal.getValueLength());
-    WritableUtils.writeVInt(out, keyVal.getTagsLength());
+    WritableUtils.writeVInt(out, keyVal.getTagsLengthUnsigned());
 
     // now we write the row key, as the row key is likely to be repeated
     // We save space only if we attempt to compress elements with duplicates

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
index e36d741..ef6e879 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
@@ -194,7 +194,7 @@ public class SecureWALCellCodec extends WALCellCodec {
       StreamUtils.writeRawVInt32(cout, kv.getKeyLength());
       StreamUtils.writeRawVInt32(cout, kv.getValueLength());
       // To support tags
-      StreamUtils.writeRawVInt32(cout, kv.getTagsLength());
+      StreamUtils.writeRawVInt32(cout, kv.getTagsLengthUnsigned());
 
       // Write row, qualifier, and family
       StreamUtils.writeRawVInt32(cout, kv.getRowLength());

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
index 100e226..70dc575 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
@@ -170,7 +170,7 @@ public class WALCellCodec implements Codec {
       StreamUtils.writeRawVInt32(out, kv.getKeyLength());
       StreamUtils.writeRawVInt32(out, kv.getValueLength());
       // To support tags
-      short tagsLength = kv.getTagsLength();
+      int tagsLength = kv.getTagsLengthUnsigned();
       StreamUtils.writeRawVInt32(out, tagsLength);
 
       // Write row, qualifier, and family; use dictionary
@@ -227,7 +227,7 @@ public class WALCellCodec implements Codec {
       int keylength = StreamUtils.readRawVarint32(in);
       int vlength = StreamUtils.readRawVarint32(in);
       
-      short tagsLength = (short) StreamUtils.readRawVarint32(in);
+      int tagsLength = StreamUtils.readRawVarint32(in);
       int length = 0;
       if(tagsLength == 0) {
         length = KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + keylength + vlength;
@@ -266,7 +266,7 @@ public class WALCellCodec implements Codec {
 
       // tags
       if (tagsLength > 0) {
-        pos = Bytes.putShort(backingArray, pos, tagsLength);
+        pos = Bytes.putAsShort(backingArray, pos, tagsLength);
         if (compression.tagCompressionContext != null) {
           compression.tagCompressionContext.uncompressTags(in, backingArray, pos, tagsLength);
         } else {
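
The Bytes.putAsShort call introduced above comes from the Bytes.java additions listed in this commit's diffstat; conceptually it stores the low 16 bits of an int length so that values up to 65535 survive a later unsigned read. A rough standalone sketch of that idea, assuming a big-endian two-byte layout (not the actual HBase implementation, which may differ in details):

    // Standalone sketch only: store an int's low 16 bits and read them back
    // without sign extension, so tag lengths up to 65535 round-trip intact.
    public final class UnsignedShortSketch {
      static int putAsShort(byte[] b, int off, int val) {
        b[off] = (byte) (val >> 8);      // high byte, big-endian as assumed here
        b[off + 1] = (byte) val;         // low byte
        return off + 2;                  // next write position
      }

      static int readAsInt(byte[] b, int off) {
        return ((b[off] & 0xff) << 8) | (b[off + 1] & 0xff);
      }

      public static void main(String[] args) {
        byte[] b = new byte[2];
        putAsShort(b, 0, 40000);
        System.out.println(readAsInt(b, 0));  // 40000, not a negative short
      }
    }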

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
index da99244..f7af40f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
@@ -683,7 +683,7 @@ public class AccessControlLists {
        throws IOException {
      List<Permission> results = Lists.newArrayList();
      Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
-        cell.getTagsLength());
+        cell.getTagsLengthUnsigned());
      while (tagsIterator.hasNext()) {
        Tag tag = tagsIterator.next();
        if (tag.getType() == ACL_TAG_TYPE) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 21f8892..0a12d7e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -759,7 +759,7 @@ public class AccessController extends BaseRegionObserver
       for (Cell cell: e.getValue()) {
         List<Tag> tags = Lists.newArrayList(new Tag(AccessControlLists.ACL_TAG_TYPE, perms));
         Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell.getTagsArray(),
-            cell.getTagsOffset(), cell.getTagsLength());
+            cell.getTagsOffset(), cell.getTagsLengthUnsigned());
         while (tagIterator.hasNext()) {
           tags.add(tagIterator.next());
         }
@@ -793,9 +793,9 @@ public class AccessController extends BaseRegionObserver
     }
     for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
-      if (cell.getTagsLength() > 0) {
+      if (cell.getTagsLengthUnsigned() > 0) {
         Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
-          cell.getTagsLength());
+          cell.getTagsLengthUnsigned());
         while (tagsItr.hasNext()) {
           if (tagsItr.next().getType() == AccessControlLists.ACL_TAG_TYPE) {
             throw new AccessDeniedException("Mutation contains cell with reserved type tag");
@@ -1828,7 +1828,7 @@ public class AccessController extends BaseRegionObserver
     ListMultimap<String,Permission> perms = ArrayListMultimap.create();
     if (oldCell != null) {
       Iterator<Tag> tagIterator = CellUtil.tagsIterator(oldCell.getTagsArray(),
-          oldCell.getTagsOffset(), oldCell.getTagsLength());
+          oldCell.getTagsOffset(), oldCell.getTagsLengthUnsigned());
       while (tagIterator.hasNext()) {
         Tag tag = tagIterator.next();
         if (tag.getType() != AccessControlLists.ACL_TAG_TYPE) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index d9e5117..98a4b6c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -700,7 +700,7 @@ public class VisibilityController extends BaseRegionObserver implements MasterOb
               for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
                 Cell cell = cellScanner.current();
                 List<Tag> tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
-                    cell.getTagsLength());
+                    cell.getTagsLengthUnsigned());
                 tags.addAll(visibilityTags);
                 Cell updatedCell = new KeyValue(cell.getRowArray(), cell.getRowOffset(),
                     cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(),
@@ -867,9 +867,9 @@ public class VisibilityController extends BaseRegionObserver implements MasterOb
     if (isSystemOrSuperUser()) {
       return true;
     }
-    if (cell.getTagsLength() > 0) {
+    if (cell.getTagsLengthUnsigned() > 0) {
       Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
-          cell.getTagsLength());
+          cell.getTagsLengthUnsigned());
       while (tagsItr.hasNext()) {
         if (reservedVisTagTypes.contains(tagsItr.next().getType())) {
           return false;
@@ -1201,7 +1201,7 @@ public class VisibilityController extends BaseRegionObserver implements MasterOb
     }
     // Adding all other tags
     Iterator<Tag> tagsItr = CellUtil.tagsIterator(newCell.getTagsArray(), newCell.getTagsOffset(),
-        newCell.getTagsLength());
+        newCell.getTagsLengthUnsigned());
     while (tagsItr.hasNext()) {
       Tag tag = tagsItr.next();
       if (tag.getType() != VisibilityUtils.VISIBILITY_TAG_TYPE) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java
index 0ff8d67..aa89a35 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java
@@ -81,7 +81,7 @@ class VisibilityLabelFilter extends FilterBase {
     }
 
     Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
-        cell.getTagsLength());
+        cell.getTagsLengthUnsigned());
     boolean visibilityTagPresent = false;
     while (tagsItr.hasNext()) {
       boolean includeKV = true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
index c151b74..89b49c0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
@@ -108,7 +108,7 @@ public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
 
   private void extractDeleteTags(Cell delCell, Type type) {
     // If tag is present in the delete
-    if (delCell.getTagsLength() > 0) {
+    if (delCell.getTagsLengthUnsigned() > 0) {
       switch (type) {
         case DeleteFamily:
           List<Tag> delTags = new ArrayList<Tag>();

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index 37976df..8a18922 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -177,11 +177,11 @@ public class VisibilityUtils {
   public static boolean getVisibilityTags(Cell cell, List<Tag> tags) {
     boolean sortedOrder = false;
     Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
-        cell.getTagsLength());
+        cell.getTagsLengthUnsigned());
     while (tagsIterator.hasNext()) {
       Tag tag = tagsIterator.next();
-      if(tag.getType() == VisibilityUtils.VISIBILITY_EXP_SERIALIZATION_TAG_TYPE) {
-        int serializationVersion = Bytes.toShort(tag.getValue());
+      if (tag.getType() == VisibilityUtils.VISIBILITY_EXP_SERIALIZATION_TAG_TYPE) {
+        int serializationVersion = Bytes.toShort(tag.getBuffer());
         if (serializationVersion == VisibilityConstants.VISIBILITY_SERIALIZATION_VERSION) {
           sortedOrder = true;
           continue;
@@ -201,7 +201,7 @@ public class VisibilityUtils {
    */
   public static boolean isVisibilityTagsPresent(Cell cell) {
     Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
-        cell.getTagsLength());
+        cell.getTagsLengthUnsigned());
     while (tagsIterator.hasNext()) {
       Tag tag = tagsIterator.next();
       if (tag.getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) {
@@ -271,7 +271,7 @@ public class VisibilityUtils {
 
   private static List<List<Integer>> sortTagsBasedOnOrdinal(Cell cell) throws IOException {
     Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
-        cell.getTagsLength());
+        cell.getTagsLengthUnsigned());
     List<List<Integer>> fullTagsList = new ArrayList<List<Integer>>();
     while (tagsItr.hasNext()) {
       Tag tag = tagsItr.next();

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
index 8ef3261..e4733d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
@@ -162,9 +162,9 @@ public class TestPrefixTreeEncoding {
         fail("Current kv " + currentKV + " is smaller than previous keyvalue " + previousKV);
       }
       if (!includesTag) {
-        assertFalse(currentKV.getTagsLength() > 0);
+        assertFalse(currentKV.getTagsLengthUnsigned() > 0);
       } else {
-        Assert.assertTrue(currentKV.getTagsLength() > 0);
+        Assert.assertTrue(currentKV.getTagsLengthUnsigned() > 0);
       }
       previousKV = currentKV;
     } while (seeker.next());
@@ -279,9 +279,9 @@ public class TestPrefixTreeEncoding {
       userDataStream.write(kv.getBuffer(), kv.getKeyOffset(), kv.getKeyLength());
       userDataStream.write(kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
       if (useTags) {
-        userDataStream.writeShort(kv.getTagsLength());
+        userDataStream.writeShort(kv.getTagsLengthUnsigned());
         userDataStream.write(kv.getBuffer(), kv.getValueOffset() + kv.getValueLength()
-            + Bytes.SIZEOF_SHORT, kv.getTagsLength());
+            + Bytes.SIZEOF_SHORT, kv.getTagsLengthUnsigned());
       }
     }
     return ByteBuffer.wrap(baosInMemory.toByteArray());
@@ -316,9 +316,9 @@ public class TestPrefixTreeEncoding {
       userDataStream.write(kv.getBuffer(), kv.getKeyOffset(), kv.getKeyLength());
       userDataStream.write(kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
       if (useTags) {
-        userDataStream.writeShort(kv.getTagsLength());
+        userDataStream.writeShort(kv.getTagsLengthUnsigned());
         userDataStream.write(kv.getBuffer(), kv.getValueOffset() + kv.getValueLength()
-            + Bytes.SIZEOF_SHORT, kv.getTagsLength());
+            + Bytes.SIZEOF_SHORT, kv.getTagsLengthUnsigned());
       }
     }
     return ByteBuffer.wrap(baosInMemory.toByteArray());

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
index 8c8ee76..59deb49 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
@@ -189,8 +189,8 @@ public class TestHFileBlock {
       // always write the taglength
       totalSize += kv.getLength();
       if (useTag) {
-        dataOutputStream.writeShort(kv.getTagsLength());
-        dataOutputStream.write(kv.getBuffer(), kv.getTagsOffset(), kv.getTagsLength());
+        dataOutputStream.writeShort(kv.getTagsLengthUnsigned());
+        dataOutputStream.write(kv.getBuffer(), kv.getTagsOffset(), kv.getTagsLengthUnsigned());
       }
       if (includesMemstoreTS) {
         long memstoreTS = randomizer.nextLong();

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
index c0f8751..2fc701a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java
@@ -237,7 +237,7 @@ public class TestHFileWriterV3 {
         buf.get(value);
         byte[] tagValue = null;
         if (useTags) {
-          int tagLen = buf.getShort();
+          int tagLen = ((buf.get() & 0xff) << 8) ^ (buf.get() & 0xff);
           tagValue = new byte[tagLen];
           buf.get(tagValue);
         }
@@ -257,9 +257,9 @@ public class TestHFileWriterV3 {
         if (useTags) {
           assertNotNull(tagValue);
           KeyValue tkv =  keyValues.get(entriesRead);
-          assertEquals(tagValue.length, tkv.getTagsLength());
-          assertTrue(Bytes.compareTo(tagValue, 0, tagValue.length, tkv.getBuffer(),
-              tkv.getTagsOffset(), tkv.getTagsLength()) == 0);
+          assertEquals(tagValue.length, tkv.getTagsLengthUnsigned());
+          assertTrue(Bytes.compareTo(tagValue, 0, tagValue.length, tkv.getTagsArray(),
+              tkv.getTagsOffset(), tkv.getTagsLengthUnsigned()) == 0);
         }
         ++entriesRead;
       }
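
A note on the unsigned read used in the test above (illustration only): because the left operand has its low eight bits clear and the right operand fits in eight bits, '^' and '|' combine the two bytes identically, and either form yields a length in 0..65535 where ByteBuffer.getShort() would sign-extend. A tiny check of that equivalence with a made-up length:

    import java.nio.ByteBuffer;

    // Illustration only: read a 2-byte length from a ByteBuffer without sign
    // extension, once with '^' as in the test and once with '|'.
    public class UnsignedReadCheck {
      public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.wrap(new byte[] { (byte) 0x80, 0x04, (byte) 0x80, 0x04 });
        int viaXor = ((buf.get() & 0xff) << 8) ^ (buf.get() & 0xff);
        int viaOr  = ((buf.get() & 0xff) << 8) | (buf.get() & 0xff);
        System.out.println(viaXor + " " + viaOr);  // 32772 32772
      }
    }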

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
index 5a9c3b6..ead913a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
@@ -208,7 +208,7 @@ public class DataBlockEncodingTool {
     }
 
     rawKVs = uncompressedOutputStream.toByteArray();
-    boolean useTag = (currentKV.getTagsLength() > 0);
+    boolean useTag = (currentKV.getTagsLengthUnsigned() > 0);
     for (DataBlockEncoding encoding : encodings) {
       if (encoding == DataBlockEncoding.NONE) {
         continue;

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
index fa86df7..3de6ed3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
@@ -27,6 +27,7 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -63,7 +64,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 
 /**
- *  Class that test tags
+ * Class that test tags
  */
 @Category(MediumTests.class)
 public class TestTags {
@@ -123,9 +124,9 @@ public class TestTags {
       table.put(put);
       admin.flush(tableName.getName());
       List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(tableName.getName());
-      for(HRegion region : regions) {
+      for (HRegion region : regions) {
         Store store = region.getStore(fam);
-        while(!(store.getStorefilesCount() > 0)) {
+        while (!(store.getStorefilesCount() > 0)) {
           Thread.sleep(10);
         }
       }
@@ -137,9 +138,9 @@ public class TestTags {
       table.put(put1);
       admin.flush(tableName.getName());
       regions = TEST_UTIL.getHBaseCluster().getRegions(tableName.getName());
-      for(HRegion region : regions) {
+      for (HRegion region : regions) {
         Store store = region.getStore(fam);
-        while(!(store.getStorefilesCount() > 1)) {
+        while (!(store.getStorefilesCount() > 1)) {
           Thread.sleep(10);
         }
       }
@@ -152,15 +153,15 @@ public class TestTags {
 
       admin.flush(tableName.getName());
       regions = TEST_UTIL.getHBaseCluster().getRegions(tableName.getName());
-      for(HRegion region : regions) {
+      for (HRegion region : regions) {
         Store store = region.getStore(fam);
-        while(!(store.getStorefilesCount() > 2)) {
+        while (!(store.getStorefilesCount() > 2)) {
           Thread.sleep(10);
         }
       }
       result(fam, row, qual, row2, table, value, value2, row1, value1);
       admin.compact(tableName.getName());
-      while(admin.getCompactionState(tableName.getName()) != CompactionState.NONE) {
+      while (admin.getCompactionState(tableName.getName()) != CompactionState.NONE) {
         Thread.sleep(10);
       }
       result(fam, row, qual, row2, table, value, value2, row1, value1);
@@ -201,9 +202,9 @@ public class TestTags {
       table.put(put);
       admin.flush(tableName.getName());
       List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(tableName.getName());
-      for(HRegion region : regions) {
+      for (HRegion region : regions) {
         Store store = region.getStore(fam);
-        while(!(store.getStorefilesCount() > 0)) {
+        while (!(store.getStorefilesCount() > 0)) {
           Thread.sleep(10);
         }
       }
@@ -214,9 +215,9 @@ public class TestTags {
       table.put(put1);
       admin.flush(tableName.getName());
       regions = TEST_UTIL.getHBaseCluster().getRegions(tableName.getName());
-      for(HRegion region : regions) {
+      for (HRegion region : regions) {
         Store store = region.getStore(fam);
-        while(!(store.getStorefilesCount() > 1)) {
+        while (!(store.getStorefilesCount() > 1)) {
           Thread.sleep(10);
         }
       }
@@ -228,9 +229,9 @@ public class TestTags {
 
       admin.flush(tableName.getName());
       regions = TEST_UTIL.getHBaseCluster().getRegions(tableName.getName());
-      for(HRegion region : regions) {
+      for (HRegion region : regions) {
         Store store = region.getStore(fam);
-        while(!(store.getStorefilesCount() > 2)) {
+        while (!(store.getStorefilesCount() > 2)) {
           Thread.sleep(10);
         }
       }
@@ -240,7 +241,7 @@ public class TestTags {
         Result[] next = scanner.next(3);
         for (Result result : next) {
           CellScanner cellScanner = result.cellScanner();
-          boolean advance = cellScanner.advance();
+          cellScanner.advance();
           KeyValue current = (KeyValue) cellScanner.current();
           assertTrue(current.getValueOffset() + current.getValueLength() == current.getLength());
         }
@@ -249,7 +250,7 @@ public class TestTags {
           scanner.close();
       }
       admin.compact(tableName.getName());
-      while(admin.getCompactionState(tableName.getName()) != CompactionState.NONE) {
+      while (admin.getCompactionState(tableName.getName()) != CompactionState.NONE) {
         Thread.sleep(10);
       }
       s = new Scan(row);
@@ -258,7 +259,7 @@ public class TestTags {
         Result[] next = scanner.next(3);
         for (Result result : next) {
           CellScanner cellScanner = result.cellScanner();
-          boolean advance = cellScanner.advance();
+          cellScanner.advance();
           KeyValue current = (KeyValue) cellScanner.current();
           assertTrue(current.getValueOffset() + current.getValueLength() == current.getLength());
         }
@@ -273,6 +274,7 @@ public class TestTags {
       }
     }
   }
+
   @Test
   public void testFlushAndCompactionwithCombinations() throws Exception {
     HTable table = null;
@@ -302,7 +304,8 @@ public class TestTags {
       Put put = new Put(row);
       byte[] value = Bytes.toBytes("value");
       put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
-      put.setAttribute("visibility", Bytes.toBytes("ram"));
+      int bigTagLen = Short.MAX_VALUE + 5;
+      put.setAttribute("visibility", new byte[bigTagLen]);
       table.put(put);
       Put put1 = new Put(row1);
       byte[] value1 = Bytes.toBytes("1000dfsdf");
@@ -310,9 +313,9 @@ public class TestTags {
       table.put(put1);
       admin.flush(tableName.getName());
       List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(tableName.getName());
-      for(HRegion region : regions) {
+      for (HRegion region : regions) {
         Store store = region.getStore(fam);
-        while(!(store.getStorefilesCount() > 0)) {
+        while (!(store.getStorefilesCount() > 0)) {
           Thread.sleep(10);
         }
       }
@@ -323,9 +326,9 @@ public class TestTags {
       table.put(put1);
       admin.flush(tableName.getName());
       regions = TEST_UTIL.getHBaseCluster().getRegions(tableName.getName());
-      for(HRegion region : regions) {
+      for (HRegion region : regions) {
         Store store = region.getStore(fam);
-        while(!(store.getStorefilesCount() > 1)) {
+        while (!(store.getStorefilesCount() > 1)) {
           Thread.sleep(10);
         }
       }
@@ -340,57 +343,60 @@ public class TestTags {
       table.put(put2);
       admin.flush(tableName.getName());
       regions = TEST_UTIL.getHBaseCluster().getRegions(tableName.getName());
-      for(HRegion region : regions) {
+      for (HRegion region : regions) {
         Store store = region.getStore(fam);
-        while(!(store.getStorefilesCount() > 2)) {
+        while (!(store.getStorefilesCount() > 2)) {
           Thread.sleep(10);
         }
       }
+      TestCoprocessorForTags.checkTagPresence = true;
       Scan s = new Scan(row);
+      s.setCaching(1);
       ResultScanner scanner = table.getScanner(s);
       try {
-        Result[] next = scanner.next(5);
-        for (Result result : next) {
-          CellScanner cellScanner = result.cellScanner();
-          boolean advance = cellScanner.advance();
+        Result next = null;
+        while ((next = scanner.next()) != null) {
+          CellScanner cellScanner = next.cellScanner();
+          cellScanner.advance();
           KeyValue current = (KeyValue) cellScanner.current();
-          // System.out.println(current);
-          int tagsLength = current.getTagsLength();
-          if (tagsLength == 0) {
-            assertTrue(current.getValueOffset() + current.getValueLength() == current.getLength());
+          if (CellUtil.matchingRow(current, row)) {
+            assertEquals(1, TestCoprocessorForTags.tags.size());
+            Tag tag = TestCoprocessorForTags.tags.get(0);
+            assertEquals(bigTagLen, tag.getTagLength());
           } else {
-            // even if taglength is going to be > 0 the byte array would be same
-            assertTrue(current.getValueOffset() + current.getValueLength() != current.getLength());
+            assertEquals(0, TestCoprocessorForTags.tags.size());
           }
         }
       } finally {
         if (scanner != null) {
           scanner.close();
         }
+        TestCoprocessorForTags.checkTagPresence = false;
       }
-      while(admin.getCompactionState(tableName.getName()) != CompactionState.NONE) {
+      while (admin.getCompactionState(tableName.getName()) != CompactionState.NONE) {
         Thread.sleep(10);
       }
-      s = new Scan(row);
+      TestCoprocessorForTags.checkTagPresence = true;
       scanner = table.getScanner(s);
       try {
-        Result[] next = scanner.next(5);
-        for (Result result : next) {
-          CellScanner cellScanner = result.cellScanner();
-          boolean advance = cellScanner.advance();
+        Result next = null;
+        while ((next = scanner.next()) != null) {
+          CellScanner cellScanner = next.cellScanner();
+          cellScanner.advance();
           KeyValue current = (KeyValue) cellScanner.current();
-          // System.out.println(current);
-          if (current.getTagsLength() == 0) {
-            assertTrue(current.getValueOffset() + current.getValueLength() == current.getLength());
+          if (CellUtil.matchingRow(current, row)) {
+            assertEquals(1, TestCoprocessorForTags.tags.size());
+            Tag tag = TestCoprocessorForTags.tags.get(0);
+            assertEquals(bigTagLen, tag.getTagLength());
           } else {
-            // even if taglength is going to be > 0 the byte array would be same
-            assertTrue(current.getValueOffset() + current.getValueLength() != current.getLength());
+            assertEquals(0, TestCoprocessorForTags.tags.size());
           }
         }
       } finally {
         if (scanner != null) {
           scanner.close();
         }
+        TestCoprocessorForTags.checkTagPresence = false;
       }
     } finally {
       if (table != null) {
@@ -544,9 +550,6 @@ public class TestTags {
     try {
       scanner = table.getScanner(s);
       Result next = scanner.next();
-      CellScanner cellScanner = next.cellScanner();
-      boolean advance = cellScanner.advance();
-      KeyValue current = (KeyValue) cellScanner.current();
 
       assertTrue(Bytes.equals(next.getRow(), row));
       assertTrue(Bytes.equals(next.getValue(fam, qual), value));
@@ -630,7 +633,8 @@ public class TestTags {
           CellScanner cellScanner = result.cellScanner();
           if (cellScanner.advance()) {
             Cell cell = cellScanner.current();
-            tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
+            tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
+                cell.getTagsLengthUnsigned());
           }
         }
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
index 491a9db..f99ee58 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
@@ -240,7 +240,8 @@ public class TestReplicationWithTags {
         // Check tag presence in the 1st cell in 1st Result
         if (!results.isEmpty()) {
           Cell cell = results.get(0);
-          tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
+          tags = Tag
+              .asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLengthUnsigned());
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/f2d16e0a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
index 5ca2415..19a1dd9 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
@@ -161,7 +161,7 @@ public class ThriftUtilities {
       col.setQualifier(CellUtil.cloneQualifier(kv));
       col.setTimestamp(kv.getTimestamp());
       col.setValue(CellUtil.cloneValue(kv));
-      if (kv.getTagsLength() > 0) {
+      if (kv.getTagsLengthUnsigned() > 0) {
         col.setTags(CellUtil.getTagArray(kv));
       }
       columnValues.add(col);

