hbase-commits mailing list archives

From: te...@apache.org
Subject: svn commit: r1443289 [1/6] - in /hbase/trunk: ./ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/ hbase-common/src/main/java/org/apache/hadoop/hbase/util/ hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/ hbase-common/src/mai...
Date: Thu, 07 Feb 2013 00:36:27 GMT
Author: tedyu
Date: Thu Feb  7 00:36:24 2013
New Revision: 1443289

URL: http://svn.apache.org/viewvc?rev=1443289&view=rev
Log:
HBASE-4676 Prefix Compression - Trie data block encoding (Matt Corgan)


Added:
    hbase/trunk/hbase-prefix-tree/
    hbase/trunk/hbase-prefix-tree/pom.xml
    hbase/trunk/hbase-prefix-tree/src/
    hbase/trunk/hbase-prefix-tree/src/main/
    hbase/trunk/hbase-prefix-tree/src/main/java/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeCodec.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeSeeker.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/ArraySearcherPool.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/DecoderFactory.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeArrayScanner.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/PrefixTreeCell.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/column/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/column/ColumnNodeReader.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/column/ColumnReader.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/row/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/row/RowNodeReader.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/timestamp/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/timestamp/MvccVersionDecoder.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/timestamp/TimestampDecoder.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/EncoderFactory.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/EncoderPool.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/PrefixTreeEncoder.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/ThreadLocalEncoderPool.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/column/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/column/ColumnNodeWriter.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/other/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/other/CellTypeEncoder.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/other/LongEncoder.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/row/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/row/RowNodeWriter.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/row/RowSectionWriter.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/tokenize/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/tokenize/TokenDepthComparator.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/tokenize/TokenizerRowSearchPosition.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/encode/tokenize/TokenizerRowSearchResult.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellScanner.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/CellSearcher.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/scanner/ReversibleCellScanner.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/byterange/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/byterange/ByteRangeSet.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/byterange/impl/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/byterange/impl/ByteRangeHashSet.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/byterange/impl/ByteRangeTreeSet.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/vint/
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/vint/UFIntTool.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/vint/UVIntTool.java
    hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/util/vint/UVLongTool.java
    hbase/trunk/hbase-prefix-tree/src/test/
    hbase/trunk/hbase-prefix-tree/src/test/java/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/keyvalue/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/keyvalue/TestKeyValueTool.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/PrefixTreeTestConstants.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/blockmeta/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/blockmeta/TestBlockMeta.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/builder/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/builder/TestTokenizer.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/builder/TestTokenizerData.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/builder/TestTreeDepth.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/builder/data/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/builder/data/TestTokenizerDataBasic.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/builder/data/TestTokenizerDataEdgeCase.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/column/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/column/TestColumnBuilder.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/column/TestColumnData.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/column/data/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/column/data/TestColumnDataSimple.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/BaseTestRowData.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/TestPrefixTreeSearcher.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/TestRowData.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/TestRowEncoder.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataComplexQualifiers.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataDifferentTimestamps.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataEmpty.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataMultiFamilies.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataNub.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataQualifierByteOrdering.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataSimple.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataSingleQualifier.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataUrls.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/timestamp/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/timestamp/TestTimestampData.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/timestamp/TestTimestampEncoder.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/timestamp/data/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/timestamp/data/TestTimestampDataBasic.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/timestamp/data/TestTimestampDataNumbers.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/codec/prefixtree/timestamp/data/TestTimestampDataRepeats.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/bytes/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/bytes/TestByteRange.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/comparator/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/comparator/ByteArrayComparator.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/number/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/number/NumberFormatter.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/number/RandomNumberUtils.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/vint/
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/vint/TestFIntTool.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/vint/TestVIntTool.java
    hbase/trunk/hbase-prefix-tree/src/test/java/org/apache/hbase/util/vint/TestVLongTool.java
Modified:
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeTool.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellComparator.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellOutputStream.java
    hbase/trunk/hbase-server/pom.xml
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
    hbase/trunk/pom.xml

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java?rev=1443289&r1=1443288&r2=1443289&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java Thu Feb  7 00:36:24 2013
@@ -38,7 +38,10 @@ public enum DataBlockEncoding {
   // id 1 is reserved for the BITSET algorithm to be added later
   PREFIX(2, createEncoder("org.apache.hadoop.hbase.io.encoding.PrefixKeyDeltaEncoder")),
   DIFF(3, createEncoder("org.apache.hadoop.hbase.io.encoding.DiffKeyDeltaEncoder")),
-  FAST_DIFF(4, createEncoder("org.apache.hadoop.hbase.io.encoding.FastDiffDeltaEncoder"));
+  FAST_DIFF(4, createEncoder("org.apache.hadoop.hbase.io.encoding.FastDiffDeltaEncoder")),
+  // id 5 is reserved for the COPY_KEY algorithm for benchmarking
+  // COPY_KEY(5, createEncoder("org.apache.hadoop.hbase.io.encoding.CopyKeyDataBlockEncoder")),
+  PREFIX_TREE(6, createEncoder("org.apache.hbase.codec.prefixtree.PrefixTreeCodec"));
 
   private final short id;
   private final byte[] idInBytes;

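For context, the hunk above registers the new codec as DataBlockEncoding.PREFIX_TREE with id 6. A minimal sketch of selecting it for a column family, assuming the HColumnDescriptor API of this era (the constructor and setter names are the usual ones, not part of this commit):

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

    public class PrefixTreeEncodingExample {
      public static void main(String[] args) {
        // Pick the block encoding used when writing this family's HFiles.
        HColumnDescriptor family = new HColumnDescriptor("cf");
        family.setDataBlockEncoding(DataBlockEncoding.PREFIX_TREE);
        System.out.println(family.getDataBlockEncoding()); // PREFIX_TREE (id 6)
      }
    }
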
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeTool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeTool.java?rev=1443289&r1=1443288&r2=1443289&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeTool.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeTool.java Thu Feb  7 00:36:24 2013
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hbase.util;
 
+import java.io.IOException;
+import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Collection;
 
@@ -50,4 +52,14 @@ public class ByteRangeTool {
     return ranges;
   }
 
+  public static void write(OutputStream os, ByteRange byteRange) throws IOException {
+    os.write(byteRange.getBytes(), byteRange.getOffset(), byteRange.getLength());
+  }
+
+  public static void write(OutputStream os, ByteRange byteRange, int byteRangeInnerOffset)
+      throws IOException {
+    os.write(byteRange.getBytes(), byteRange.getOffset() + byteRangeInnerOffset,
+      byteRange.getLength() - byteRangeInnerOffset);
+  }
+
 }

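The two ByteRangeTool.write overloads added above stream a ByteRange (or a suffix of it) to an OutputStream without copying. A usage sketch, assuming a ByteRange(byte[]) constructor that wraps the whole array:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.hbase.util.ByteRange;
    import org.apache.hadoop.hbase.util.ByteRangeTool;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ByteRangeWriteExample {
      public static void main(String[] args) throws IOException {
        ByteRange range = new ByteRange(Bytes.toBytes("hello-world")); // 11 bytes
        ByteArrayOutputStream os = new ByteArrayOutputStream();

        ByteRangeTool.write(os, range);    // writes all 11 bytes of the range
        ByteRangeTool.write(os, range, 6); // skips 6 bytes inside the range, writes "world"

        System.out.println(os.size()); // 16
      }
    }
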
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java?rev=1443289&r1=1443288&r2=1443289&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java Thu Feb  7 00:36:24 2013
@@ -27,8 +27,10 @@ import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
+import java.util.Collection;
 import java.util.Comparator;
 import java.util.Iterator;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -43,6 +45,7 @@ import org.apache.hadoop.io.WritableUtil
 import sun.misc.Unsafe;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Lists;
 
 /**
  * Utility class that handles byte arrays, conversions to/from other types,
@@ -1718,4 +1721,44 @@ public class Bytes {
     return out;
   }
 
+  public static boolean equals(List<byte[]> a, List<byte[]> b) {
+    if (a == null) {
+      if (b == null) {
+        return true;
+      }
+      return false;
+    }
+    if (b == null) {
+      return false;
+    }
+    if (a.size() != b.size()) {
+      return false;
+    }
+    for (int i = 0; i < a.size(); ++i) {
+      if (!Bytes.equals(a.get(i), b.get(i))) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  public static boolean isSorted(Collection<byte[]> arrays) {
+    byte[] previous = new byte[0];
+    for (byte[] array : IterableUtils.nullSafe(arrays)) {
+      if (Bytes.compareTo(previous, array) > 0) {
+        return false;
+      }
+      previous = array;
+    }
+    return true;
+  }
+
+  public static List<byte[]> getUtf8ByteArrays(List<String> strings) {
+    List<byte[]> byteArrays = Lists.newArrayListWithCapacity(CollectionUtils.nullSafeSize(strings));
+    for (String s : IterableUtils.nullSafe(strings)) {
+      byteArrays.add(Bytes.toBytes(s));
+    }
+    return byteArrays;
+  }
+
 }

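The new Bytes helpers above compare lists of byte[] element-wise, check lexicographic ordering of a collection, and convert Strings to their UTF-8 byte[] form. A short sketch exercising them exactly as declared in the diff:

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hbase.util.Bytes;

    public class BytesListHelpersExample {
      public static void main(String[] args) {
        List<byte[]> a = Bytes.getUtf8ByteArrays(Arrays.asList("apple", "banana"));
        List<byte[]> b = Bytes.getUtf8ByteArrays(Arrays.asList("apple", "banana"));

        System.out.println(Bytes.equals(a, b));  // true: element-wise byte[] equality
        System.out.println(Bytes.isSorted(a));   // true: "apple" <= "banana"
        System.out.println(Bytes.isSorted(Arrays.asList(
            Bytes.toBytes("b"), Bytes.toBytes("a")))); // false: out of order
      }
    }
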
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java?rev=1443289&r1=1443288&r2=1443289&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java Thu Feb  7 00:36:24 2013
@@ -28,12 +28,15 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.io.WritableUtils;
 
+import com.google.common.primitives.Bytes;
+
 /**
  * Generate list of key values which are very useful to test data block encoding
  * and compression.
  */
 public class RedundantKVGenerator {
   // row settings
+  static byte[] DEFAULT_COMMON_PREFIX = new byte[0];
   static int DEFAULT_NUMBER_OF_ROW_PREFIXES = 10;
   static int DEFAULT_AVERAGE_PREFIX_LENGTH = 6;
   static int DEFAULT_PREFIX_LENGTH_VARIANCE = 3;
@@ -107,6 +110,7 @@ public class RedundantKVGenerator {
       ) {
     this.randomizer = randomizer;
 
+    this.commonPrefix = DEFAULT_COMMON_PREFIX;
     this.numberOfRowPrefixes = numberOfRowPrefixes;
     this.averagePrefixLength = averagePrefixLength;
     this.prefixLengthVariance = prefixLengthVariance;
@@ -115,7 +119,7 @@ public class RedundantKVGenerator {
     this.numberOfRows = numberOfRows;
 
     this.chanceForSameQualifier = chanceForSameQualifier;
-    this.chanceForSimiliarQualifier = chanceForSimiliarQualifier;
+    this.chanceForSimilarQualifier = chanceForSimiliarQualifier;
     this.averageQualifierLength = averageQualifierLength;
     this.qualifierLengthVariance = qualifierLengthVariance;
 
@@ -131,6 +135,7 @@ public class RedundantKVGenerator {
   private Random randomizer;
 
   // row settings
+  private byte[] commonPrefix;//global prefix before rowPrefixes
   private int numberOfRowPrefixes;
   private int averagePrefixLength = 6;
   private int prefixLengthVariance = 3;
@@ -138,9 +143,12 @@ public class RedundantKVGenerator {
   private int suffixLengthVariance = 3;
   private int numberOfRows = 500;
 
+  //family
+  private byte[] family;
+
   // qualifier
   private float chanceForSameQualifier = 0.5f;
-  private float chanceForSimiliarQualifier = 0.4f;
+  private float chanceForSimilarQualifier = 0.4f;
   private int averageQualifierLength = 9;
   private int qualifierLengthVariance = 3;
 
@@ -161,7 +169,8 @@ public class RedundantKVGenerator {
           prefixLengthVariance;
       byte[] newPrefix = new byte[prefixLength];
       randomizer.nextBytes(newPrefix);
-      prefixes.add(newPrefix);
+      byte[] newPrefixWithCommon = newPrefix;
+      prefixes.add(newPrefixWithCommon);
     }
 
     // generate rest of the row
@@ -173,7 +182,8 @@ public class RedundantKVGenerator {
       int randomPrefix = randomizer.nextInt(prefixes.size());
       byte[] row = new byte[prefixes.get(randomPrefix).length +
                             suffixLength];
-      rows.add(row);
+      byte[] rowWithCommonPrefix = Bytes.concat(commonPrefix, row);
+      rows.add(rowWithCommonPrefix);
     }
 
     return rows;
@@ -188,20 +198,22 @@ public class RedundantKVGenerator {
     List<KeyValue> result = new ArrayList<KeyValue>();
 
     List<byte[]> rows = generateRows();
-    Map<Integer, List<byte[]>> rowsToQualifier =
-        new HashMap<Integer, List<byte[]>>();
+    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<Integer, List<byte[]>>();
 
-    byte[] family = new byte[columnFamilyLength];
-    randomizer.nextBytes(family);
+    if(family==null){
+      family = new byte[columnFamilyLength];
+      randomizer.nextBytes(family);
+    }
 
-    long baseTimestamp = Math.abs(randomizer.nextLong()) /
-        baseTimestampDivide;
+    long baseTimestamp = Math.abs(randomizer.nextLong()) / baseTimestampDivide;
 
     byte[] value = new byte[valueLength];
 
     for (int i = 0; i < howMany; ++i) {
-      long timestamp = baseTimestamp + randomizer.nextInt(
-          timestampDiffSize);
+      long timestamp = baseTimestamp;
+      if(timestampDiffSize > 0){
+        timestamp += randomizer.nextInt(timestampDiffSize);
+      }
       Integer rowId = randomizer.nextInt(rows.size());
       byte[] row = rows.get(rowId);
 
@@ -209,13 +221,11 @@ public class RedundantKVGenerator {
       // occasionally completely different
       byte[] qualifier;
       float qualifierChance = randomizer.nextFloat();
-      if (!rowsToQualifier.containsKey(rowId) ||
-          qualifierChance > chanceForSameQualifier +
-          chanceForSimiliarQualifier) {
+      if (!rowsToQualifier.containsKey(rowId)
+          || qualifierChance > chanceForSameQualifier + chanceForSimilarQualifier) {
         int qualifierLength = averageQualifierLength;
-        qualifierLength +=
-            randomizer.nextInt(2 * qualifierLengthVariance + 1) -
-            qualifierLengthVariance;
+        qualifierLength += randomizer.nextInt(2 * qualifierLengthVariance + 1)
+            - qualifierLengthVariance;
         qualifier = new byte[qualifierLength];
         randomizer.nextBytes(qualifier);
 
@@ -227,8 +237,8 @@ public class RedundantKVGenerator {
       } else if (qualifierChance > chanceForSameQualifier) {
         // similar qualifier
         List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
-        byte[] originalQualifier = previousQualifiers.get(
-            randomizer.nextInt(previousQualifiers.size()));
+        byte[] originalQualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers
+            .size()));
 
         qualifier = new byte[originalQualifier.length];
         int commonPrefix = randomizer.nextInt(qualifier.length);
@@ -241,8 +251,7 @@ public class RedundantKVGenerator {
       } else {
         // same qualifier
         List<byte[]> previousQualifiers = rowsToQualifier.get(rowId);
-        qualifier = previousQualifiers.get(
-            randomizer.nextInt(previousQualifiers.size()));
+        qualifier = previousQualifiers.get(randomizer.nextInt(previousQualifiers.size()));
       }
 
       if (randomizer.nextFloat() < chanceForZeroValue) {
@@ -286,5 +295,99 @@ public class RedundantKVGenerator {
 
     return result;
   }
+  
+  
+  /************************ get/set ***********************************/
+  
+  public RedundantKVGenerator setCommonPrefix(byte[] prefix){
+    this.commonPrefix = prefix;
+    return this;
+  }
+
+  public RedundantKVGenerator setRandomizer(Random randomizer) {
+    this.randomizer = randomizer;
+    return this;
+  }
+
+  public RedundantKVGenerator setNumberOfRowPrefixes(int numberOfRowPrefixes) {
+    this.numberOfRowPrefixes = numberOfRowPrefixes;
+    return this;
+  }
+
+  public RedundantKVGenerator setAveragePrefixLength(int averagePrefixLength) {
+    this.averagePrefixLength = averagePrefixLength;
+    return this;
+  }
 
+  public RedundantKVGenerator setPrefixLengthVariance(int prefixLengthVariance) {
+    this.prefixLengthVariance = prefixLengthVariance;
+    return this;
+  }
+
+  public RedundantKVGenerator setAverageSuffixLength(int averageSuffixLength) {
+    this.averageSuffixLength = averageSuffixLength;
+    return this;
+  }
+
+  public RedundantKVGenerator setSuffixLengthVariance(int suffixLengthVariance) {
+    this.suffixLengthVariance = suffixLengthVariance;
+    return this;
+  }
+
+  public RedundantKVGenerator setNumberOfRows(int numberOfRows) {
+    this.numberOfRows = numberOfRows;
+    return this;
+  }
+
+  public RedundantKVGenerator setChanceForSameQualifier(float chanceForSameQualifier) {
+    this.chanceForSameQualifier = chanceForSameQualifier;
+    return this;
+  }
+
+  public RedundantKVGenerator setChanceForSimilarQualifier(float chanceForSimiliarQualifier) {
+    this.chanceForSimilarQualifier = chanceForSimiliarQualifier;
+    return this;
+  }
+
+  public RedundantKVGenerator setAverageQualifierLength(int averageQualifierLength) {
+    this.averageQualifierLength = averageQualifierLength;
+    return this;
+  }
+
+  public RedundantKVGenerator setQualifierLengthVariance(int qualifierLengthVariance) {
+    this.qualifierLengthVariance = qualifierLengthVariance;
+    return this;
+  }
+
+  public RedundantKVGenerator setColumnFamilyLength(int columnFamilyLength) {
+    this.columnFamilyLength = columnFamilyLength;
+    return this;
+  }
+
+  public RedundantKVGenerator setFamily(byte[] family) {
+    this.family = family;
+    this.columnFamilyLength = family.length;
+    return this;
+  }
+
+  public RedundantKVGenerator setValueLength(int valueLength) {
+    this.valueLength = valueLength;
+    return this;
+  }
+
+  public RedundantKVGenerator setChanceForZeroValue(float chanceForZeroValue) {
+    this.chanceForZeroValue = chanceForZeroValue;
+    return this;
+  }
+
+  public RedundantKVGenerator setBaseTimestampDivide(int baseTimestampDivide) {
+    this.baseTimestampDivide = baseTimestampDivide;
+    return this;
+  }
+
+  public RedundantKVGenerator setTimestampDiffSize(int timestampDiffSize) {
+    this.timestampDiffSize = timestampDiffSize;
+    return this;
+  }
+  
 }

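The RedundantKVGenerator changes above add a configurable common row prefix, a fixed family, a timestampDiffSize guard, and fluent setters that return this. A configuration sketch, assuming the generator's existing generateTestKeyValues(int) entry point (not shown in this hunk):

    import java.util.List;

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;

    public class GeneratorExample {
      public static void main(String[] args) {
        // Setters return "this", so the generator can be configured fluently.
        RedundantKVGenerator generator = new RedundantKVGenerator()
            .setCommonPrefix(Bytes.toBytes("block1-")) // prepended to every generated row
            .setFamily(Bytes.toBytes("cf"))            // fixed family instead of random bytes
            .setNumberOfRows(50)
            .setTimestampDiffSize(0);                  // all cells share the base timestamp

        List<KeyValue> kvs = generator.generateTestKeyValues(200);
        System.out.println(kvs.size()); // 200
      }
    }
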
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellComparator.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellComparator.java?rev=1443289&r1=1443288&r2=1443289&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellComparator.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellComparator.java Thu Feb  7 00:36:24 2013
@@ -79,22 +79,39 @@ public class CellComparator implements C
   /**************** equals ****************************/
 
   public static boolean equals(Cell a, Cell b){
-    if (!areKeyLengthsEqual(a, b)) {
-      return false;
-    }
-    //TODO compare byte[]'s in reverse since later bytes more likely to differ
-    return 0 == compareStatic(a, b);
+    return equalsRow(a, b)
+        && equalsFamily(a, b)
+        && equalsQualifier(a, b)
+        && equalsTimestamp(a, b)
+        && equalsType(a, b);
   }
 
   public static boolean equalsRow(Cell a, Cell b){
-    if(!areRowLengthsEqual(a, b)){
-      return false;
-    }
-    return 0 == Bytes.compareTo(
+    return Bytes.equals(
       a.getRowArray(), a.getRowOffset(), a.getRowLength(),
       b.getRowArray(), b.getRowOffset(), b.getRowLength());
   }
 
+  public static boolean equalsFamily(Cell a, Cell b){
+    return Bytes.equals(
+      a.getFamilyArray(), a.getFamilyOffset(), a.getFamilyLength(),
+      b.getFamilyArray(), b.getFamilyOffset(), b.getFamilyLength());
+  }
+
+  public static boolean equalsQualifier(Cell a, Cell b){
+    return Bytes.equals(
+      a.getQualifierArray(), a.getQualifierOffset(), a.getQualifierLength(),
+      b.getQualifierArray(), b.getQualifierOffset(), b.getQualifierLength());
+  }
+
+  public static boolean equalsTimestamp(Cell a, Cell b){
+    return a.getTimestamp() == b.getTimestamp();
+  }
+
+  public static boolean equalsType(Cell a, Cell b){
+    return a.getTypeByte() == b.getTypeByte();
+  }
+
 
   /********************* hashCode ************************/
 

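The CellComparator change above replaces the length-plus-compare shortcut with component-wise equality checks (row, family, qualifier, timestamp, type). A small sketch, assuming KeyValue implements the Cell interface at this revision, as the prefix-tree tests rely on:

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hbase.cell.CellComparator;

    public class CellEqualsExample {
      public static void main(String[] args) {
        KeyValue a = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
            Bytes.toBytes("q1"), 100L, Bytes.toBytes("v1"));
        KeyValue b = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
            Bytes.toBytes("q2"), 100L, Bytes.toBytes("v2"));

        System.out.println(CellComparator.equalsRow(a, b));       // true
        System.out.println(CellComparator.equalsFamily(a, b));    // true
        System.out.println(CellComparator.equalsQualifier(a, b)); // false
        System.out.println(CellComparator.equals(a, b));          // false: qualifiers differ
      }
    }
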
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellOutputStream.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellOutputStream.java?rev=1443289&r1=1443288&r2=1443289&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellOutputStream.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hbase/cell/CellOutputStream.java Thu Feb  7 00:36:24 2013
@@ -18,6 +18,8 @@
 
 package org.apache.hbase.cell;
 
+import java.io.IOException;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hbase.Cell;
@@ -45,6 +47,6 @@ public interface CellOutputStream {
    * that can then be read from the implementation to be sent to disk, put in the block cache, or
    * sent over the network.
    */
-  void flush();
+  void flush() throws IOException;
 
 }

Added: hbase/trunk/hbase-prefix-tree/pom.xml
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/pom.xml?rev=1443289&view=auto
==============================================================================
--- hbase/trunk/hbase-prefix-tree/pom.xml (added)
+++ hbase/trunk/hbase-prefix-tree/pom.xml Thu Feb  7 00:36:24 2013
@@ -0,0 +1,62 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>0.95-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+
+  <artifactId>hbase-prefix-tree</artifactId>
+  <name>HBase - Prefix Tree</name>
+  <description>Prefix Tree Data Block Encoder</description>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <!-- Always skip the second part executions, since we only run
+        simple unit tests in this module. -->
+        <executions>
+          <execution>
+            <id>secondPartTestsExecution</id>
+            <phase>test</phase>
+            <goals>
+              <goal>test</goal>
+            </goals>
+            <configuration>
+              <skip>true</skip>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+    </dependency>
+  </dependencies>
+
+</project>

Added: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeBlockMeta.java?rev=1443289&view=auto
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeBlockMeta.java (added)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeBlockMeta.java Thu Feb  7 00:36:24 2013
@@ -0,0 +1,841 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase.codec.prefixtree;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hbase.codec.prefixtree.encode.other.LongEncoder;
+import org.apache.hbase.util.vint.UVIntTool;
+import org.apache.hbase.util.vint.UVLongTool;
+
+/**
+ * Information about the block.  Stored at the beginning of the byte[].  Contains things
+ * like minimum timestamp and width of FInts in the row tree.
+ *
+ * Most fields stored in VInts that get decoded on the first access of each new block.
+ */
+@InterfaceAudience.Private
+public class PrefixTreeBlockMeta {
+
+  /******************* static fields ********************/
+
+  public static final int VERSION = 0;
+
+  public static final int MAX_FAMILY_LENGTH = Byte.MAX_VALUE;// hard-coded in KeyValue
+
+  public static final int
+	  NUM_LONGS = 2,
+    NUM_INTS = 22,
+    NUM_SHORTS = 0,//keyValueTypeWidth not persisted
+    NUM_SINGLE_BYTES = 2,
+    MAX_BYTES = Bytes.SIZEOF_LONG * NUM_LONGS
+        + Bytes.SIZEOF_SHORT * NUM_SHORTS
+        + Bytes.SIZEOF_INT * NUM_INTS
+        + NUM_SINGLE_BYTES;
+
+
+  /**************** transient fields *********************/
+
+  protected int arrayOffset;
+  protected int bufferOffset;
+
+
+  /**************** persisted fields **********************/
+
+  // PrefixTree version to allow future format modifications
+  protected int version;
+  protected int numMetaBytes;
+  protected int numKeyValueBytes;
+  protected boolean includesMvccVersion;//probably don't need this explicitly, but only 1 byte
+
+  // split the byte[] into 6 sections for the different data types
+  protected int numRowBytes;
+  protected int numFamilyBytes;
+  protected int numQualifierBytes;
+  protected int numTimestampBytes;
+  protected int numMvccVersionBytes;
+  protected int numValueBytes;
+
+  // number of bytes in each section of fixed width FInts
+  protected int nextNodeOffsetWidth;
+  protected int familyOffsetWidth;
+  protected int qualifierOffsetWidth;
+  protected int timestampIndexWidth;
+  protected int mvccVersionIndexWidth;
+  protected int valueOffsetWidth;
+  protected int valueLengthWidth;
+
+  // used to pre-allocate structures for reading
+  protected int rowTreeDepth;
+  protected int maxRowLength;
+  protected int maxQualifierLength;
+
+  // the timestamp from which the deltas are calculated
+  protected long minTimestamp;
+  protected int timestampDeltaWidth;
+  protected long minMvccVersion;
+  protected int mvccVersionDeltaWidth;
+
+  protected boolean allSameType;
+  protected byte allTypes;
+
+  protected int numUniqueRows;
+  protected int numUniqueFamilies;
+  protected int numUniqueQualifiers;
+
+
+  /***************** constructors ********************/
+
+  public PrefixTreeBlockMeta() {
+  }
+
+  public PrefixTreeBlockMeta(InputStream is) throws IOException{
+    this.version = VERSION;
+    this.arrayOffset = 0;
+    this.bufferOffset = 0;
+    readVariableBytesFromInputStream(is);
+  }
+
+  /**
+   * @param buffer positioned at start of PtBlockMeta
+   */
+  public PrefixTreeBlockMeta(ByteBuffer buffer) {
+    initOnBlock(buffer);
+  }
+
+  public void initOnBlock(ByteBuffer buffer) {
+    arrayOffset = buffer.arrayOffset();
+    bufferOffset = buffer.position();
+    readVariableBytesFromArray(buffer.array(), arrayOffset + bufferOffset);
+  }
+
+
+	/**************** operate on each field **********************/
+
+  public int calculateNumMetaBytes(){
+    int numBytes = 0;
+    numBytes += UVIntTool.numBytes(version);
+    numBytes += UVLongTool.numBytes(numMetaBytes);
+    numBytes += UVIntTool.numBytes(numKeyValueBytes);
+    ++numBytes;//os.write(getIncludesMvccVersion());
+
+    numBytes += UVIntTool.numBytes(numRowBytes);
+    numBytes += UVIntTool.numBytes(numFamilyBytes);
+    numBytes += UVIntTool.numBytes(numQualifierBytes);
+    numBytes += UVIntTool.numBytes(numTimestampBytes);
+    numBytes += UVIntTool.numBytes(numMvccVersionBytes);
+    numBytes += UVIntTool.numBytes(numValueBytes);
+
+    numBytes += UVIntTool.numBytes(nextNodeOffsetWidth);
+    numBytes += UVIntTool.numBytes(familyOffsetWidth);
+    numBytes += UVIntTool.numBytes(qualifierOffsetWidth);
+    numBytes += UVIntTool.numBytes(timestampIndexWidth);
+    numBytes += UVIntTool.numBytes(mvccVersionIndexWidth);
+    numBytes += UVIntTool.numBytes(valueOffsetWidth);
+    numBytes += UVIntTool.numBytes(valueLengthWidth);
+
+    numBytes += UVIntTool.numBytes(rowTreeDepth);
+    numBytes += UVIntTool.numBytes(maxRowLength);
+    numBytes += UVIntTool.numBytes(maxQualifierLength);
+
+    numBytes += UVLongTool.numBytes(minTimestamp);
+    numBytes += UVIntTool.numBytes(timestampDeltaWidth);
+    numBytes += UVLongTool.numBytes(minMvccVersion);
+    numBytes += UVIntTool.numBytes(mvccVersionDeltaWidth);
+    ++numBytes;//os.write(getAllSameTypeByte());
+    ++numBytes;//os.write(allTypes);
+
+    numBytes += UVIntTool.numBytes(numUniqueRows);
+    numBytes += UVIntTool.numBytes(numUniqueFamilies);
+    numBytes += UVIntTool.numBytes(numUniqueQualifiers);
+    return numBytes;
+  }
+
+  public void writeVariableBytesToOutputStream(OutputStream os) throws IOException{
+      UVIntTool.writeBytes(version, os);
+      UVIntTool.writeBytes(numMetaBytes, os);
+      UVIntTool.writeBytes(numKeyValueBytes, os);
+      os.write(getIncludesMvccVersionByte());
+
+      UVIntTool.writeBytes(numRowBytes, os);
+      UVIntTool.writeBytes(numFamilyBytes, os);
+      UVIntTool.writeBytes(numQualifierBytes, os);
+      UVIntTool.writeBytes(numTimestampBytes, os);
+      UVIntTool.writeBytes(numMvccVersionBytes, os);
+      UVIntTool.writeBytes(numValueBytes, os);
+
+      UVIntTool.writeBytes(nextNodeOffsetWidth, os);
+      UVIntTool.writeBytes(familyOffsetWidth, os);
+      UVIntTool.writeBytes(qualifierOffsetWidth, os);
+      UVIntTool.writeBytes(timestampIndexWidth, os);
+      UVIntTool.writeBytes(mvccVersionIndexWidth, os);
+      UVIntTool.writeBytes(valueOffsetWidth, os);
+      UVIntTool.writeBytes(valueLengthWidth, os);
+
+      UVIntTool.writeBytes(rowTreeDepth, os);
+      UVIntTool.writeBytes(maxRowLength, os);
+      UVIntTool.writeBytes(maxQualifierLength, os);
+
+      UVLongTool.writeBytes(minTimestamp, os);
+      UVIntTool.writeBytes(timestampDeltaWidth, os);
+      UVLongTool.writeBytes(minMvccVersion, os);
+      UVIntTool.writeBytes(mvccVersionDeltaWidth, os);
+      os.write(getAllSameTypeByte());
+      os.write(allTypes);
+
+      UVIntTool.writeBytes(numUniqueRows, os);
+      UVIntTool.writeBytes(numUniqueFamilies, os);
+      UVIntTool.writeBytes(numUniqueQualifiers, os);
+  }
+
+  public void readVariableBytesFromInputStream(InputStream is) throws IOException{
+      version = UVIntTool.getInt(is);
+      numMetaBytes = UVIntTool.getInt(is);
+      numKeyValueBytes = UVIntTool.getInt(is);
+      setIncludesMvccVersion((byte) is.read());
+
+      numRowBytes = UVIntTool.getInt(is);
+      numFamilyBytes = UVIntTool.getInt(is);
+      numQualifierBytes = UVIntTool.getInt(is);
+      numTimestampBytes = UVIntTool.getInt(is);
+      numMvccVersionBytes = UVIntTool.getInt(is);
+      numValueBytes = UVIntTool.getInt(is);
+
+      nextNodeOffsetWidth = UVIntTool.getInt(is);
+      familyOffsetWidth = UVIntTool.getInt(is);
+      qualifierOffsetWidth = UVIntTool.getInt(is);
+      timestampIndexWidth = UVIntTool.getInt(is);
+      mvccVersionIndexWidth = UVIntTool.getInt(is);
+      valueOffsetWidth = UVIntTool.getInt(is);
+      valueLengthWidth = UVIntTool.getInt(is);
+
+      rowTreeDepth = UVIntTool.getInt(is);
+      maxRowLength = UVIntTool.getInt(is);
+      maxQualifierLength = UVIntTool.getInt(is);
+
+      minTimestamp = UVLongTool.getLong(is);
+      timestampDeltaWidth = UVIntTool.getInt(is);
+      minMvccVersion = UVLongTool.getLong(is);
+      mvccVersionDeltaWidth = UVIntTool.getInt(is);
+
+      setAllSameType((byte) is.read());
+      allTypes = (byte) is.read();
+
+      numUniqueRows = UVIntTool.getInt(is);
+      numUniqueFamilies = UVIntTool.getInt(is);
+      numUniqueQualifiers = UVIntTool.getInt(is);
+  }
+
+  public void readVariableBytesFromArray(byte[] bytes, int offset) {
+    int position = offset;
+
+    version = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(version);
+    numMetaBytes = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numMetaBytes);
+    numKeyValueBytes = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numKeyValueBytes);
+    setIncludesMvccVersion(bytes[position]);
+    ++position;
+
+    numRowBytes = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numRowBytes);
+    numFamilyBytes = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numFamilyBytes);
+    numQualifierBytes = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numQualifierBytes);
+    numTimestampBytes = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numTimestampBytes);
+    numMvccVersionBytes = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numMvccVersionBytes);
+    numValueBytes = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numValueBytes);
+
+    nextNodeOffsetWidth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(nextNodeOffsetWidth);
+    familyOffsetWidth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(familyOffsetWidth);
+    qualifierOffsetWidth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(qualifierOffsetWidth);
+    timestampIndexWidth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(timestampIndexWidth);
+    mvccVersionIndexWidth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(mvccVersionIndexWidth);
+    valueOffsetWidth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(valueOffsetWidth);
+    valueLengthWidth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(valueLengthWidth);
+
+    rowTreeDepth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(rowTreeDepth);
+    maxRowLength = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(maxRowLength);
+    maxQualifierLength = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(maxQualifierLength);
+
+    minTimestamp = UVLongTool.getLong(bytes, position);
+    position += UVLongTool.numBytes(minTimestamp);
+    timestampDeltaWidth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(timestampDeltaWidth);
+    minMvccVersion = UVLongTool.getLong(bytes, position);
+    position += UVLongTool.numBytes(minMvccVersion);
+    mvccVersionDeltaWidth = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(mvccVersionDeltaWidth);
+
+    setAllSameType(bytes[position]);
+    ++position;
+    allTypes = bytes[position];
+    ++position;
+
+    numUniqueRows = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numUniqueRows);
+    numUniqueFamilies = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numUniqueFamilies);
+    numUniqueQualifiers = UVIntTool.getInt(bytes, position);
+    position += UVIntTool.numBytes(numUniqueQualifiers);
+  }
+
+	//TODO method that can read directly from ByteBuffer instead of InputStream
+
+
+  /*************** methods *************************/
+
+  public int getKeyValueTypeWidth() {
+    return allSameType ? 0 : 1;
+  }
+
+  public byte getIncludesMvccVersionByte() {
+    return includesMvccVersion ? (byte) 1 : (byte) 0;
+  }
+
+  public void setIncludesMvccVersion(byte includesMvccVersionByte) {
+    includesMvccVersion = includesMvccVersionByte != 0;
+  }
+
+  public byte getAllSameTypeByte() {
+    return allSameType ? (byte) 1 : (byte) 0;
+  }
+
+  public void setAllSameType(byte allSameTypeByte) {
+    allSameType = allSameTypeByte != 0;
+  }
+
+  public boolean isAllSameTimestamp() {
+    return timestampIndexWidth == 0;
+  }
+
+  public boolean isAllSameMvccVersion() {
+    return mvccVersionIndexWidth == 0;
+  }
+
+  public void setTimestampFields(LongEncoder encoder){
+    this.minTimestamp = encoder.getMin();
+    this.timestampIndexWidth = encoder.getBytesPerIndex();
+    this.timestampDeltaWidth = encoder.getBytesPerDelta();
+    this.numTimestampBytes = encoder.getTotalCompressedBytes();
+  }
+
+  public void setMvccVersionFields(LongEncoder encoder){
+    this.minMvccVersion = encoder.getMin();
+    this.mvccVersionIndexWidth = encoder.getBytesPerIndex();
+    this.mvccVersionDeltaWidth = encoder.getBytesPerDelta();
+    this.numMvccVersionBytes = encoder.getTotalCompressedBytes();
+  }
+
+
+  /*************** Object methods *************************/
+
+  /**
+   * Generated by Eclipse
+   */
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    PrefixTreeBlockMeta other = (PrefixTreeBlockMeta) obj;
+    if (allSameType != other.allSameType)
+      return false;
+    if (allTypes != other.allTypes)
+      return false;
+    if (arrayOffset != other.arrayOffset)
+      return false;
+    if (bufferOffset != other.bufferOffset)
+      return false;
+    if (valueLengthWidth != other.valueLengthWidth)
+      return false;
+    if (valueOffsetWidth != other.valueOffsetWidth)
+      return false;
+    if (familyOffsetWidth != other.familyOffsetWidth)
+      return false;
+    if (includesMvccVersion != other.includesMvccVersion)
+      return false;
+    if (maxQualifierLength != other.maxQualifierLength)
+      return false;
+    if (maxRowLength != other.maxRowLength)
+      return false;
+    if (mvccVersionDeltaWidth != other.mvccVersionDeltaWidth)
+      return false;
+    if (mvccVersionIndexWidth != other.mvccVersionIndexWidth)
+      return false;
+    if (minMvccVersion != other.minMvccVersion)
+      return false;
+    if (minTimestamp != other.minTimestamp)
+      return false;
+    if (nextNodeOffsetWidth != other.nextNodeOffsetWidth)
+      return false;
+    if (numValueBytes != other.numValueBytes)
+      return false;
+    if (numFamilyBytes != other.numFamilyBytes)
+      return false;
+    if (numMvccVersionBytes != other.numMvccVersionBytes)
+      return false;
+    if (numMetaBytes != other.numMetaBytes)
+      return false;
+    if (numQualifierBytes != other.numQualifierBytes)
+      return false;
+    if (numRowBytes != other.numRowBytes)
+      return false;
+    if (numTimestampBytes != other.numTimestampBytes)
+      return false;
+    if (numUniqueFamilies != other.numUniqueFamilies)
+      return false;
+    if (numUniqueQualifiers != other.numUniqueQualifiers)
+      return false;
+    if (numUniqueRows != other.numUniqueRows)
+      return false;
+    if (numKeyValueBytes != other.numKeyValueBytes)
+      return false;
+    if (qualifierOffsetWidth != other.qualifierOffsetWidth)
+      return false;
+    if (rowTreeDepth != other.rowTreeDepth)
+      return false;
+    if (timestampDeltaWidth != other.timestampDeltaWidth)
+      return false;
+    if (timestampIndexWidth != other.timestampIndexWidth)
+      return false;
+    if (version != other.version)
+      return false;
+    return true;
+  }
+
+  /**
+   * Generated by Eclipse
+   */
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + (allSameType ? 1231 : 1237);
+    result = prime * result + allTypes;
+    result = prime * result + arrayOffset;
+    result = prime * result + bufferOffset;
+    result = prime * result + valueLengthWidth;
+    result = prime * result + valueOffsetWidth;
+    result = prime * result + familyOffsetWidth;
+    result = prime * result + (includesMvccVersion ? 1231 : 1237);
+    result = prime * result + maxQualifierLength;
+    result = prime * result + maxRowLength;
+    result = prime * result + mvccVersionDeltaWidth;
+    result = prime * result + mvccVersionIndexWidth;
+    result = prime * result + (int) (minMvccVersion ^ (minMvccVersion >>> 32));
+    result = prime * result + (int) (minTimestamp ^ (minTimestamp >>> 32));
+    result = prime * result + nextNodeOffsetWidth;
+    result = prime * result + numValueBytes;
+    result = prime * result + numFamilyBytes;
+    result = prime * result + numMvccVersionBytes;
+    result = prime * result + numMetaBytes;
+    result = prime * result + numQualifierBytes;
+    result = prime * result + numRowBytes;
+    result = prime * result + numTimestampBytes;
+    result = prime * result + numUniqueFamilies;
+    result = prime * result + numUniqueQualifiers;
+    result = prime * result + numUniqueRows;
+    result = prime * result + numKeyValueBytes;
+    result = prime * result + qualifierOffsetWidth;
+    result = prime * result + rowTreeDepth;
+    result = prime * result + timestampDeltaWidth;
+    result = prime * result + timestampIndexWidth;
+    result = prime * result + version;
+    return result;
+  }
+
+  /**
+   * Generated by Eclipse
+   */
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder();
+    builder.append("PtBlockMeta [arrayOffset=");
+    builder.append(arrayOffset);
+    builder.append(", bufferOffset=");
+    builder.append(bufferOffset);
+    builder.append(", version=");
+    builder.append(version);
+    builder.append(", numMetaBytes=");
+    builder.append(numMetaBytes);
+    builder.append(", numKeyValueBytes=");
+    builder.append(numKeyValueBytes);
+    builder.append(", includesMvccVersion=");
+    builder.append(includesMvccVersion);
+    builder.append(", numRowBytes=");
+    builder.append(numRowBytes);
+    builder.append(", numFamilyBytes=");
+    builder.append(numFamilyBytes);
+    builder.append(", numQualifierBytes=");
+    builder.append(numQualifierBytes);
+    builder.append(", numTimestampBytes=");
+    builder.append(numTimestampBytes);
+    builder.append(", numMvccVersionBytes=");
+    builder.append(numMvccVersionBytes);
+    builder.append(", numValueBytes=");
+    builder.append(numValueBytes);
+    builder.append(", nextNodeOffsetWidth=");
+    builder.append(nextNodeOffsetWidth);
+    builder.append(", familyOffsetWidth=");
+    builder.append(familyOffsetWidth);
+    builder.append(", qualifierOffsetWidth=");
+    builder.append(qualifierOffsetWidth);
+    builder.append(", timestampIndexWidth=");
+    builder.append(timestampIndexWidth);
+    builder.append(", mvccVersionIndexWidth=");
+    builder.append(mvccVersionIndexWidth);
+    builder.append(", valueOffsetWidth=");
+    builder.append(valueOffsetWidth);
+    builder.append(", valueLengthWidth=");
+    builder.append(valueLengthWidth);
+    builder.append(", rowTreeDepth=");
+    builder.append(rowTreeDepth);
+    builder.append(", maxRowLength=");
+    builder.append(maxRowLength);
+    builder.append(", maxQualifierLength=");
+    builder.append(maxQualifierLength);
+    builder.append(", minTimestamp=");
+    builder.append(minTimestamp);
+    builder.append(", timestampDeltaWidth=");
+    builder.append(timestampDeltaWidth);
+    builder.append(", minMvccVersion=");
+    builder.append(minMvccVersion);
+    builder.append(", mvccVersionDeltaWidth=");
+    builder.append(mvccVersionDeltaWidth);
+    builder.append(", allSameType=");
+    builder.append(allSameType);
+    builder.append(", allTypes=");
+    builder.append(allTypes);
+    builder.append(", numUniqueRows=");
+    builder.append(numUniqueRows);
+    builder.append(", numUniqueFamilies=");
+    builder.append(numUniqueFamilies);
+    builder.append(", numUniqueQualifiers=");
+    builder.append(numUniqueQualifiers);
+    builder.append("]");
+    return builder.toString();
+  }
+
+
+  /************** absolute getters *******************/
+
+  public int getAbsoluteMetaOffset() {
+    return arrayOffset + bufferOffset;
+  }
+
+  public int getAbsoluteRowOffset() {
+    return getAbsoluteMetaOffset() + numMetaBytes;
+  }
+
+  public int getAbsoluteFamilyOffset() {
+    return getAbsoluteRowOffset() + numRowBytes;
+  }
+
+  public int getAbsoluteQualifierOffset() {
+    return getAbsoluteFamilyOffset() + numFamilyBytes;
+  }
+
+  public int getAbsoluteTimestampOffset() {
+    return getAbsoluteQualifierOffset() + numQualifierBytes;
+  }
+
+  public int getAbsoluteMvccVersionOffset() {
+    return getAbsoluteTimestampOffset() + numTimestampBytes;
+  }
+
+  public int getAbsoluteValueOffset() {
+    return getAbsoluteMvccVersionOffset() + numMvccVersionBytes;
+  }
+
+
+  /*************** get/set ***************************/
+
+  public int getTimestampDeltaWidth() {
+    return timestampDeltaWidth;
+  }
+
+  public void setTimestampDeltaWidth(int timestampDeltaWidth) {
+    this.timestampDeltaWidth = timestampDeltaWidth;
+  }
+
+  public int getValueOffsetWidth() {
+    return valueOffsetWidth;
+  }
+
+  public void setValueOffsetWidth(int valueOffsetWidth) {
+    this.valueOffsetWidth = valueOffsetWidth;
+  }
+
+  public int getValueLengthWidth() {
+    return valueLengthWidth;
+  }
+
+  public void setValueLengthWidth(int valueLengthWidth) {
+    this.valueLengthWidth = valueLengthWidth;
+  }
+
+  public int getMaxRowLength() {
+    return maxRowLength;
+  }
+
+  public void setMaxRowLength(int maxRowLength) {
+    this.maxRowLength = maxRowLength;
+  }
+
+  public long getMinTimestamp() {
+    return minTimestamp;
+  }
+
+  public void setMinTimestamp(long minTimestamp) {
+    this.minTimestamp = minTimestamp;
+  }
+
+  public byte getAllTypes() {
+    return allTypes;
+  }
+
+  public void setAllTypes(byte allTypes) {
+    this.allTypes = allTypes;
+  }
+
+  public boolean isAllSameType() {
+    return allSameType;
+  }
+
+  public void setAllSameType(boolean allSameType) {
+    this.allSameType = allSameType;
+  }
+
+  public int getNextNodeOffsetWidth() {
+    return nextNodeOffsetWidth;
+  }
+
+  public void setNextNodeOffsetWidth(int nextNodeOffsetWidth) {
+    this.nextNodeOffsetWidth = nextNodeOffsetWidth;
+  }
+
+  public int getNumRowBytes() {
+    return numRowBytes;
+  }
+
+  public void setNumRowBytes(int numRowBytes) {
+    this.numRowBytes = numRowBytes;
+  }
+
+  public int getNumTimestampBytes() {
+    return numTimestampBytes;
+  }
+
+  public void setNumTimestampBytes(int numTimestampBytes) {
+    this.numTimestampBytes = numTimestampBytes;
+  }
+
+  public int getNumValueBytes() {
+    return numValueBytes;
+  }
+
+  public void setNumValueBytes(int numValueBytes) {
+    this.numValueBytes = numValueBytes;
+  }
+
+  public int getNumMetaBytes() {
+    return numMetaBytes;
+  }
+
+  public void setNumMetaBytes(int numMetaBytes) {
+    this.numMetaBytes = numMetaBytes;
+  }
+
+  public int getArrayOffset() {
+    return arrayOffset;
+  }
+
+  public void setArrayOffset(int arrayOffset) {
+    this.arrayOffset = arrayOffset;
+  }
+
+  public int getBufferOffset() {
+    return bufferOffset;
+  }
+
+  public void setBufferOffset(int bufferOffset) {
+    this.bufferOffset = bufferOffset;
+  }
+
+  public int getNumKeyValueBytes() {
+    return numKeyValueBytes;
+  }
+
+  public void setNumKeyValueBytes(int numKeyValueBytes) {
+    this.numKeyValueBytes = numKeyValueBytes;
+  }
+
+  public int getRowTreeDepth() {
+    return rowTreeDepth;
+  }
+
+  public void setRowTreeDepth(int rowTreeDepth) {
+    this.rowTreeDepth = rowTreeDepth;
+  }
+
+  public int getNumMvccVersionBytes() {
+    return numMvccVersionBytes;
+  }
+
+  public void setNumMvccVersionBytes(int numMvccVersionBytes) {
+    this.numMvccVersionBytes = numMvccVersionBytes;
+  }
+
+  public int getMvccVersionDeltaWidth() {
+    return mvccVersionDeltaWidth;
+  }
+
+  public void setMvccVersionDeltaWidth(int mvccVersionDeltaWidth) {
+    this.mvccVersionDeltaWidth = mvccVersionDeltaWidth;
+  }
+
+  public long getMinMvccVersion() {
+    return minMvccVersion;
+  }
+
+  public void setMinMvccVersion(long minMvccVersion) {
+    this.minMvccVersion = minMvccVersion;
+  }
+
+  public int getNumFamilyBytes() {
+    return numFamilyBytes;
+  }
+
+  public void setNumFamilyBytes(int numFamilyBytes) {
+    this.numFamilyBytes = numFamilyBytes;
+  }
+
+  public int getFamilyOffsetWidth() {
+    return familyOffsetWidth;
+  }
+
+  public void setFamilyOffsetWidth(int familyOffsetWidth) {
+    this.familyOffsetWidth = familyOffsetWidth;
+  }
+
+  public int getNumUniqueRows() {
+    return numUniqueRows;
+  }
+
+  public void setNumUniqueRows(int numUniqueRows) {
+    this.numUniqueRows = numUniqueRows;
+  }
+
+  public int getNumUniqueFamilies() {
+    return numUniqueFamilies;
+  }
+
+  public void setNumUniqueFamilies(int numUniqueFamilies) {
+    this.numUniqueFamilies = numUniqueFamilies;
+  }
+
+  public int getNumUniqueQualifiers() {
+    return numUniqueQualifiers;
+  }
+
+  public void setNumUniqueQualifiers(int numUniqueQualifiers) {
+    this.numUniqueQualifiers = numUniqueQualifiers;
+  }
+
+  public int getNumQualifierBytes() {
+    return numQualifierBytes;
+  }
+
+  public void setNumQualifierBytes(int numQualifierBytes) {
+    this.numQualifierBytes = numQualifierBytes;
+  }
+
+  public int getQualifierOffsetWidth() {
+    return qualifierOffsetWidth;
+  }
+
+  public void setQualifierOffsetWidth(int qualifierOffsetWidth) {
+    this.qualifierOffsetWidth = qualifierOffsetWidth;
+  }
+
+  public int getMaxQualifierLength() {
+    return maxQualifierLength;
+  }
+
+  public void setMaxQualifierLength(int maxQualifierLength) {
+    this.maxQualifierLength = maxQualifierLength;
+  }
+
+  public int getTimestampIndexWidth() {
+    return timestampIndexWidth;
+  }
+
+  public void setTimestampIndexWidth(int timestampIndexWidth) {
+    this.timestampIndexWidth = timestampIndexWidth;
+  }
+
+  public int getMvccVersionIndexWidth() {
+    return mvccVersionIndexWidth;
+  }
+
+  public void setMvccVersionIndexWidth(int mvccVersionIndexWidth) {
+    this.mvccVersionIndexWidth = mvccVersionIndexWidth;
+  }
+
+  public int getVersion() {
+    return version;
+  }
+
+  public void setVersion(int version) {
+    this.version = version;
+  }
+
+  public boolean isIncludesMvccVersion() {
+    return includesMvccVersion;
+  }
+
+  public void setIncludesMvccVersion(boolean includesMvccVersion) {
+    this.includesMvccVersion = includesMvccVersion;
+  }
+
+}
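
For orientation, the absolute-offset getters above imply the section order of an encoded block:
meta, row, family, qualifier, timestamp, mvcc version, then value bytes, laid out contiguously.
A small self-contained sketch of that arithmetic (the byte counts are invented for illustration;
a real PrefixTreeBlockMeta parses them from the block header):

    public class BlockLayoutSketch {
      public static void main(String[] args) {
        // Hypothetical section sizes; real values come from the parsed block meta.
        int arrayOffset = 0, bufferOffset = 0;
        int numMetaBytes = 96, numRowBytes = 1024, numFamilyBytes = 16;
        int numQualifierBytes = 256, numTimestampBytes = 32, numMvccVersionBytes = 0;

        int metaOffset      = arrayOffset + bufferOffset;          // getAbsoluteMetaOffset()
        int rowOffset       = metaOffset + numMetaBytes;           // getAbsoluteRowOffset()
        int familyOffset    = rowOffset + numRowBytes;             // getAbsoluteFamilyOffset()
        int qualifierOffset = familyOffset + numFamilyBytes;       // getAbsoluteQualifierOffset()
        int timestampOffset = qualifierOffset + numQualifierBytes; // getAbsoluteTimestampOffset()
        int mvccOffset      = timestampOffset + numTimestampBytes; // getAbsoluteMvccVersionOffset()
        int valueOffset     = mvccOffset + numMvccVersionBytes;    // getAbsoluteValueOffset()

        System.out.println("value section starts at byte " + valueOffset);
      }
    }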

Added: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeCodec.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeCodec.java?rev=1443289&view=auto
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeCodec.java (added)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeCodec.java Thu Feb  7 00:36:24 2013
@@ -0,0 +1,209 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase.codec.prefixtree;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KeyComparator;
+import org.apache.hadoop.hbase.KeyValue.MetaKeyComparator;
+import org.apache.hadoop.hbase.KeyValue.RootKeyComparator;
+import org.apache.hadoop.hbase.KeyValueTool;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
+import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
+import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
+import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
+import org.apache.hadoop.hbase.io.hfile.BlockType;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.hadoop.io.RawComparator;
+import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
+import org.apache.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
+import org.apache.hbase.codec.prefixtree.encode.EncoderFactory;
+import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
+import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
+
+/**
+ * This class is created via reflection in the DataBlockEncoding enum. Update the enum if the class
+ * name or package changes.
+ * <p/>
+ * Prefix-tree implementation of DataBlockEncoder. This is the primary entry point for prefix-tree
+ * encoding and decoding. Encoding is delegated to instances of {@link PrefixTreeEncoder}, and
+ * decoding is delegated to instances of
+ * {@link org.apache.hbase.codec.prefixtree.scanner.CellSearcher}. Encoder and decoder instances
+ * are created and recycled by the static EncoderFactory and DecoderFactory.
+ */
+@InterfaceAudience.Private
+public class PrefixTreeCodec implements DataBlockEncoder{
+
+  /**
+   * no-arg constructor for reflection
+   */
+  public PrefixTreeCodec() {
+  }
+
+  /**
+   * Copied from BufferedDataBlockEncoder. Almost certainly can be improved, but that would require
+   * a deeper familiarity with the HFileBlockEncodingContext.
+   */
+  @Override
+  public void encodeKeyValues(ByteBuffer in, boolean includesMvccVersion,
+      HFileBlockEncodingContext blkEncodingCtx) throws IOException {
+    if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
+      throw new IOException(this.getClass().getName() + " only accepts "
+          + HFileBlockDefaultEncodingContext.class.getName() + " as the " + "encoding context.");
+    }
+
+    HFileBlockDefaultEncodingContext encodingCtx
+        = (HFileBlockDefaultEncodingContext) blkEncodingCtx;
+    encodingCtx.prepareEncoding();
+    DataOutputStream dataOut = encodingCtx.getOutputStreamForEncoder();
+    internalEncodeKeyValues(dataOut, in, includesMvccVersion);
+
+    // Do we need to check this, or will it always be DataBlockEncoding.PREFIX_TREE?
+    if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
+      encodingCtx.postEncoding(BlockType.ENCODED_DATA);
+    } else {
+      encodingCtx.postEncoding(BlockType.DATA);
+    }
+  }
+
+  private void internalEncodeKeyValues(DataOutputStream encodedOutputStream,
+      ByteBuffer rawKeyValues, boolean includesMvccVersion) throws IOException {
+    rawKeyValues.rewind();
+    PrefixTreeEncoder builder = EncoderFactory.checkOut(encodedOutputStream, includesMvccVersion);
+
+    try{
+      KeyValue kv;
+      while ((kv = KeyValueTool.nextShallowCopy(rawKeyValues, includesMvccVersion)) != null) {
+        builder.write(kv);
+      }
+      builder.flush();
+    }finally{
+      EncoderFactory.checkIn(builder);
+    }
+  }
+
+
+  @Override
+  public ByteBuffer decodeKeyValues(DataInputStream source, boolean includesMvccVersion)
+      throws IOException {
+    return decodeKeyValues(source, 0, 0, includesMvccVersion);
+  }
+
+
+  /**
+   * I don't think this method is called during normal HBase operation, so efficiency is not
+   * important.
+   */
+  @Override
+  public ByteBuffer decodeKeyValues(DataInputStream source, int allocateHeaderLength,
+      int skipLastBytes, boolean includesMvccVersion) throws IOException {
+    ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source);// wasteful copy
+    sourceAsBuffer.mark();
+    PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(sourceAsBuffer);
+    sourceAsBuffer.rewind();
+    int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes();
+    byte[] keyValueBytesWithHeader = new byte[numV1BytesWithHeader];
+    ByteBuffer result = ByteBuffer.wrap(keyValueBytesWithHeader);
+    result.rewind();
+    CellSearcher searcher = null;
+    try {
+      searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvccVersion);
+      while (searcher.next()) {
+        KeyValue currentCell = KeyValueTool.copyToNewKeyValue(searcher.getCurrent());
+        // Needs to be modified for DirectByteBuffers; there are no existing methods to
+        // write VLongs to a byte[].
+        int offset = result.arrayOffset() + result.position();
+        KeyValueTool.appendToByteArray(currentCell, result.array(), offset);
+        int keyValueLength = KeyValueTool.length(currentCell);
+        ByteBufferUtils.skip(result, keyValueLength);
+        offset += keyValueLength;
+        if (includesMvccVersion) {
+          ByteBufferUtils.writeVLong(result, currentCell.getMvccVersion());
+        }
+      }
+      result.position(result.limit());//make it appear as if we were appending
+      return result;
+    } finally {
+      DecoderFactory.checkIn(searcher);
+    }
+  }
+
+
+  @Override
+  public ByteBuffer getFirstKeyInBlock(ByteBuffer block) {
+    block.rewind();
+    PrefixTreeArraySearcher searcher = null;
+    try {
+      // Should we include the memstore TS (second argument)?
+      // PrefixKeyDeltaEncoder appears to, so do the same here.
+      searcher = DecoderFactory.checkOut(block, true);
+      if (!searcher.positionAtFirstCell()) {
+        return null;
+      }
+      return KeyValueTool.copyKeyToNewByteBuffer(searcher.getCurrent());
+    } finally {
+      DecoderFactory.checkIn(searcher);
+    }
+  }
+
+  @Override
+  public HFileBlockEncodingContext newDataBlockEncodingContext(Algorithm compressionAlgorithm,
+      DataBlockEncoding encoding, byte[] header) {
+    if(DataBlockEncoding.PREFIX_TREE != encoding){
+      // It is not clear why the encoding is part of this interface; each encoder implementation
+      // should probably know its own encoding type.
+      throw new IllegalArgumentException("only DataBlockEncoding.PREFIX_TREE supported");
+    }
+    return new HFileBlockDefaultEncodingContext(compressionAlgorithm, encoding, header);
+  }
+
+  @Override
+  public HFileBlockDecodingContext newDataBlockDecodingContext(Algorithm compressionAlgorithm) {
+    return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
+  }
+
+  /**
+   * Is this the correct handling of an illegal comparator? Ideally an unsupported comparator would
+   * be rejected before it gets this far.
+   */
+  @Override
+  public EncodedSeeker createSeeker(RawComparator<byte[]> comparator, boolean includesMvccVersion) {
+    if(! (comparator instanceof KeyComparator)){
+      throw new IllegalArgumentException("comparator must be KeyValue.KeyComparator");
+    }
+    if(comparator instanceof MetaKeyComparator){
+      throw new IllegalArgumentException("DataBlockEncoding.PREFIX_TREE not compatible with META "
+          +"table");
+    }
+    if(comparator instanceof RootKeyComparator){
+      throw new IllegalArgumentException("DataBlockEncoding.PREFIX_TREE not compatible with ROOT "
+          +"table");
+    }
+
+    return new PrefixTreeSeeker(includesMvccVersion);
+  }
+
+}
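
The encode path above follows a check-out / write / flush / check-in pattern against the
EncoderFactory pool. A minimal sketch of that pattern, assuming a hypothetical "cells" iterable in
place of the KeyValues that internalEncodeKeyValues drains from the raw ByteBuffer:

    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hbase.codec.prefixtree.encode.EncoderFactory;
    import org.apache.hbase.codec.prefixtree.encode.PrefixTreeEncoder;

    public class EncodeSketch {
      // Sketch only: mirrors internalEncodeKeyValues. "cells" is a stand-in source of KeyValues.
      static void encode(DataOutputStream out, Iterable<KeyValue> cells,
          boolean includesMvccVersion) throws IOException {
        PrefixTreeEncoder encoder = EncoderFactory.checkOut(out, includesMvccVersion);
        try {
          for (KeyValue kv : cells) {
            encoder.write(kv);   // append one cell to the prefix-tree builder
          }
          encoder.flush();       // serialize the finished trie to the stream
        } finally {
          EncoderFactory.checkIn(encoder); // always return the encoder to the pool
        }
      }
    }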

Added: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeSeeker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeSeeker.java?rev=1443289&view=auto
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeSeeker.java (added)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/PrefixTreeSeeker.java Thu Feb  7 00:36:24 2013
@@ -0,0 +1,216 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase.codec.prefixtree;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueTool;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
+import org.apache.hbase.Cell;
+import org.apache.hbase.cell.CellScannerPosition;
+import org.apache.hbase.cell.CellTool;
+import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
+import org.apache.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
+
+/**
+ * These methods have the same definition as any implementation of the EncodedSeeker.
+ *
+ * In the future, the EncodedSeeker could be modified to work with the Cell interface directly.  It
+ * currently returns a new KeyValue object each time getKeyValue is called.  This is not horrible,
+ * but in order to create a new KeyValue object, we must first allocate a new byte[] and copy in
+ * the data from the PrefixTreeCell.  It is somewhat heavyweight right now.
+ */
+@InterfaceAudience.Private
+public class PrefixTreeSeeker implements EncodedSeeker {
+
+  protected ByteBuffer block;
+  protected boolean includeMvccVersion;
+  protected PrefixTreeArraySearcher ptSearcher;
+
+  public PrefixTreeSeeker(boolean includeMvccVersion) {
+    this.includeMvccVersion = includeMvccVersion;
+  }
+
+  @Override
+  public void setCurrentBuffer(ByteBuffer fullBlockBuffer) {
+    block = fullBlockBuffer;
+    ptSearcher = DecoderFactory.checkOut(block, includeMvccVersion);
+    rewind();
+  }
+
+  /**
+   * Currently unused.
+   * <p/>
+   * TODO: performance leak. The searchers should be reused, but HBase does not currently provide
+   * a hook from which this can be called.
+   */
+  public void releaseCurrentSearcher(){
+    DecoderFactory.checkIn(ptSearcher);
+  }
+
+
+  @Override
+  public ByteBuffer getKeyDeepCopy() {
+    return KeyValueTool.copyKeyToNewByteBuffer(ptSearcher.getCurrent());
+  }
+
+
+  @Override
+  public ByteBuffer getValueShallowCopy() {
+    return CellTool.getValueBufferShallowCopy(ptSearcher.getCurrent());
+  }
+
+  /**
+   * Currently must do a deep copy into a new array.
+   */
+  @Override
+  public ByteBuffer getKeyValueBuffer() {
+    return KeyValueTool.copyToNewByteBuffer(ptSearcher.getCurrent());
+  }
+
+  /**
+   * Currently must do a deep copy into a new array.
+   */
+  @Override
+  public KeyValue getKeyValue() {
+    return KeyValueTool.copyToNewKeyValue(ptSearcher.getCurrent());
+  }
+
+  /**
+   * Currently unused.
+   * <p/>
+   * A nice, lightweight reference, though the underlying cell is transient.  This method may return
+   * the same reference to the backing PrefixTreeCell repeatedly, while other implementations may
+   * return a different reference for each Cell.
+   * <p/>
+   * The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to use
+   * this method instead of the getKeyValue() methods above.
+   */
+//  @Override
+  public Cell getCurrent() {
+    return ptSearcher.getCurrent();
+  }
+
+  @Override
+  public void rewind() {
+    ptSearcher.positionAtFirstCell();
+  }
+
+  @Override
+  public boolean next() {
+    return ptSearcher.next();
+  }
+
+//  @Override
+  public boolean advance() {
+    return ptSearcher.next();
+  }
+
+
+  private static final boolean USE_POSITION_BEFORE = false;
+
+  /**
+   * Seek forward only (should be called reseekToKeyInBlock?).
+   * <p/>
+   * If the exact key is found, look at the seekBefore flag:<br/>
+   * - if true: back up to the previous key<br/>
+   * - if false: stay on the exact key
+   * <p/>
+   * If the exact key is not found, go to the previous key if possible, leaving the scanner in a
+   * valid state.
+   * <p/>
+   * @param keyOnlyBytes KeyValue format of a Cell's key at which to position the seeker
+   * @param offset offset into the keyOnlyBytes array
+   * @param length number of bytes of the keyOnlyBytes array to use
+   * @param forceBeforeOnExactMatch if an exact match is found and seekBefore=true, back up one Cell
+   * @return 0 if the seeker is on the exact key<br/>
+   *         1 if the seeker is not on the key for any reason, including seekBefore being true
+   */
+  @Override
+  public int seekToKeyInBlock(byte[] keyOnlyBytes, int offset, int length,
+      boolean forceBeforeOnExactMatch) {
+    if (USE_POSITION_BEFORE) {
+      return seekToOrBeforeUsingPositionAtOrBefore(keyOnlyBytes, offset, length,
+        forceBeforeOnExactMatch);
+    }else{
+      return seekToOrBeforeUsingPositionAtOrAfter(keyOnlyBytes, offset, length,
+        forceBeforeOnExactMatch);
+    }
+  }
+
+
+
+  /*
+   * Support both of these options since the underlying PrefixTree does. Possibly expand the
+   * EncodedSeeker to expose both.
+   */
+
+  protected int seekToOrBeforeUsingPositionAtOrBefore(byte[] keyOnlyBytes, int offset, int length,
+      boolean forceBeforeOnExactMatch){
+    // this does a deep copy of the key byte[] because the CellSearcher interface wants a Cell
+    KeyValue kv = KeyValue.createKeyValueFromKey(keyOnlyBytes, offset, length);
+
+    CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv);
+
+    if(CellScannerPosition.AT == position){
+      if (forceBeforeOnExactMatch) {
+        ptSearcher.previous();
+        return 1;
+      }
+      return 0;
+    }
+
+    return 1;
+  }
+
+
+  protected int seekToOrBeforeUsingPositionAtOrAfter(byte[] keyOnlyBytes, int offset, int length,
+      boolean forceBeforeOnExactMatch){
+    // this does a deep copy of the key byte[] because the CellSearcher interface wants a Cell
+    KeyValue kv = KeyValue.createKeyValueFromKey(keyOnlyBytes, offset, length);
+
+    //should probably switch this to use the seekForwardToOrBefore method
+    CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv);
+
+    if(CellScannerPosition.AT == position){
+      if (forceBeforeOnExactMatch) {
+        ptSearcher.previous();
+        return 1;
+      }
+      return 0;
+
+    }
+
+    if(CellScannerPosition.AFTER == position){
+      if(!ptSearcher.isBeforeFirst()){
+        ptSearcher.previous();
+      }
+      return 1;
+    }
+
+    if(position == CellScannerPosition.AFTER_LAST){
+      return 1;
+    }
+
+    throw new RuntimeException("unexpected CellScannerPosition:"+position);
+  }
+
+}
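
A minimal sketch of how a hypothetical caller might interpret the seekToKeyInBlock return contract
described above (the seeker, its buffer, and the key bytes are assumed to be set up elsewhere):

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hbase.codec.prefixtree.PrefixTreeSeeker;

    public class SeekSketch {
      // Sketch only: 0 means the seeker landed on the exact key; 1 means it is positioned
      // elsewhere (typically on the nearest cell at or before the key, or before the first cell).
      static KeyValue seekAndFetch(PrefixTreeSeeker seeker, byte[] keyOnlyBytes) {
        int rc = seeker.seekToKeyInBlock(keyOnlyBytes, 0, keyOnlyBytes.length, false);
        if (rc == 0) {
          return seeker.getKeyValue();   // exact match
        }
        return null;                     // not an exact match; the caller decides how to proceed
      }
    }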

Added: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/ArraySearcherPool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/ArraySearcherPool.java?rev=1443289&view=auto
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/ArraySearcherPool.java (added)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/ArraySearcherPool.java Thu Feb  7 00:36:24 2013
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase.codec.prefixtree.decode;
+
+import java.nio.ByteBuffer;
+import java.util.Queue;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Pools PrefixTreeArraySearcher objects. Each searcher can consist of hundreds or thousands of
+ * objects, and one is needed for each HFile during a Get operation. With tens of thousands of
+ * Gets/second, reusing these searchers may save a lot of young-gen collections.
+ * <p/>
+ * Alternative implementation would be a ByteBufferSearcherPool (not implemented yet).
+ */
+@InterfaceAudience.Private
+public class ArraySearcherPool {
+
+  /**
+   * One decoder is needed for each storefile for each Get operation, so we may need hundreds at
+   * the same time. However, decoding is CPU-bound, so this should be limited to something in the
+   * realm of the maximum number of reasonably active threads.
+   */
+  private static final Integer MAX_POOL_SIZE = 1000;
+
+  protected Queue<PrefixTreeArraySearcher> pool
+    = new LinkedBlockingQueue<PrefixTreeArraySearcher>(MAX_POOL_SIZE);
+
+  public PrefixTreeArraySearcher checkOut(ByteBuffer buffer, boolean includesMvccVersion) {
+    PrefixTreeArraySearcher searcher = pool.poll();//will return null if pool is empty
+    searcher = DecoderFactory.ensureArraySearcherValid(buffer, searcher, includesMvccVersion);
+    return searcher;
+  }
+
+  public void checkIn(PrefixTreeArraySearcher searcher) {
+    searcher.releaseBlockReference();
+    pool.offer(searcher);
+  }
+
+  @Override
+  public String toString() {
+    return ("poolSize:" + pool.size());
+  }
+
+}
\ No newline at end of file
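
The pool relies on LinkedBlockingQueue's non-blocking operations: poll() returns null when the
pool is empty (so checkOut builds a fresh searcher), and offer() silently drops the searcher when
the pool is already full. A self-contained sketch of those semantics, using String as a stand-in
element type and a small capacity in place of MAX_POOL_SIZE:

    import java.util.Queue;
    import java.util.concurrent.LinkedBlockingQueue;

    public class PoolSemanticsSketch {
      public static void main(String[] args) {
        Queue<String> pool = new LinkedBlockingQueue<String>(2); // capacity stands in for MAX_POOL_SIZE
        System.out.println(pool.poll());             // null: pool empty, caller must construct
        pool.offer("searcher-1");
        pool.offer("searcher-2");
        boolean accepted = pool.offer("searcher-3"); // false: pool full, the instance is discarded
        System.out.println(accepted);
      }
    }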

Added: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/DecoderFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/DecoderFactory.java?rev=1443289&view=auto
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/DecoderFactory.java (added)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hbase/codec/prefixtree/decode/DecoderFactory.java Thu Feb  7 00:36:24 2013
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hbase.codec.prefixtree.decode;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hbase.codec.prefixtree.PrefixTreeBlockMeta;
+import org.apache.hbase.codec.prefixtree.scanner.CellSearcher;
+
+/**
+ * Static wrapper class for the ArraySearcherPool.
+ */
+@InterfaceAudience.Private
+public class DecoderFactory {
+
+  private static final ArraySearcherPool POOL = new ArraySearcherPool();
+
+  //TODO will need a PrefixTreeSearcher on top of CellSearcher
+  public static PrefixTreeArraySearcher checkOut(final ByteBuffer buffer, 
+      boolean includeMvccVersion) {
+    if (buffer.isDirect()) {
+      throw new IllegalArgumentException("DirectByteBuffers not supported yet");
+      // TODO implement PtByteBufferBlockScanner
+    }
+
+    PrefixTreeArraySearcher searcher = POOL.checkOut(buffer,
+      includeMvccVersion);
+    return searcher;
+  }
+
+  public static void checkIn(CellSearcher pSearcher) {
+    if (pSearcher == null) {
+      return;
+    }
+    if (! (pSearcher instanceof PrefixTreeArraySearcher)) {
+      throw new IllegalArgumentException("Cannot return "+pSearcher.getClass()+" to "
+          +DecoderFactory.class);
+    }
+    PrefixTreeArraySearcher searcher = (PrefixTreeArraySearcher) pSearcher;
+    POOL.checkIn(searcher);
+  }
+
+
+  /**************************** helper ******************************/
+
+  public static PrefixTreeArraySearcher ensureArraySearcherValid(ByteBuffer buffer,
+      PrefixTreeArraySearcher searcher, boolean includeMvccVersion) {
+    if (searcher == null) {
+      PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(buffer);
+      searcher = new PrefixTreeArraySearcher(blockMeta, blockMeta.getRowTreeDepth(),
+          blockMeta.getMaxRowLength(), blockMeta.getMaxQualifierLength());
+      searcher.initOnBlock(blockMeta, buffer.array(), includeMvccVersion);
+      return searcher;
+    }
+
+    PrefixTreeBlockMeta blockMeta = searcher.getBlockMeta();
+    blockMeta.initOnBlock(buffer);
+    if (!searcher.areBuffersBigEnough()) {
+      int maxRowTreeStackNodes = Math.max(blockMeta.getRowTreeDepth(),
+        searcher.getMaxRowTreeStackNodes());
+      int rowBufferLength = Math.max(blockMeta.getMaxRowLength(), searcher.getRowBufferLength());
+      int qualifierBufferLength = Math.max(blockMeta.getMaxQualifierLength(),
+        searcher.getQualifierBufferLength());
+      searcher = new PrefixTreeArraySearcher(blockMeta, maxRowTreeStackNodes, rowBufferLength,
+          qualifierBufferLength);
+    }
+    //this is where we parse the BlockMeta
+    searcher.initOnBlock(blockMeta, buffer.array(), includeMvccVersion);
+    return searcher;
+  }
+
+}
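
DecoderFactory pairs with PrefixTreeCodec.decodeKeyValues and getFirstKeyInBlock above: callers
check out a searcher for a heap-backed block, iterate it, and return it in a finally block. A
minimal sketch of that lifecycle (the dumpCells method and its arguments are hypothetical):

    import java.nio.ByteBuffer;

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.KeyValueTool;
    import org.apache.hbase.codec.prefixtree.decode.DecoderFactory;
    import org.apache.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;

    public class DecodeSketch {
      // Sketch only: mirrors the check-out / scan / check-in pattern used by PrefixTreeCodec.
      // "block" must be an already-encoded, heap-backed (non-direct) buffer.
      static void dumpCells(ByteBuffer block, boolean includesMvccVersion) {
        PrefixTreeArraySearcher searcher = null;
        try {
          searcher = DecoderFactory.checkOut(block, includesMvccVersion);
          while (searcher.next()) {
            KeyValue kv = KeyValueTool.copyToNewKeyValue(searcher.getCurrent());
            System.out.println(kv); // deep copy of the current cell
          }
        } finally {
          DecoderFactory.checkIn(searcher); // null-safe; returns the searcher to the pool
        }
      }
    }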


