hbase-commits mailing list archives

From: st...@apache.org
Subject: [7/7] hbase git commit: HBASE-19179 Remove hbase-prefix-tree
Date: Sat, 04 Nov 2017 17:14:59 GMT
HBASE-19179 Remove hbase-prefix-tree


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f812218f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f812218f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f812218f

Branch: refs/heads/master
Commit: f812218ffe58df045dfd619360efa84e802f25dd
Parents: 22b07e9
Author: Michael Stack <stack@apache.org>
Authored: Fri Nov 3 20:10:36 2017 -0700
Committer: Michael Stack <stack@apache.org>
Committed: Sat Nov 4 10:11:13 2017 -0700

----------------------------------------------------------------------
 .../hbase/io/encoding/DataBlockEncoding.java    |   2 +-
 hbase-mapreduce/pom.xml                         |   4 -
 .../hbase/mapreduce/TableMapReduceUtil.java     |  17 -
 hbase-prefix-tree/pom.xml                       | 189 ----
 .../codec/prefixtree/PrefixTreeBlockMeta.java   | 899 -------------------
 .../hbase/codec/prefixtree/PrefixTreeCodec.java | 216 -----
 .../codec/prefixtree/PrefixTreeSeeker.java      | 586 ------------
 .../prefixtree/decode/ArraySearcherPool.java    |  63 --
 .../codec/prefixtree/decode/DecoderFactory.java |  83 --
 .../PrefixTreeArrayReversibleScanner.java       | 145 ---
 .../decode/PrefixTreeArrayScanner.java          | 528 -----------
 .../decode/PrefixTreeArraySearcher.java         | 418 ---------
 .../codec/prefixtree/decode/PrefixTreeCell.java | 311 -------
 .../decode/column/ColumnNodeReader.java         | 109 ---
 .../prefixtree/decode/column/ColumnReader.java  | 108 ---
 .../prefixtree/decode/row/RowNodeReader.java    | 281 ------
 .../decode/timestamp/MvccVersionDecoder.java    |  58 --
 .../decode/timestamp/TimestampDecoder.java      |  58 --
 .../codec/prefixtree/encode/EncoderFactory.java |  56 --
 .../codec/prefixtree/encode/EncoderPool.java    |  32 -
 .../prefixtree/encode/EncoderPoolImpl.java      |  46 -
 .../prefixtree/encode/PrefixTreeEncoder.java    | 542 -----------
 .../encode/column/ColumnNodeWriter.java         | 136 ---
 .../encode/column/ColumnSectionWriter.java      | 209 -----
 .../encode/other/CellTypeEncoder.java           |  68 --
 .../prefixtree/encode/other/ColumnNodeType.java |  28 -
 .../prefixtree/encode/other/LongEncoder.java    | 183 ----
 .../prefixtree/encode/row/RowNodeWriter.java    | 300 -------
 .../prefixtree/encode/row/RowSectionWriter.java | 219 -----
 .../prefixtree/encode/tokenize/Tokenizer.java   | 241 -----
 .../encode/tokenize/TokenizerNode.java          | 639 -------------
 .../tokenize/TokenizerRowSearchPosition.java    |  38 -
 .../tokenize/TokenizerRowSearchResult.java      |  73 --
 .../prefixtree/scanner/CellScannerPosition.java |  67 --
 .../codec/prefixtree/scanner/CellSearcher.java  | 118 ---
 .../scanner/ReversibleCellScanner.java          |  55 --
 .../hbase/util/byterange/ByteRangeSet.java      | 181 ----
 .../util/byterange/impl/ByteRangeHashSet.java   |  57 --
 .../util/byterange/impl/ByteRangeTreeSet.java   |  54 --
 .../hadoop/hbase/util/vint/UFIntTool.java       | 117 ---
 .../hadoop/hbase/util/vint/UVIntTool.java       | 112 ---
 .../hadoop/hbase/util/vint/UVLongTool.java      | 116 ---
 .../hbase/codec/keyvalue/TestKeyValueTool.java  |  65 --
 .../prefixtree/PrefixTreeTestConstants.java     |  27 -
 .../prefixtree/blockmeta/TestBlockMeta.java     |  91 --
 .../codec/prefixtree/builder/TestTokenizer.java |  78 --
 .../prefixtree/builder/TestTokenizerData.java   |  42 -
 .../codec/prefixtree/builder/TestTreeDepth.java |  90 --
 .../builder/data/TestTokenizerDataBasic.java    |  51 --
 .../builder/data/TestTokenizerDataEdgeCase.java |  53 --
 .../prefixtree/column/TestColumnBuilder.java    | 127 ---
 .../codec/prefixtree/column/TestColumnData.java |  45 -
 .../column/data/TestColumnDataRandom.java       |  63 --
 .../column/data/TestColumnDataSimple.java       |  52 --
 .../codec/prefixtree/row/BaseTestRowData.java   |  54 --
 .../prefixtree/row/TestPrefixTreeSearcher.java  | 229 -----
 .../hbase/codec/prefixtree/row/TestRowData.java | 105 ---
 .../codec/prefixtree/row/TestRowEncoder.java    | 194 ----
 .../row/data/TestRowDataComplexQualifiers.java  |  67 --
 .../prefixtree/row/data/TestRowDataDeeper.java  |  85 --
 .../data/TestRowDataDifferentTimestamps.java    |  94 --
 .../prefixtree/row/data/TestRowDataEmpty.java   |  43 -
 .../row/data/TestRowDataExerciseFInts.java      | 115 ---
 .../row/data/TestRowDataMultiFamilies.java      |  60 --
 .../prefixtree/row/data/TestRowDataNub.java     |  59 --
 .../row/data/TestRowDataNumberStrings.java      |  61 --
 .../data/TestRowDataQualifierByteOrdering.java  |  58 --
 .../row/data/TestRowDataRandomKeyValues.java    |  42 -
 .../TestRowDataRandomKeyValuesWithTags.java     |  41 -
 .../row/data/TestRowDataSearchWithPrefix.java   |  74 --
 .../row/data/TestRowDataSearcherRowMiss.java    | 128 ---
 .../prefixtree/row/data/TestRowDataSimple.java  | 117 ---
 .../row/data/TestRowDataSingleQualifier.java    |  52 --
 .../prefixtree/row/data/TestRowDataTrivial.java |  74 --
 .../row/data/TestRowDataTrivialWithTags.java    |  81 --
 .../prefixtree/row/data/TestRowDataUrls.java    |  99 --
 .../row/data/TestRowDataUrlsExample.java        | 126 ---
 .../prefixtree/timestamp/TestTimestampData.java |  45 -
 .../timestamp/TestTimestampEncoder.java         |  98 --
 .../timestamp/data/TestTimestampDataBasic.java  |  54 --
 .../data/TestTimestampDataNumbers.java          |  56 --
 .../data/TestTimestampDataRepeats.java          |  52 --
 .../hadoop/hbase/util/bytes/TestByteRange.java  |  39 -
 .../util/comparator/ByteArrayComparator.java    |  32 -
 .../hbase/util/number/NumberFormatter.java      |  33 -
 .../hbase/util/number/RandomNumberUtils.java    |  34 -
 .../hadoop/hbase/util/vint/TestFIntTool.java    | 126 ---
 .../hadoop/hbase/util/vint/TestVIntTool.java    | 105 ---
 .../hadoop/hbase/util/vint/TestVLongTool.java   | 113 ---
 .../src/test/resources/log4j.properties         |  68 --
 hbase-server/pom.xml                            |   5 -
 .../hadoop/hbase/io/hfile/HFileWriterImpl.java  |   6 +-
 .../hadoop/hbase/client/TestFromClientSide.java |   2 -
 .../io/encoding/TestDataBlockEncoders.java      |  18 +-
 .../hbase/io/encoding/TestPrefixTree.java       | 192 ----
 .../io/encoding/TestPrefixTreeEncoding.java     | 338 -------
 .../encoding/TestSeekToBlockWithEncoders.java   |   3 +-
 .../io/hfile/TestHFileDataBlockEncoder.java     |   4 -
 .../hadoop/hbase/io/hfile/TestSeekTo.java       |   7 +-
 .../hadoop/hbase/regionserver/TestTags.java     |   5 +-
 hbase-shell/pom.xml                             |   5 -
 pom.xml                                         |   9 -
 src/main/asciidoc/_chapters/compression.adoc    |   7 +-
 103 files changed, 8 insertions(+), 12052 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
index efc9f05..f3b5b25 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
@@ -39,7 +39,7 @@ public enum DataBlockEncoding {
   FAST_DIFF(4, "org.apache.hadoop.hbase.io.encoding.FastDiffDeltaEncoder"),
   // id 5 is reserved for the COPY_KEY algorithm for benchmarking
   // COPY_KEY(5, "org.apache.hadoop.hbase.io.encoding.CopyKeyDataBlockEncoder"),
-  PREFIX_TREE(6, "org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec"),
+  // PREFIX_TREE(6, "org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec"),
   ROW_INDEX_V1(7, "org.apache.hadoop.hbase.io.encoding.RowIndexCodecV1");
 
   private final short id;
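
The id is commented out rather than deleted because the numeric id is persisted in encoded HFile blocks: recycling 6 for a future encoder would make existing PREFIX_TREE data decode with the wrong codec. A minimal sketch of why a retired id must stay reserved (hypothetical lookup helper, not necessarily the real DataBlockEncoding API):

    // Encodings are resolved from the short id stored in each encoded block,
    // so an id may never be reused for a different format.
    public static DataBlockEncoding getEncodingById(short id) {
      for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        if (encoding.getId() == id) {
          return encoding;
        }
      }
      // id 5 (COPY_KEY) and now id 6 (PREFIX_TREE) fall through here, so data
      // written with a retired encoding surfaces as an explicit error instead
      // of being silently mis-decoded.
      throw new IllegalArgumentException("Unknown or retired encoding id: " + id);
    }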

http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-mapreduce/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 607b43b..42a50bc 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -180,10 +180,6 @@
       <version>${slf4j.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-prefix-tree</artifactId>
-    </dependency>
-    <dependency>
       <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index cdecf14..cf86184 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -801,21 +801,6 @@ public class TableMapReduceUtil {
    * @see <a href="https://issues.apache.org/jira/browse/PIG-3285">PIG-3285</a>
    */
   public static void addHBaseDependencyJars(Configuration conf) throws IOException {
-
-    // PrefixTreeCodec is part of the hbase-prefix-tree module. If not included in MR jobs jar
-    // dependencies, MR jobs that write encoded hfiles will fail.
-    // We used reflection here so to prevent a circular module dependency.
-    // TODO - if we extract the MR into a module, make it depend on hbase-prefix-tree.
-    Class prefixTreeCodecClass = null;
-    try {
-      prefixTreeCodecClass =
-          Class.forName("org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec");
-    } catch (ClassNotFoundException e) {
-      // this will show up in unit tests but should not show in real deployments
-      LOG.warn("The hbase-prefix-tree module jar containing PrefixTreeCodec is not present." +
-          "  Continuing without it.");
-    }
-
     addDependencyJarsForClasses(conf,
       // explicitly pull a class from each module
       org.apache.hadoop.hbase.HConstants.class,                      // hbase-common
@@ -828,8 +813,6 @@ public class TableMapReduceUtil {
       org.apache.hadoop.hbase.mapreduce.TableMapper.class,           // hbase-mapreduce
       org.apache.hadoop.hbase.metrics.impl.FastLongHistogram.class,  // hbase-metrics
       org.apache.hadoop.hbase.metrics.Snapshot.class,                // hbase-metrics-api
-      prefixTreeCodecClass, //  hbase-prefix-tree (if null will be skipped)
-      // pull necessary dependencies
       org.apache.zookeeper.ZooKeeper.class,
       org.apache.hadoop.hbase.shaded.io.netty.channel.Channel.class,
       com.google.protobuf.Message.class,
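
For reference, the deleted block above used the standard optional-dependency idiom: resolve the codec class reflectively so hbase-mapreduce takes no compile-time dependency on hbase-prefix-tree, and treat ClassNotFoundException as "module absent". With the module removed outright, the indirection serves no remaining purpose. The pattern, reduced to its core:

    // Load a class by name so the providing module is optional at runtime.
    Class<?> optionalCodec = null;
    try {
      optionalCodec = Class.forName("org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec");
    } catch (ClassNotFoundException e) {
      // Module not on the classpath; downstream code skips the null class.
    }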

http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-prefix-tree/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/pom.xml b/hbase-prefix-tree/pom.xml
deleted file mode 100644
index e531706..0000000
--- a/hbase-prefix-tree/pom.xml
+++ /dev/null
@@ -1,189 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <artifactId>hbase-build-configuration</artifactId>
-    <groupId>org.apache.hbase</groupId>
-    <version>3.0.0-SNAPSHOT</version>
-    <relativePath>../hbase-build-configuration</relativePath>
-  </parent>
-
-  <artifactId>hbase-prefix-tree</artifactId>
-  <name>Apache HBase - Prefix Tree</name>
-  <description>Prefix Tree Data Block Encoder</description>
-<!--REMOVE-->
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-site-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <!-- Make a jar and put the sources in the jar -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
-      </plugin>
-      <plugin>
-        <!--Make it so assembly:single does nothing in here-->
-        <artifactId>maven-assembly-plugin</artifactId>
-        <configuration>
-          <skipAssembly>true</skipAssembly>
-        </configuration>
-      </plugin>
-    </plugins>
-    <pluginManagement>
-      <plugins>
-        <!--This plugin's configuration is used to store Eclipse m2e settings
-             only. It has no influence on the Maven build itself.-->
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-compiler-plugin</artifactId>
-                    <versionRange>[3.2,)</versionRange>
-                    <goals>
-                      <goal>compile</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore></ignore>
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-  </build>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
-      <version>${project.version}</version>
-      <classifier>tests</classifier>
-    </dependency>
-   <dependency>
-     <groupId>org.apache.hbase</groupId>
-     <artifactId>hbase-annotations</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hbase.thirdparty</groupId>
-      <artifactId>hbase-shaded-miscellaneous</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-
-  <profiles>
-    <!-- Skip the tests in this module -->
-    <profile>
-      <id>skipPrefixTreeTests</id>
-      <activation>
-        <property>
-          <name>skipPrefixTreeTests</name>
-        </property>
-      </activation>
-      <properties>
-        <surefire.skipFirstPart>true</surefire.skipFirstPart>
-        <surefire.skipSecondPart>true</surefire.skipSecondPart>
-      </properties>
-    </profile>
-    <!-- Profiles for building against different hadoop versions -->
-    <!--
-      profile for building against Hadoop 2.0.0-alpha. Activate using:
-       mvn -Dhadoop.profile=2.0
-    -->
-    <profile>
-      <id>hadoop-2.0</id>
-      <activation>
-        <property>
-            <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
-            <!--h2--><name>!hadoop.profile</name>
-        </property>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-      </dependencies>
-    </profile>
-    <!--
-      profile for building against Hadoop 3.0.x. Activate using:
-       mvn -Dhadoop.profile=3.0
-    -->
-    <profile>
-      <id>hadoop-3.0</id>
-      <activation>
-        <property>
-          <name>hadoop.profile</name>
-          <value>3.0</value>
-        </property>
-      </activation>
-      <properties>
-        <hadoop.version>3.0-SNAPSHOT</hadoop.version>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
-</project>

http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
deleted file mode 100644
index b6b8ad8..0000000
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeBlockMeta.java
+++ /dev/null
@@ -1,899 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.codec.prefixtree.encode.other.LongEncoder;
-import org.apache.hadoop.hbase.nio.ByteBuff;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.vint.UVIntTool;
-import org.apache.hadoop.hbase.util.vint.UVLongTool;
-
-/**
- * Information about the block.  Stored at the beginning of the byte[].  Contains things
- * like minimum timestamp and width of FInts in the row tree.
- *
- * Most fields stored in VInts that get decoded on the first access of each new block.
- */
-@InterfaceAudience.Private
-public class PrefixTreeBlockMeta {
-
-  /******************* static fields ********************/
-
-  public static final int VERSION = 0;
-
-  public static final int MAX_FAMILY_LENGTH = Byte.MAX_VALUE;// hard-coded in KeyValue
-
-  public static final int
-    NUM_LONGS = 2,
-    NUM_INTS = 28,
-    NUM_SHORTS = 0,//keyValueTypeWidth not persisted
-    NUM_SINGLE_BYTES = 2,
-    MAX_BYTES = Bytes.SIZEOF_LONG * NUM_LONGS
-        + Bytes.SIZEOF_SHORT * NUM_SHORTS
-        + Bytes.SIZEOF_INT * NUM_INTS
-        + NUM_SINGLE_BYTES;
-
-
-  /**************** transient fields *********************/
-  protected int bufferOffset;
-
-
-  /**************** persisted fields **********************/
-
-  // PrefixTree version to allow future format modifications
-  protected int version;
-  protected int numMetaBytes;
-  protected int numKeyValueBytes;
-  protected boolean includesMvccVersion;//probably don't need this explicitly, but only 1 byte
-
-  // split the byte[] into 6 sections for the different data types
-  protected int numRowBytes;
-  protected int numFamilyBytes;
-  protected int numQualifierBytes;
-  protected int numTimestampBytes;
-  protected int numMvccVersionBytes;
-  protected int numValueBytes;
-  protected int numTagsBytes;
-
-  // number of bytes in each section of fixed width FInts
-  protected int nextNodeOffsetWidth;
-  protected int familyOffsetWidth;
-  protected int qualifierOffsetWidth;
-  protected int timestampIndexWidth;
-  protected int mvccVersionIndexWidth;
-  protected int valueOffsetWidth;
-  protected int valueLengthWidth;
-  protected int tagsOffsetWidth;
-
-  // used to pre-allocate structures for reading
-  protected int rowTreeDepth;
-  protected int maxRowLength;
-  protected int maxQualifierLength;
-  protected int maxTagsLength;
-
-  // the timestamp from which the deltas are calculated
-  protected long minTimestamp;
-  protected int timestampDeltaWidth;
-  protected long minMvccVersion;
-  protected int mvccVersionDeltaWidth;
-
-  protected boolean allSameType;
-  protected byte allTypes;
-
-  protected int numUniqueRows;
-  protected int numUniqueFamilies;
-  protected int numUniqueQualifiers;
-  protected int numUniqueTags;
-
-
-  /***************** constructors ********************/
-
-  public PrefixTreeBlockMeta() {
-  }
-
-  public PrefixTreeBlockMeta(InputStream is) throws IOException{
-    this.version = VERSION;
-    this.bufferOffset = 0;
-    readVariableBytesFromInputStream(is);
-  }
-
-  /**
-   * @param buffer positioned at start of PtBlockMeta
-   */
-  public PrefixTreeBlockMeta(ByteBuff buffer) {
-    initOnBlock(buffer);
-  }
-
-  public void initOnBlock(ByteBuff buffer) {
-    bufferOffset = buffer.position();
-    readVariableBytesFromBuffer(buffer, bufferOffset);
-  }
-
-
-  /**************** operate on each field **********************/
-
-  public int calculateNumMetaBytes(){
-    int numBytes = 0;
-    numBytes += UVIntTool.numBytes(version);
-    numBytes += UVLongTool.numBytes(numMetaBytes);
-    numBytes += UVIntTool.numBytes(numKeyValueBytes);
-    ++numBytes;//os.write(getIncludesMvccVersion());
-
-    numBytes += UVIntTool.numBytes(numRowBytes);
-    numBytes += UVIntTool.numBytes(numFamilyBytes);
-    numBytes += UVIntTool.numBytes(numQualifierBytes);
-    numBytes += UVIntTool.numBytes(numTagsBytes);
-    numBytes += UVIntTool.numBytes(numTimestampBytes);
-    numBytes += UVIntTool.numBytes(numMvccVersionBytes);
-    numBytes += UVIntTool.numBytes(numValueBytes);
-
-    numBytes += UVIntTool.numBytes(nextNodeOffsetWidth);
-    numBytes += UVIntTool.numBytes(familyOffsetWidth);
-    numBytes += UVIntTool.numBytes(qualifierOffsetWidth);
-    numBytes += UVIntTool.numBytes(tagsOffsetWidth);
-    numBytes += UVIntTool.numBytes(timestampIndexWidth);
-    numBytes += UVIntTool.numBytes(mvccVersionIndexWidth);
-    numBytes += UVIntTool.numBytes(valueOffsetWidth);
-    numBytes += UVIntTool.numBytes(valueLengthWidth);
-
-    numBytes += UVIntTool.numBytes(rowTreeDepth);
-    numBytes += UVIntTool.numBytes(maxRowLength);
-    numBytes += UVIntTool.numBytes(maxQualifierLength);
-    numBytes += UVIntTool.numBytes(maxTagsLength);
-
-    numBytes += UVLongTool.numBytes(minTimestamp);
-    numBytes += UVIntTool.numBytes(timestampDeltaWidth);
-    numBytes += UVLongTool.numBytes(minMvccVersion);
-    numBytes += UVIntTool.numBytes(mvccVersionDeltaWidth);
-    ++numBytes;//os.write(getAllSameTypeByte());
-    ++numBytes;//os.write(allTypes);
-
-    numBytes += UVIntTool.numBytes(numUniqueRows);
-    numBytes += UVIntTool.numBytes(numUniqueFamilies);
-    numBytes += UVIntTool.numBytes(numUniqueQualifiers);
-    numBytes += UVIntTool.numBytes(numUniqueTags);
-    return numBytes;
-  }
-
-  public void writeVariableBytesToOutputStream(OutputStream os) throws IOException{
-      UVIntTool.writeBytes(version, os);
-      UVIntTool.writeBytes(numMetaBytes, os);
-      UVIntTool.writeBytes(numKeyValueBytes, os);
-      os.write(getIncludesMvccVersionByte());
-
-      UVIntTool.writeBytes(numRowBytes, os);
-      UVIntTool.writeBytes(numFamilyBytes, os);
-      UVIntTool.writeBytes(numQualifierBytes, os);
-      UVIntTool.writeBytes(numTagsBytes, os);
-      UVIntTool.writeBytes(numTimestampBytes, os);
-      UVIntTool.writeBytes(numMvccVersionBytes, os);
-      UVIntTool.writeBytes(numValueBytes, os);
-
-      UVIntTool.writeBytes(nextNodeOffsetWidth, os);
-      UVIntTool.writeBytes(familyOffsetWidth, os);
-      UVIntTool.writeBytes(qualifierOffsetWidth, os);
-      UVIntTool.writeBytes(tagsOffsetWidth, os);
-      UVIntTool.writeBytes(timestampIndexWidth, os);
-      UVIntTool.writeBytes(mvccVersionIndexWidth, os);
-      UVIntTool.writeBytes(valueOffsetWidth, os);
-      UVIntTool.writeBytes(valueLengthWidth, os);
-
-      UVIntTool.writeBytes(rowTreeDepth, os);
-      UVIntTool.writeBytes(maxRowLength, os);
-      UVIntTool.writeBytes(maxQualifierLength, os);
-      UVIntTool.writeBytes(maxTagsLength, os);
-
-      UVLongTool.writeBytes(minTimestamp, os);
-      UVIntTool.writeBytes(timestampDeltaWidth, os);
-      UVLongTool.writeBytes(minMvccVersion, os);
-      UVIntTool.writeBytes(mvccVersionDeltaWidth, os);
-      os.write(getAllSameTypeByte());
-      os.write(allTypes);
-
-      UVIntTool.writeBytes(numUniqueRows, os);
-      UVIntTool.writeBytes(numUniqueFamilies, os);
-      UVIntTool.writeBytes(numUniqueQualifiers, os);
-      UVIntTool.writeBytes(numUniqueTags, os);
-  }
-
-  public void readVariableBytesFromInputStream(InputStream is) throws IOException{
-      version = UVIntTool.getInt(is);
-      numMetaBytes = UVIntTool.getInt(is);
-      numKeyValueBytes = UVIntTool.getInt(is);
-      setIncludesMvccVersion((byte) is.read());
-
-      numRowBytes = UVIntTool.getInt(is);
-      numFamilyBytes = UVIntTool.getInt(is);
-      numQualifierBytes = UVIntTool.getInt(is);
-      numTagsBytes = UVIntTool.getInt(is);
-      numTimestampBytes = UVIntTool.getInt(is);
-      numMvccVersionBytes = UVIntTool.getInt(is);
-      numValueBytes = UVIntTool.getInt(is);
-
-      nextNodeOffsetWidth = UVIntTool.getInt(is);
-      familyOffsetWidth = UVIntTool.getInt(is);
-      qualifierOffsetWidth = UVIntTool.getInt(is);
-      tagsOffsetWidth = UVIntTool.getInt(is);
-      timestampIndexWidth = UVIntTool.getInt(is);
-      mvccVersionIndexWidth = UVIntTool.getInt(is);
-      valueOffsetWidth = UVIntTool.getInt(is);
-      valueLengthWidth = UVIntTool.getInt(is);
-
-      rowTreeDepth = UVIntTool.getInt(is);
-      maxRowLength = UVIntTool.getInt(is);
-      maxQualifierLength = UVIntTool.getInt(is);
-      maxTagsLength = UVIntTool.getInt(is);
-
-      minTimestamp = UVLongTool.getLong(is);
-      timestampDeltaWidth = UVIntTool.getInt(is);
-      minMvccVersion = UVLongTool.getLong(is);
-      mvccVersionDeltaWidth = UVIntTool.getInt(is);
-
-      setAllSameType((byte) is.read());
-      allTypes = (byte) is.read();
-
-      numUniqueRows = UVIntTool.getInt(is);
-      numUniqueFamilies = UVIntTool.getInt(is);
-      numUniqueQualifiers = UVIntTool.getInt(is);
-      numUniqueTags = UVIntTool.getInt(is);
-  }
-
-  public void readVariableBytesFromBuffer(ByteBuff buf, int offset) {
-    int position = offset;
-
-    version = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(version);
-    numMetaBytes = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numMetaBytes);
-    numKeyValueBytes = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numKeyValueBytes);
-    setIncludesMvccVersion(buf.get(position));
-    ++position;
-
-    numRowBytes = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numRowBytes);
-    numFamilyBytes = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numFamilyBytes);
-    numQualifierBytes = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numQualifierBytes);
-    numTagsBytes = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numTagsBytes);
-    numTimestampBytes = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numTimestampBytes);
-    numMvccVersionBytes = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numMvccVersionBytes);
-    numValueBytes = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numValueBytes);
-
-    nextNodeOffsetWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(nextNodeOffsetWidth);
-    familyOffsetWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(familyOffsetWidth);
-    qualifierOffsetWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(qualifierOffsetWidth);
-    tagsOffsetWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(tagsOffsetWidth);
-    timestampIndexWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(timestampIndexWidth);
-    mvccVersionIndexWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(mvccVersionIndexWidth);
-    valueOffsetWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(valueOffsetWidth);
-    valueLengthWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(valueLengthWidth);
-
-    rowTreeDepth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(rowTreeDepth);
-    maxRowLength = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(maxRowLength);
-    maxQualifierLength = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(maxQualifierLength);
-    maxTagsLength = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(maxTagsLength);
-    minTimestamp = UVLongTool.getLong(buf, position);
-    position += UVLongTool.numBytes(minTimestamp);
-    timestampDeltaWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(timestampDeltaWidth);
-    minMvccVersion = UVLongTool.getLong(buf, position);
-    position += UVLongTool.numBytes(minMvccVersion);
-    mvccVersionDeltaWidth = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(mvccVersionDeltaWidth);
-
-    setAllSameType(buf.get(position));
-    ++position;
-    allTypes =  buf.get(position);
-    ++position;
-
-    numUniqueRows = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numUniqueRows);
-    numUniqueFamilies = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numUniqueFamilies);
-    numUniqueQualifiers = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numUniqueQualifiers);
-    numUniqueTags = UVIntTool.getInt(buf, position);
-    position += UVIntTool.numBytes(numUniqueTags);
-  }
-
-  //TODO method that can read directly from ByteBuffer instead of InputStream
-
-
-  /*************** methods *************************/
-
-  public int getKeyValueTypeWidth() {
-    return allSameType ? 0 : 1;
-  }
-
-  public byte getIncludesMvccVersionByte() {
-    return includesMvccVersion ? (byte) 1 : (byte) 0;
-  }
-
-  public void setIncludesMvccVersion(byte includesMvccVersionByte) {
-    includesMvccVersion = includesMvccVersionByte != 0;
-  }
-
-  public byte getAllSameTypeByte() {
-    return allSameType ? (byte) 1 : (byte) 0;
-  }
-
-  public void setAllSameType(byte allSameTypeByte) {
-    allSameType = allSameTypeByte != 0;
-  }
-
-  public boolean isAllSameTimestamp() {
-    return timestampIndexWidth == 0;
-  }
-
-  public boolean isAllSameMvccVersion() {
-    return mvccVersionIndexWidth == 0;
-  }
-
-  public void setTimestampFields(LongEncoder encoder){
-    this.minTimestamp = encoder.getMin();
-    this.timestampIndexWidth = encoder.getBytesPerIndex();
-    this.timestampDeltaWidth = encoder.getBytesPerDelta();
-    this.numTimestampBytes = encoder.getTotalCompressedBytes();
-  }
-
-  public void setMvccVersionFields(LongEncoder encoder){
-    this.minMvccVersion = encoder.getMin();
-    this.mvccVersionIndexWidth = encoder.getBytesPerIndex();
-    this.mvccVersionDeltaWidth = encoder.getBytesPerDelta();
-    this.numMvccVersionBytes = encoder.getTotalCompressedBytes();
-  }
-
-
-  /*************** Object methods *************************/
-
-  /**
-   * Generated by Eclipse
-   */
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
-      return true;
-    if (obj == null)
-      return false;
-    if (getClass() != obj.getClass())
-      return false;
-    PrefixTreeBlockMeta other = (PrefixTreeBlockMeta) obj;
-    if (allSameType != other.allSameType)
-      return false;
-    if (allTypes != other.allTypes)
-      return false;
-    if (bufferOffset != other.bufferOffset)
-      return false;
-    if (valueLengthWidth != other.valueLengthWidth)
-      return false;
-    if (valueOffsetWidth != other.valueOffsetWidth)
-      return false;
-    if (familyOffsetWidth != other.familyOffsetWidth)
-      return false;
-    if (includesMvccVersion != other.includesMvccVersion)
-      return false;
-    if (maxQualifierLength != other.maxQualifierLength)
-      return false;
-    if (maxTagsLength != other.maxTagsLength)
-      return false;
-    if (maxRowLength != other.maxRowLength)
-      return false;
-    if (mvccVersionDeltaWidth != other.mvccVersionDeltaWidth)
-      return false;
-    if (mvccVersionIndexWidth != other.mvccVersionIndexWidth)
-      return false;
-    if (minMvccVersion != other.minMvccVersion)
-      return false;
-    if (minTimestamp != other.minTimestamp)
-      return false;
-    if (nextNodeOffsetWidth != other.nextNodeOffsetWidth)
-      return false;
-    if (numValueBytes != other.numValueBytes)
-      return false;
-    if (numFamilyBytes != other.numFamilyBytes)
-      return false;
-    if (numMvccVersionBytes != other.numMvccVersionBytes)
-      return false;
-    if (numMetaBytes != other.numMetaBytes)
-      return false;
-    if (numQualifierBytes != other.numQualifierBytes)
-      return false;
-    if (numTagsBytes != other.numTagsBytes)
-      return false;
-    if (numRowBytes != other.numRowBytes)
-      return false;
-    if (numTimestampBytes != other.numTimestampBytes)
-      return false;
-    if (numUniqueFamilies != other.numUniqueFamilies)
-      return false;
-    if (numUniqueQualifiers != other.numUniqueQualifiers)
-      return false;
-    if (numUniqueTags != other.numUniqueTags)
-      return false;
-    if (numUniqueRows != other.numUniqueRows)
-      return false;
-    if (numKeyValueBytes != other.numKeyValueBytes)
-      return false;
-    if (qualifierOffsetWidth != other.qualifierOffsetWidth)
-      return false;
-    if(tagsOffsetWidth !=  other.tagsOffsetWidth) 
-      return false;
-    if (rowTreeDepth != other.rowTreeDepth)
-      return false;
-    if (timestampDeltaWidth != other.timestampDeltaWidth)
-      return false;
-    if (timestampIndexWidth != other.timestampIndexWidth)
-      return false;
-    if (version != other.version)
-      return false;
-    return true;
-  }
-
-  /**
-   * Generated by Eclipse
-   */
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + (allSameType ? 1231 : 1237);
-    result = prime * result + allTypes;
-    result = prime * result + bufferOffset;
-    result = prime * result + valueLengthWidth;
-    result = prime * result + valueOffsetWidth;
-    result = prime * result + familyOffsetWidth;
-    result = prime * result + (includesMvccVersion ? 1231 : 1237);
-    result = prime * result + maxQualifierLength;
-    result = prime * result + maxTagsLength;
-    result = prime * result + maxRowLength;
-    result = prime * result + mvccVersionDeltaWidth;
-    result = prime * result + mvccVersionIndexWidth;
-    result = prime * result + (int) (minMvccVersion ^ (minMvccVersion >>> 32));
-    result = prime * result + (int) (minTimestamp ^ (minTimestamp >>> 32));
-    result = prime * result + nextNodeOffsetWidth;
-    result = prime * result + numValueBytes;
-    result = prime * result + numFamilyBytes;
-    result = prime * result + numMvccVersionBytes;
-    result = prime * result + numMetaBytes;
-    result = prime * result + numQualifierBytes;
-    result = prime * result + numTagsBytes;
-    result = prime * result + numRowBytes;
-    result = prime * result + numTimestampBytes;
-    result = prime * result + numUniqueFamilies;
-    result = prime * result + numUniqueQualifiers;
-    result = prime * result + numUniqueTags;
-    result = prime * result + numUniqueRows;
-    result = prime * result + numKeyValueBytes;
-    result = prime * result + qualifierOffsetWidth;
-    result = prime * result + tagsOffsetWidth;
-    result = prime * result + rowTreeDepth;
-    result = prime * result + timestampDeltaWidth;
-    result = prime * result + timestampIndexWidth;
-    result = prime * result + version;
-    return result;
-  }
-
-  /**
-   * Generated by Eclipse
-   */
-  @Override
-  public String toString() {
-    StringBuilder builder = new StringBuilder();
-    builder.append("PtBlockMeta [bufferOffset=");
-    builder.append(bufferOffset);
-    builder.append(", version=");
-    builder.append(version);
-    builder.append(", numMetaBytes=");
-    builder.append(numMetaBytes);
-    builder.append(", numKeyValueBytes=");
-    builder.append(numKeyValueBytes);
-    builder.append(", includesMvccVersion=");
-    builder.append(includesMvccVersion);
-    builder.append(", numRowBytes=");
-    builder.append(numRowBytes);
-    builder.append(", numFamilyBytes=");
-    builder.append(numFamilyBytes);
-    builder.append(", numQualifierBytes=");
-    builder.append(numQualifierBytes);
-    builder.append(", numTimestampBytes=");
-    builder.append(numTimestampBytes);
-    builder.append(", numMvccVersionBytes=");
-    builder.append(numMvccVersionBytes);
-    builder.append(", numValueBytes=");
-    builder.append(numValueBytes);
-    builder.append(", numTagBytes=");
-    builder.append(numTagsBytes);
-    builder.append(", nextNodeOffsetWidth=");
-    builder.append(nextNodeOffsetWidth);
-    builder.append(", familyOffsetWidth=");
-    builder.append(familyOffsetWidth);
-    builder.append(", qualifierOffsetWidth=");
-    builder.append(qualifierOffsetWidth);
-    builder.append(", tagOffsetWidth=");
-    builder.append(tagsOffsetWidth);
-    builder.append(", timestampIndexWidth=");
-    builder.append(timestampIndexWidth);
-    builder.append(", mvccVersionIndexWidth=");
-    builder.append(mvccVersionIndexWidth);
-    builder.append(", valueOffsetWidth=");
-    builder.append(valueOffsetWidth);
-    builder.append(", valueLengthWidth=");
-    builder.append(valueLengthWidth);
-    builder.append(", rowTreeDepth=");
-    builder.append(rowTreeDepth);
-    builder.append(", maxRowLength=");
-    builder.append(maxRowLength);
-    builder.append(", maxQualifierLength=");
-    builder.append(maxQualifierLength);
-    builder.append(", maxTagLength=");
-    builder.append(maxTagsLength);
-    builder.append(", minTimestamp=");
-    builder.append(minTimestamp);
-    builder.append(", timestampDeltaWidth=");
-    builder.append(timestampDeltaWidth);
-    builder.append(", minMvccVersion=");
-    builder.append(minMvccVersion);
-    builder.append(", mvccVersionDeltaWidth=");
-    builder.append(mvccVersionDeltaWidth);
-    builder.append(", allSameType=");
-    builder.append(allSameType);
-    builder.append(", allTypes=");
-    builder.append(allTypes);
-    builder.append(", numUniqueRows=");
-    builder.append(numUniqueRows);
-    builder.append(", numUniqueFamilies=");
-    builder.append(numUniqueFamilies);
-    builder.append(", numUniqueQualifiers=");
-    builder.append(numUniqueQualifiers);
-    builder.append(", numUniqueTags=");
-    builder.append(numUniqueTags);
-    builder.append("]");
-    return builder.toString();
-  }
-
-
-  /************** absolute getters *******************/
-
-  public int getAbsoluteRowOffset() {
-    return getBufferOffset() + numMetaBytes;
-  }
-
-  public int getAbsoluteFamilyOffset() {
-    return getAbsoluteRowOffset() + numRowBytes;
-  }
-
-  public int getAbsoluteQualifierOffset() {
-    return getAbsoluteFamilyOffset() + numFamilyBytes;
-  }
-
-  public int getAbsoluteTagsOffset() {
-    return getAbsoluteQualifierOffset() + numQualifierBytes;
-  }
-
-  public int getAbsoluteTimestampOffset() {
-    return getAbsoluteTagsOffset() + numTagsBytes;
-  }
-
-  public int getAbsoluteMvccVersionOffset() {
-    return getAbsoluteTimestampOffset() + numTimestampBytes;
-  }
-
-  public int getAbsoluteValueOffset() {
-    return getAbsoluteMvccVersionOffset() + numMvccVersionBytes;
-  }
-
-
-  /*************** get/set ***************************/
-
-  public int getTimestampDeltaWidth() {
-    return timestampDeltaWidth;
-  }
-
-  public void setTimestampDeltaWidth(int timestampDeltaWidth) {
-    this.timestampDeltaWidth = timestampDeltaWidth;
-  }
-
-  public int getValueOffsetWidth() {
-    return valueOffsetWidth;
-  }
-
-  public int getTagsOffsetWidth() {
-    return tagsOffsetWidth;
-  }
-
-  public void setValueOffsetWidth(int dataOffsetWidth) {
-    this.valueOffsetWidth = dataOffsetWidth;
-  }
-
-  public void setTagsOffsetWidth(int dataOffsetWidth) {
-    this.tagsOffsetWidth = dataOffsetWidth;
-  }
-
-  public int getValueLengthWidth() {
-    return valueLengthWidth;
-  }
-
-  public void setValueLengthWidth(int dataLengthWidth) {
-    this.valueLengthWidth = dataLengthWidth;
-  }
-
-  public int getMaxRowLength() {
-    return maxRowLength;
-  }
-
-  public void setMaxRowLength(int maxRowLength) {
-    this.maxRowLength = maxRowLength;
-  }
-
-  public long getMinTimestamp() {
-    return minTimestamp;
-  }
-
-  public void setMinTimestamp(long minTimestamp) {
-    this.minTimestamp = minTimestamp;
-  }
-
-  public byte getAllTypes() {
-    return allTypes;
-  }
-
-  public void setAllTypes(byte allTypes) {
-    this.allTypes = allTypes;
-  }
-
-  public boolean isAllSameType() {
-    return allSameType;
-  }
-
-  public void setAllSameType(boolean allSameType) {
-    this.allSameType = allSameType;
-  }
-
-  public int getNextNodeOffsetWidth() {
-    return nextNodeOffsetWidth;
-  }
-
-  public void setNextNodeOffsetWidth(int nextNodeOffsetWidth) {
-    this.nextNodeOffsetWidth = nextNodeOffsetWidth;
-  }
-
-  public int getNumRowBytes() {
-    return numRowBytes;
-  }
-
-  public void setNumRowBytes(int numRowBytes) {
-    this.numRowBytes = numRowBytes;
-  }
-
-  public int getNumTimestampBytes() {
-    return numTimestampBytes;
-  }
-
-  public void setNumTimestampBytes(int numTimestampBytes) {
-    this.numTimestampBytes = numTimestampBytes;
-  }
-
-  public int getNumValueBytes() {
-    return numValueBytes;
-  }
-
-  public int getNumTagsBytes() {
-    return numTagsBytes;
-  }
-
-  public void setNumTagsBytes(int numTagBytes){
-    this.numTagsBytes = numTagBytes;
-  }
-
-  public void setNumValueBytes(int numValueBytes) {
-    this.numValueBytes = numValueBytes;
-  }
-
-  public int getNumMetaBytes() {
-    return numMetaBytes;
-  }
-
-  public void setNumMetaBytes(int numMetaBytes) {
-    this.numMetaBytes = numMetaBytes;
-  }
-
-  public int getBufferOffset() {
-    return bufferOffset;
-  }
-
-  public void setBufferOffset(int bufferOffset) {
-    this.bufferOffset = bufferOffset;
-  }
-
-  public int getNumKeyValueBytes() {
-    return numKeyValueBytes;
-  }
-
-  public void setNumKeyValueBytes(int numKeyValueBytes) {
-    this.numKeyValueBytes = numKeyValueBytes;
-  }
-
-  public int getRowTreeDepth() {
-    return rowTreeDepth;
-  }
-
-  public void setRowTreeDepth(int rowTreeDepth) {
-    this.rowTreeDepth = rowTreeDepth;
-  }
-
-  public int getNumMvccVersionBytes() {
-    return numMvccVersionBytes;
-  }
-
-  public void setNumMvccVersionBytes(int numMvccVersionBytes) {
-    this.numMvccVersionBytes = numMvccVersionBytes;
-  }
-
-  public int getMvccVersionDeltaWidth() {
-    return mvccVersionDeltaWidth;
-  }
-
-  public void setMvccVersionDeltaWidth(int mvccVersionDeltaWidth) {
-    this.mvccVersionDeltaWidth = mvccVersionDeltaWidth;
-  }
-
-  public long getMinMvccVersion() {
-    return minMvccVersion;
-  }
-
-  public void setMinMvccVersion(long minMvccVersion) {
-    this.minMvccVersion = minMvccVersion;
-  }
-
-  public int getNumFamilyBytes() {
-    return numFamilyBytes;
-  }
-
-  public void setNumFamilyBytes(int numFamilyBytes) {
-    this.numFamilyBytes = numFamilyBytes;
-  }
-
-  public int getFamilyOffsetWidth() {
-    return familyOffsetWidth;
-  }
-
-  public void setFamilyOffsetWidth(int familyOffsetWidth) {
-    this.familyOffsetWidth = familyOffsetWidth;
-  }
-
-  public int getNumUniqueRows() {
-    return numUniqueRows;
-  }
-
-  public void setNumUniqueRows(int numUniqueRows) {
-    this.numUniqueRows = numUniqueRows;
-  }
-
-  public int getNumUniqueFamilies() {
-    return numUniqueFamilies;
-  }
-
-  public void setNumUniqueFamilies(int numUniqueFamilies) {
-    this.numUniqueFamilies = numUniqueFamilies;
-  }
-
-  public int getNumUniqueQualifiers() {
-    return numUniqueQualifiers;
-  }
-
-  public void setNumUniqueQualifiers(int numUniqueQualifiers) {
-    this.numUniqueQualifiers = numUniqueQualifiers;
-  }
-
-  public void setNumUniqueTags(int numUniqueTags) {
-    this.numUniqueTags = numUniqueTags;
-  }
-
-  public int getNumUniqueTags() {
-    return numUniqueTags;
-  }
-  public int getNumQualifierBytes() {
-    return numQualifierBytes;
-  }
-
-  public void setNumQualifierBytes(int numQualifierBytes) {
-    this.numQualifierBytes = numQualifierBytes;
-  }
-
-  public int getQualifierOffsetWidth() {
-    return qualifierOffsetWidth;
-  }
-
-  public void setQualifierOffsetWidth(int qualifierOffsetWidth) {
-    this.qualifierOffsetWidth = qualifierOffsetWidth;
-  }
-
-  public int getMaxQualifierLength() {
-    return maxQualifierLength;
-  }
-
-  // TODO : decide on some max value for this ? INTEGER_MAX?
-  public void setMaxQualifierLength(int maxQualifierLength) {
-    this.maxQualifierLength = maxQualifierLength;
-  }
-
-  public int getMaxTagsLength() {
-    return this.maxTagsLength;
-  }
-
-  public void setMaxTagsLength(int maxTagLength) {
-    this.maxTagsLength = maxTagLength;
-  }
-
-  public int getTimestampIndexWidth() {
-    return timestampIndexWidth;
-  }
-
-  public void setTimestampIndexWidth(int timestampIndexWidth) {
-    this.timestampIndexWidth = timestampIndexWidth;
-  }
-
-  public int getMvccVersionIndexWidth() {
-    return mvccVersionIndexWidth;
-  }
-
-  public void setMvccVersionIndexWidth(int mvccVersionIndexWidth) {
-    this.mvccVersionIndexWidth = mvccVersionIndexWidth;
-  }
-
-  public int getVersion() {
-    return version;
-  }
-
-  public void setVersion(int version) {
-    this.version = version;
-  }
-
-  public boolean isIncludesMvccVersion() {
-    return includesMvccVersion;
-  }
-
-  public void setIncludesMvccVersion(boolean includesMvccVersion) {
-    this.includesMvccVersion = includesMvccVersion;
-  }
-
-}
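
The class comment above captures the design: all block metadata sits at the front of the encoded byte[] as variable-length ints, decoded once on first access of each block, which is why every field read in readVariableBytesFromBuffer advances the position by UVIntTool.numBytes(value). A minimal varint sketch for readers unfamiliar with the idea (a generic base-128 encoding, assumed here for illustration; UVIntTool's exact wire format may differ):

    // Assumes java.io.{InputStream, OutputStream, IOException} imports.
    // 7 payload bits per byte, high bit set on continuation bytes.
    static void writeUVInt(OutputStream os, int value) throws IOException {
      while ((value & ~0x7F) != 0) {
        os.write((value & 0x7F) | 0x80);  // emit low 7 bits, flag more to come
        value >>>= 7;
      }
      os.write(value);                    // final byte has the high bit clear
    }

    static int readUVInt(InputStream is) throws IOException {
      int value = 0;
      for (int shift = 0; ; shift += 7) {
        int b = is.read();
        value |= (b & 0x7F) << shift;     // accumulate 7 bits at a time
        if ((b & 0x80) == 0) {
          return value;
        }
      }
    }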

http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
deleted file mode 100644
index 4295035..0000000
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.CellComparatorImpl.MetaCellComparator;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
-import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
-import org.apache.hadoop.hbase.codec.prefixtree.encode.EncoderFactory;
-import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder;
-import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.encoding.EncodingState;
-import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
-import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
-import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
-import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
-import org.apache.hadoop.hbase.io.hfile.BlockType;
-import org.apache.hadoop.hbase.io.hfile.HFileContext;
-import org.apache.hadoop.hbase.nio.ByteBuff;
-import org.apache.hadoop.hbase.nio.SingleByteBuff;
-import org.apache.hadoop.hbase.util.ByteBufferUtils;
-import org.apache.hadoop.io.WritableUtils;
-
-/**
- * <p>
- * This class is created via reflection in DataBlockEncoding enum. Update the enum if class name or
- * package changes.
- * </p>
- * PrefixTreeDataBlockEncoder implementation of DataBlockEncoder. This is the primary entry point
- * for PrefixTree encoding and decoding. Encoding is delegated to instances of
- * {@link PrefixTreeEncoder}, and decoding is delegated to instances of
- * {@link org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher}.
- * Encoder and decoder instances are
- * created and recycled by static PtEncoderFactory and PtDecoderFactory.
- */
-@InterfaceAudience.Private
-public class PrefixTreeCodec implements DataBlockEncoder {
-
-  /**
-   * no-arg constructor for reflection
-   */
-  public PrefixTreeCodec() {
-  }
-
-  @Override
-  public ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx)
-      throws IOException {
-    return decodeKeyValues(source, 0, 0, decodingCtx);
-  }
-
-
-  /**
-   * I don't think this method is called during normal HBase operation, so efficiency is not
-   * important.
-   */
-  public ByteBuffer decodeKeyValues(DataInputStream source, int allocateHeaderLength,
-      int skipLastBytes, HFileBlockDecodingContext decodingCtx) throws IOException {
-    ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source);// waste
-    sourceAsBuffer.mark();
-    PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(new SingleByteBuff(sourceAsBuffer));
-    sourceAsBuffer.rewind();
-    int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes();
-    byte[] keyValueBytesWithHeader = new byte[numV1BytesWithHeader];
-    ByteBuffer result = ByteBuffer.wrap(keyValueBytesWithHeader);
-    result.rewind();
-    CellSearcher searcher = null;
-    try {
-      boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
-      searcher = DecoderFactory.checkOut(new SingleByteBuff(sourceAsBuffer), includesMvcc);
-      while (searcher.advance()) {
-        KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current());
-        // needs to be modified for DirectByteBuffers. no existing methods to
-        // write VLongs to byte[]
-        int offset = result.arrayOffset() + result.position();
-        System.arraycopy(currentCell.getBuffer(), currentCell.getOffset(), result.array(), offset,
-            currentCell.getLength());
-        int keyValueLength = KeyValueUtil.length(currentCell);
-        ByteBufferUtils.skip(result, keyValueLength);
-        offset += keyValueLength;
-        if (includesMvcc) {
-          ByteBufferUtils.writeVLong(result, currentCell.getSequenceId());
-        }
-      }
-      result.position(result.limit());//make it appear as if we were appending
-      return result;
-    } finally {
-      DecoderFactory.checkIn(searcher);
-    }
-  }
-
-
-  @Override
-  public Cell getFirstKeyCellInBlock(ByteBuff block) {
-    block.rewind();
-    PrefixTreeArraySearcher searcher = null;
-    try {
-      // should i includeMemstoreTS (second argument)?  i think PrefixKeyDeltaEncoder is, so i will
-      searcher = DecoderFactory.checkOut(block, true);
-      if (!searcher.positionAtFirstCell()) {
-        return null;
-      }
-      return searcher.current();
-    } finally {
-      DecoderFactory.checkIn(searcher);
-    }
-  }
-
-  @Override
-  public HFileBlockEncodingContext newDataBlockEncodingContext(
-      DataBlockEncoding encoding, byte[] header, HFileContext meta) {
-    if(DataBlockEncoding.PREFIX_TREE != encoding){
-      //i'm not sure why encoding is in the interface.  Each encoder implementation should probably
-      //know it's encoding type
-      throw new IllegalArgumentException("only DataBlockEncoding.PREFIX_TREE supported");
-    }
-    return new HFileBlockDefaultEncodingContext(encoding, header, meta);
-  }
-
-  @Override
-  public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
-    return new HFileBlockDefaultDecodingContext(meta);
-  }
-
-  /**
-   * Is this the correct handling of an illegal comparator?  How to prevent that from getting all
-   * the way to this point.
-   */
-  @Override
-  public EncodedSeeker createSeeker(CellComparator comparator,
-      HFileBlockDecodingContext decodingCtx) {
-    if (comparator instanceof MetaCellComparator) {
-      throw new IllegalArgumentException(
-          "DataBlockEncoding.PREFIX_TREE not compatible with hbase:meta " + "table");
-    }
-
-    return new PrefixTreeSeeker(decodingCtx.getHFileContext().isIncludesMvcc());
-  }
-
-  @Override
-  public int encode(Cell cell, HFileBlockEncodingContext encodingCtx, DataOutputStream out)
-      throws IOException {
-    PrefixTreeEncodingState state = (PrefixTreeEncodingState) encodingCtx.getEncodingState();
-    PrefixTreeEncoder builder = state.builder;
-    builder.write(cell);
-    int size = KeyValueUtil.length(cell);
-    if (encodingCtx.getHFileContext().isIncludesMvcc()) {
-      size += WritableUtils.getVIntSize(cell.getSequenceId());
-    }
-    return size;
-  }
-
-  private static class PrefixTreeEncodingState extends EncodingState {
-    PrefixTreeEncoder builder = null;
-  }
-
-  @Override
-  public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx, DataOutputStream out)
-      throws IOException {
-    if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
-      throw new IOException(this.getClass().getName() + " only accepts "
-          + HFileBlockDefaultEncodingContext.class.getName() + " as the " + "encoding context.");
-    }
-
-    HFileBlockDefaultEncodingContext encodingCtx = 
-        (HFileBlockDefaultEncodingContext) blkEncodingCtx;
-    encodingCtx.prepareEncoding(out);
-
-    PrefixTreeEncoder builder = EncoderFactory.checkOut(out, encodingCtx.getHFileContext()
-        .isIncludesMvcc());
-    PrefixTreeEncodingState state = new PrefixTreeEncodingState();
-    state.builder = builder;
-    blkEncodingCtx.setEncodingState(state);
-  }
-
-  @Override
-  public void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,
-      byte[] uncompressedBytesWithHeader) throws IOException {
-    PrefixTreeEncodingState state = (PrefixTreeEncodingState) encodingCtx.getEncodingState();
-    PrefixTreeEncoder builder = state.builder;
-    builder.flush();
-    EncoderFactory.checkIn(builder);
-    // Do we need to check this, or will it always be DataBlockEncoding.PREFIX_TREE?
-    if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
-      encodingCtx.postEncoding(BlockType.ENCODED_DATA);
-    } else {
-      encodingCtx.postEncoding(BlockType.DATA);
-    }
-  }
-}
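
The codec above drives a three-phase write contract: startBlockEncoding checks a PrefixTreeEncoder out of a pool and attaches it to the encoding context, encode is called once per cell and returns the unencoded size it consumed, and endBlockEncoding flushes the builder and returns it to the pool. Below is a minimal, self-contained sketch of a caller driving that start/encode/end contract; BlockWriterSketch, CellEncoder and writeBlock are hypothetical stand-in names, not HBase APIs.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;

// Hypothetical miniature of the start/encode/end contract; not an HBase API.
final class BlockWriterSketch {
  interface CellEncoder {
    void start(DataOutputStream out) throws IOException;              // cf. startBlockEncoding
    int encode(byte[] cell, DataOutputStream out) throws IOException; // cf. encode; returns size consumed
    void end(DataOutputStream out) throws IOException;                // cf. endBlockEncoding
  }

  static int writeBlock(List<byte[]> cells, CellEncoder enc) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    enc.start(out);
    int unencodedSize = 0;
    try {
      for (byte[] cell : cells) {
        unencodedSize += enc.encode(cell, out); // accumulate the unencoded size, as above
      }
    } finally {
      enc.end(out); // flush and release pooled state even if encoding fails
    }
    return unencodedSize;
  }
}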

http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
deleted file mode 100644
index b027eb1..0000000
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
+++ /dev/null
@@ -1,586 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree;
-
-import java.nio.ByteBuffer;
-
-import org.apache.hadoop.hbase.ByteBufferCell;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.PrivateCellUtil;
-import org.apache.hadoop.hbase.KeyValue.Type;
-import org.apache.hadoop.hbase.SettableSequenceId;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
-import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
-import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
-import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
-import org.apache.hadoop.hbase.nio.ByteBuff;
-import org.apache.hadoop.hbase.util.ByteBufferUtils;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ClassSize;
-
-/**
- * These methods have the same definition as any implementation of the EncodedSeeker.
- *
- * In the future, the EncodedSeeker could be modified to work with the Cell interface directly.  It
- * currently returns a new KeyValue object each time getKeyValue is called.  This is not horrible,
- * but in order to create a new KeyValue object, we must first allocate a new byte[] and copy in
- * the data from the PrefixTreeCell.  It is somewhat heavyweight right now.
- */
-@InterfaceAudience.Private
-public class PrefixTreeSeeker implements EncodedSeeker {
-
-  protected boolean includeMvccVersion;
-  protected PrefixTreeArraySearcher ptSearcher;
-
-  public PrefixTreeSeeker(boolean includeMvccVersion) {
-    this.includeMvccVersion = includeMvccVersion;
-  }
-
-  @Override
-  public void setCurrentBuffer(ByteBuff fullBlockBuffer) {
-    ptSearcher = DecoderFactory.checkOut(fullBlockBuffer, includeMvccVersion);
-    rewind();
-  }
-
-  /**
-   * <p>
-   * Currently unused.
-   * </p>
-   * TODO: performance leak. The searchers should be reused, but HBase does not currently have a
-   * hook from which this can be called.
-   */
-  public void releaseCurrentSearcher() {
-    DecoderFactory.checkIn(ptSearcher);
-  }
-
-
-  @Override
-  public Cell getKey() {
-    return ptSearcher.current();
-  }
-
-
-  @Override
-  public ByteBuffer getValueShallowCopy() {
-    return PrivateCellUtil.getValueBufferShallowCopy(ptSearcher.current());
-  }
-
-  /**
-   * Currently must do a deep copy into a new array.
-   */
-  @Override
-  public Cell getCell() {
-    // The PrefixTreeCell is a ByteBufferCell, and the value part of the cell determines whether
-    // it is an offheap or an onheap cell. All other parts of the cell (row, family and
-    // qualifier) are represented as onheap byte[].
-    ByteBufferCell cell = (ByteBufferCell)ptSearcher.current();
-    if (cell == null) {
-      return null;
-    }
-    // Use the ByteBufferCell to see if the Cell is onheap or offheap
-    if (cell.getValueByteBuffer().hasArray()) {
-      return new OnheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
-          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
-          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
-          cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(), cell.getTagsArray(),
-          cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(), cell.getTypeByte(),
-          cell.getSequenceId());
-    } else {
-      return new OffheapPrefixTreeCell(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
-          cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
-          cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
-          cell.getValueByteBuffer(), cell.getValuePosition(), cell.getValueLength(),
-          cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(), cell.getTimestamp(),
-          cell.getTypeByte(), cell.getSequenceId());
-    }
-  }
-
-  /**
-   * <p>
-   * Currently unused.
-   * </p><p>
-   * A nice, lightweight reference, though the underlying cell is transient. This method may return
-   * the same reference to the backing PrefixTreeCell repeatedly, while other implementations may
-   * return a different reference for each Cell.
-   * </p>
-   * The goal will be to transition the upper layers of HBase, like Filters and KeyValueHeap, to
-   * use this method instead of the getKeyValue() methods above.
-   */
-  public Cell get() {
-    return ptSearcher.current();
-  }
-
-  @Override
-  public void rewind() {
-    ptSearcher.positionAtFirstCell();
-  }
-
-  @Override
-  public boolean next() {
-    return ptSearcher.advance();
-  }
-
-  public boolean advance() {
-    return ptSearcher.advance();
-  }
-
-
-  private static final boolean USE_POSITION_BEFORE = false;
-
-  /*
-   * Support both of these options since the underlying PrefixTree supports
-   * both. Possibly expand the EncodedSeeker to utilize them both.
-   */
-
-  protected int seekToOrBeforeUsingPositionAtOrBefore(Cell kv, boolean seekBefore) {
-    // this does a deep copy of the key byte[] because the CellSearcher
-    // interface wants a Cell
-    CellScannerPosition position = ptSearcher.seekForwardToOrBefore(kv);
-
-    if (CellScannerPosition.AT == position) {
-      if (seekBefore) {
-        ptSearcher.previous();
-        return 1;
-      }
-      return 0;
-    }
-
-    return 1;
-  }
-
-  protected int seekToOrBeforeUsingPositionAtOrAfter(Cell kv, boolean seekBefore) {
-    // TODO: should probably switch this to use the seekForwardToOrBefore method
-    CellScannerPosition position = ptSearcher.seekForwardToOrAfter(kv);
-
-    if (CellScannerPosition.AT == position) {
-      if (seekBefore) {
-        ptSearcher.previous();
-        return 1;
-      }
-      return 0;
-    }
-
-    if (CellScannerPosition.AFTER == position) {
-      if (!ptSearcher.isBeforeFirst()) {
-        ptSearcher.previous();
-      }
-      return 1;
-    }
-
-    if (position == CellScannerPosition.AFTER_LAST) {
-      if (seekBefore) {
-        ptSearcher.previous();
-      }
-      return 1;
-    }
-
-    throw new RuntimeException("unexpected CellScannerPosition: " + position);
-  }
-
-  @Override
-  public int seekToKeyInBlock(Cell key, boolean forceBeforeOnExactMatch) {
-    if (USE_POSITION_BEFORE) {
-      return seekToOrBeforeUsingPositionAtOrBefore(key, forceBeforeOnExactMatch);
-    } else {
-      return seekToOrBeforeUsingPositionAtOrAfter(key, forceBeforeOnExactMatch);
-    }
-  }
-
-  @Override
-  public int compareKey(CellComparator comparator, Cell key) {
-    return comparator.compare(key,
-        ptSearcher.current());
-  }
-
-  /**
-   * Cloned version of the PrefixTreeCell in which everything except the value part is deep
-   * copied.
-   */
-  private static class OnheapPrefixTreeCell implements Cell, SettableSequenceId, HeapSize {
-    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
-        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)
-        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.ARRAY));
-    private byte[] row;
-    private short rowLength;
-    private byte[] fam;
-    private byte famLength;
-    private byte[] qual;
-    private int qualLength;
-    private byte[] val;
-    private int valOffset;
-    private int valLength;
-    private byte[] tag;
-    private int tagsLength;
-    private long ts;
-    private long seqId;
-    private byte type;
-
-    public OnheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,
-        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, byte[] val,
-        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,
-        long seqId) {
-      this.row = new byte[rowLength];
-      System.arraycopy(row, rowOffset, this.row, 0, rowLength);
-      this.rowLength = rowLength;
-      this.fam = new byte[famLength];
-      System.arraycopy(fam, famOffset, this.fam, 0, famLength);
-      this.famLength = famLength;
-      this.qual = new byte[qualLength];
-      System.arraycopy(qual, qualOffset, this.qual, 0, qualLength);
-      this.qualLength = qualLength;
-      this.tag = new byte[tagLength];
-      System.arraycopy(tag, tagOffset, this.tag, 0, tagLength);
-      this.tagsLength = tagLength;
-      this.val = val;
-      this.valLength = valLength;
-      this.valOffset = valOffset;
-      this.ts = ts;
-      this.seqId = seqId;
-      this.type = type;
-    }
-
-    @Override
-    public void setSequenceId(long seqId) {
-      this.seqId = seqId;
-    }
-
-    @Override
-    public byte[] getRowArray() {
-      return this.row;
-    }
-
-    @Override
-    public int getRowOffset() {
-      return 0;
-    }
-
-    @Override
-    public short getRowLength() {
-      return this.rowLength;
-    }
-
-    @Override
-    public byte[] getFamilyArray() {
-      return this.fam;
-    }
-
-    @Override
-    public int getFamilyOffset() {
-      return 0;
-    }
-
-    @Override
-    public byte getFamilyLength() {
-      return this.famLength;
-    }
-
-    @Override
-    public byte[] getQualifierArray() {
-      return this.qual;
-    }
-
-    @Override
-    public int getQualifierOffset() {
-      return 0;
-    }
-
-    @Override
-    public int getQualifierLength() {
-      return this.qualLength;
-    }
-
-    @Override
-    public long getTimestamp() {
-      return ts;
-    }
-
-    @Override
-    public byte getTypeByte() {
-      return type;
-    }
-
-    @Override
-    public long getSequenceId() {
-      return seqId;
-    }
-
-    @Override
-    public byte[] getValueArray() {
-      return val;
-    }
-
-    @Override
-    public int getValueOffset() {
-      return this.valOffset;
-    }
-
-    @Override
-    public int getValueLength() {
-      return this.valLength;
-    }
-
-    @Override
-    public byte[] getTagsArray() {
-      return this.tag;
-    }
-
-    @Override
-    public int getTagsOffset() {
-      return 0;
-    }
-
-    @Override
-    public int getTagsLength() {
-      return this.tagsLength;
-    }
-
-    @Override
-    public String toString() {
-      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());
-      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());
-      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),
-          getQualifierLength());
-      String timestamp = String.valueOf(getTimestamp());
-      return row + "/" + family + (family != null && family.length() > 0 ? ":" : "") + qualifier
-          + "/" + timestamp + "/" + Type.codeToType(type);
-    }
-
-    @Override
-    public long heapSize() {
-      return FIXED_OVERHEAD + rowLength + famLength + qualLength + valLength + tagsLength;
-    }
-  }
-
-  private static class OffheapPrefixTreeCell extends ByteBufferCell implements Cell,
-      SettableSequenceId, HeapSize {
-    private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT
-        + (5 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (4 * Bytes.SIZEOF_INT)
-        + (Bytes.SIZEOF_SHORT) + (2 * Bytes.SIZEOF_BYTE) + (5 * ClassSize.BYTE_BUFFER));
-    private ByteBuffer rowBuff;
-    private short rowLength;
-    private ByteBuffer famBuff;
-    private byte famLength;
-    private ByteBuffer qualBuff;
-    private int qualLength;
-    private ByteBuffer val;
-    private int valOffset;
-    private int valLength;
-    private ByteBuffer tagBuff;
-    private int tagsLength;
-    private long ts;
-    private long seqId;
-    private byte type;
-    public OffheapPrefixTreeCell(byte[] row, int rowOffset, short rowLength, byte[] fam,
-        int famOffset, byte famLength, byte[] qual, int qualOffset, int qualLength, ByteBuffer val,
-        int valOffset, int valLength, byte[] tag, int tagOffset, int tagLength, long ts, byte type,
-        long seqId) {
-      byte[] tmpRow = new byte[rowLength];
-      System.arraycopy(row, rowOffset, tmpRow, 0, rowLength);
-      this.rowBuff = ByteBuffer.wrap(tmpRow);
-      this.rowLength = rowLength;
-      byte[] tmpFam = new byte[famLength];
-      System.arraycopy(fam, famOffset, tmpFam, 0, famLength);
-      this.famBuff = ByteBuffer.wrap(tmpFam);
-      this.famLength = famLength;
-      byte[] tmpQual = new byte[qualLength];
-      System.arraycopy(qual, qualOffset, tmpQual, 0, qualLength);
-      this.qualBuff = ByteBuffer.wrap(tmpQual);
-      this.qualLength = qualLength;
-      byte[] tmpTag = new byte[tagLength];
-      System.arraycopy(tag, tagOffset, tmpTag, 0, tagLength);
-      this.tagBuff = ByteBuffer.wrap(tmpTag);
-      this.tagsLength = tagLength;
-      this.val = val;
-      this.valLength = valLength;
-      this.valOffset = valOffset;
-      this.ts = ts;
-      this.seqId = seqId;
-      this.type = type;
-    }
-    
-    @Override
-    public void setSequenceId(long seqId) {
-      this.seqId = seqId;
-    }
-
-    @Override
-    public byte[] getRowArray() {
-      return this.rowBuff.array();
-    }
-
-    @Override
-    public int getRowOffset() {
-      return getRowPosition();
-    }
-
-    @Override
-    public short getRowLength() {
-      return this.rowLength;
-    }
-
-    @Override
-    public byte[] getFamilyArray() {
-      return this.famBuff.array();
-    }
-
-    @Override
-    public int getFamilyOffset() {
-      return getFamilyPosition();
-    }
-
-    @Override
-    public byte getFamilyLength() {
-      return this.famLength;
-    }
-
-    @Override
-    public byte[] getQualifierArray() {
-      return this.qualBuff.array();
-    }
-
-    @Override
-    public int getQualifierOffset() {
-      return getQualifierPosition();
-    }
-
-    @Override
-    public int getQualifierLength() {
-      return this.qualLength;
-    }
-
-    @Override
-    public long getTimestamp() {
-      return ts;
-    }
-
-    @Override
-    public byte getTypeByte() {
-      return type;
-    }
-
-    @Override
-    public long getSequenceId() {
-      return seqId;
-    }
-
-    @Override
-    public byte[] getValueArray() {
-      byte[] tmpVal = new byte[valLength];
-      ByteBufferUtils.copyFromBufferToArray(tmpVal, val, valOffset, 0, valLength);
-      return tmpVal;
-    }
-
-    @Override
-    public int getValueOffset() {
-      return 0;
-    }
-
-    @Override
-    public int getValueLength() {
-      return this.valLength;
-    }
-
-    @Override
-    public byte[] getTagsArray() {
-      return this.tagBuff.array();
-    }
-
-    @Override
-    public int getTagsOffset() {
-      return getTagsPosition();
-    }
-
-    @Override
-    public int getTagsLength() {
-      return this.tagsLength;
-    }
-    
-    @Override
-    public ByteBuffer getRowByteBuffer() {
-      return this.rowBuff;
-    }
-    
-    @Override
-    public int getRowPosition() {
-      return 0;
-    }
-    
-    @Override
-    public ByteBuffer getFamilyByteBuffer() {
-      return this.famBuff;
-    }
-    
-    @Override
-    public int getFamilyPosition() {
-      return 0;
-    }
-    
-    @Override
-    public ByteBuffer getQualifierByteBuffer() {
-      return this.qualBuff;
-    }
-
-    @Override
-    public int getQualifierPosition() {
-      return 0;
-    }
-
-    @Override
-    public ByteBuffer getTagsByteBuffer() {
-      return this.tagBuff;
-    }
-
-    @Override
-    public int getTagsPosition() {
-      return 0;
-    }
-
-    @Override
-    public ByteBuffer getValueByteBuffer() {
-      return this.val;
-    }
-
-    @Override
-    public int getValuePosition() {
-      return this.valOffset;
-    }
-
-    @Override
-    public long heapSize() {
-      return FIXED_OVERHEAD;
-    }
-
-    @Override
-    public String toString() {
-      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());
-      String family = Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength());
-      String qualifier = Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(),
-          getQualifierLength());
-      String timestamp = String.valueOf(getTimestamp());
-      return row + "/" + family + (family != null && family.length() > 0 ? ":" : "") + qualifier
-          + "/" + timestamp + "/" + Type.codeToType(type);
-    }
-  }
-}
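
getCell in the seeker above branches on ByteBuffer.hasArray() to decide between building an onheap and an offheap cell copy. A small standalone illustration of that dispatch using only java.nio (no HBase types; HeapDispatchSketch and describe are illustrative names):

import java.nio.ByteBuffer;

// Minimal illustration of the hasArray() dispatch used by getCell above.
final class HeapDispatchSketch {
  static String describe(ByteBuffer value) {
    if (value.hasArray()) {
      // heap-backed: safe to hand out value.array() together with arrayOffset()
      return "onheap, backing array length=" + value.array().length;
    }
    // direct buffer: no backing array; callers must copy via a bulk get
    byte[] copy = new byte[value.remaining()];
    value.duplicate().get(copy); // duplicate() so we don't disturb the buffer's position
    return "offheap, copied " + copy.length + " bytes";
  }

  public static void main(String[] args) {
    System.out.println(describe(ByteBuffer.wrap(new byte[8])));  // onheap path
    System.out.println(describe(ByteBuffer.allocateDirect(8)));  // offheap path
  }
}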

http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
deleted file mode 100644
index f522b08..0000000
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/ArraySearcherPool.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.decode;
-
-import java.util.Queue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.nio.ByteBuff;
-
-/**
- * <p>
- * Pools PrefixTreeArraySearcher objects. Each Searcher can consist of hundreds or thousands of
- * objects, and one is needed for each HFile during a Get operation. With tens of thousands of
- * Gets/second, reusing these searchers may save a lot of young gen collections.
- * </p>
- * Alternative implementation would be a ByteBufferSearcherPool (not implemented yet).
- */
-@InterfaceAudience.Private
-public class ArraySearcherPool {
-
-  /**
-   * One decoder is needed for each storefile for each Get operation, so we may need hundreds at
-   * the same time. However, decoding is a CPU-bound activity, so the pool should be limited to
-   * something in the realm of the maximum number of reasonably active threads.
-   */
-  private static final Integer MAX_POOL_SIZE = 1000;
-
-  protected Queue<PrefixTreeArraySearcher> pool = new LinkedBlockingQueue<>(MAX_POOL_SIZE);
-
-  public PrefixTreeArraySearcher checkOut(ByteBuff buffer, boolean includesMvccVersion) {
-    PrefixTreeArraySearcher searcher = pool.poll(); // will return null if the pool is empty
-    searcher = DecoderFactory.ensureArraySearcherValid(buffer, searcher, includesMvccVersion);
-    return searcher;
-  }
-
-  public void checkIn(PrefixTreeArraySearcher searcher) {
-    searcher.releaseBlockReference();
-    pool.offer(searcher);
-  }
-
-  @Override
-  public String toString() {
-    return ("poolSize:" + pool.size());
-  }
-
-}
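
The pool above is a bounded, lossy object pool: poll() returns null when the pool is empty, in which case the caller builds a fresh searcher, and offer() silently drops the returned object when the pool is full. The same pattern in generic form, as a sketch only (BoundedPoolSketch is an illustrative name, not an HBase class):

import java.util.Queue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.function.Supplier;

// Generic form of the bounded, lossy object pool above.
final class BoundedPoolSketch<T> {
  private static final int MAX_POOL_SIZE = 1000; // same cap as ArraySearcherPool
  private final Queue<T> pool = new LinkedBlockingQueue<>(MAX_POOL_SIZE);
  private final Supplier<T> factory;

  BoundedPoolSketch(Supplier<T> factory) {
    this.factory = factory;
  }

  T checkOut() {
    T item = pool.poll();                 // null if the pool is empty
    return item != null ? item : factory.get();
  }

  void checkIn(T item) {
    pool.offer(item);                     // rejected (and left for GC) if the pool is full
  }
}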

http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java
deleted file mode 100644
index 9258b17..0000000
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/DecoderFactory.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.decode;
-
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher;
-import org.apache.hadoop.hbase.nio.ByteBuff;
-/**
- * Static wrapper class for the ArraySearcherPool.
- */
-@InterfaceAudience.Private
-public class DecoderFactory {
-  private static final ArraySearcherPool POOL = new ArraySearcherPool();
-
-  // TODO: will need a PrefixTreeSearcher on top of CellSearcher
-  public static PrefixTreeArraySearcher checkOut(final ByteBuff buffer,
-      boolean includeMvccVersion) {
-    return POOL.checkOut(buffer, includeMvccVersion);
-  }
-
-  public static void checkIn(CellSearcher pSearcher) {
-    if (pSearcher == null) {
-      return;
-    }
-    if (!(pSearcher instanceof PrefixTreeArraySearcher)) {
-      throw new IllegalArgumentException("Cannot return " + pSearcher.getClass() + " to "
-          + DecoderFactory.class);
-    }
-    PrefixTreeArraySearcher searcher = (PrefixTreeArraySearcher) pSearcher;
-    POOL.checkIn(searcher);
-  }
-
-
-  /**************************** helper ******************************/
-  public static PrefixTreeArraySearcher ensureArraySearcherValid(ByteBuff buffer,
-      PrefixTreeArraySearcher searcher, boolean includeMvccVersion) {
-    if (searcher == null) {
-      PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(buffer);
-      searcher = new PrefixTreeArraySearcher(blockMeta, blockMeta.getRowTreeDepth(),
-          blockMeta.getMaxRowLength(), blockMeta.getMaxQualifierLength(),
-          blockMeta.getMaxTagsLength());
-      searcher.initOnBlock(blockMeta, buffer, includeMvccVersion);
-      return searcher;
-    }
-
-    PrefixTreeBlockMeta blockMeta = searcher.getBlockMeta();
-    blockMeta.initOnBlock(buffer);
-    if (!searcher.areBuffersBigEnough()) {
-      int maxRowTreeStackNodes = Math.max(blockMeta.getRowTreeDepth(),
-        searcher.getMaxRowTreeStackNodes());
-      int rowBufferLength = Math.max(blockMeta.getMaxRowLength(), searcher.getRowBufferLength());
-      int qualifierBufferLength = Math.max(blockMeta.getMaxQualifierLength(),
-        searcher.getQualifierBufferLength());
-      int tagBufferLength = Math.max(blockMeta.getMaxTagsLength(), searcher.getTagBufferLength());
-      searcher = new PrefixTreeArraySearcher(blockMeta, maxRowTreeStackNodes, rowBufferLength,
-          qualifierBufferLength, tagBufferLength);
-    }
-    // this is where we parse the BlockMeta
-    searcher.initOnBlock(blockMeta, buffer, includeMvccVersion);
-    return searcher;
-  }
-
-}
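
Callers of DecoderFactory paired every checkOut with a checkIn in a finally block, as getFirstKeyCellInBlock in the codec above shows; checkIn also tolerates null, so a failed checkout cannot double-fault in the finally. That discipline can be captured in one helper; this sketch reuses the hypothetical BoundedPoolSketch from the previous example:

import java.util.function.Function;

// Sketch of the check-out / try / finally / check-in discipline used by the deleted codec.
final class PooledUseSketch {
  static <T, R> R withPooled(BoundedPoolSketch<T> pool, Function<T, R> body) {
    T item = pool.checkOut();
    try {
      return body.apply(item);
    } finally {
      pool.checkIn(item); // always return the item to the pool, even on exceptions
    }
  }
}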

http://git-wip-us.apache.org/repos/asf/hbase/blob/f812218f/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java
deleted file mode 100644
index cbb6eee..0000000
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArrayReversibleScanner.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.codec.prefixtree.decode;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
-import org.apache.hadoop.hbase.codec.prefixtree.scanner.ReversibleCellScanner;
-
-/**
- * Methods for going backwards through a PrefixTree block.  This class is split out on its own to
- * simplify the Scanner superclass and Searcher subclass.
- */
-@InterfaceAudience.Private
-public class PrefixTreeArrayReversibleScanner extends PrefixTreeArrayScanner implements
-    ReversibleCellScanner {
-
-  /***************** construct ******************************/
-
-  public PrefixTreeArrayReversibleScanner(PrefixTreeBlockMeta blockMeta, int rowTreeDepth,
-      int rowBufferLength, int qualifierBufferLength, int tagsBufferLength) {
-    super(blockMeta, rowTreeDepth, rowBufferLength, qualifierBufferLength, tagsBufferLength);
-  }
-
-
-  /***************** Object methods ***************************/
-
-  @Override
-  public boolean equals(Object obj) {
-    //trivial override to confirm intent (findbugs)
-    return super.equals(obj);
-  }
-
-
-  /***************** methods **********************************/
-
-  @Override
-  public boolean previous() {
-    if (afterLast) {
-      afterLast = false;
-      positionAtLastCell();
-      return true;
-    }
-    if (beforeFirst) {
-      return false;
-    }
-    if (isFirstCellInRow()) {
-      previousRowInternal();
-      if (beforeFirst) {
-        return false;
-      }
-      populateLastNonRowFields();
-      return true;
-    }
-    populatePreviousNonRowFields();
-    return true;
-  }
-
-  @Override
-  public boolean previousRow(boolean endOfRow) {
-    previousRowInternal();
-    if (beforeFirst) {
-      return false;
-    }
-    if (endOfRow) {
-      populateLastNonRowFields();
-    } else {
-      populateFirstNonRowFields();
-    }
-    return true;
-  }
-
-  private boolean previousRowInternal() {
-    if (beforeFirst) {
-      return false;
-    }
-    if (afterLast) {
-      positionAtLastRow();
-      return true;
-    }
-    if (currentRowNode.hasOccurrences()) {
-      discardCurrentRowNode(false);
-      if (currentRowNode == null) {
-        return false;
-      }
-    }
-    while (!beforeFirst) {
-      if (isDirectlyAfterNub()) { // we are about to back up to the nub
-        currentRowNode.resetFanIndex(); // sets it to -1, which is before the first leaf
-        nubCellsRemain = true; // this positions us on the nub
-        return true;
-      }
-      if (currentRowNode.hasPreviousFanNodes()) {
-        followPreviousFan();
-        descendToLastRowFromCurrentPosition();
-      } else {// keep going up the stack until we find previous fan positions
-        discardCurrentRowNode(false);
-        if (currentRowNode == null) {
-          return false;
-        }
-      }
-      if (currentRowNode.hasOccurrences()) {// escape clause
-        currentRowNode.resetFanIndex();
-        return true;// found some values
-      }
-    }
-    return false;// went past the beginning
-  }
-  
-  protected boolean isDirectlyAfterNub() {
-    return currentRowNode.isNub() && currentRowNode.getFanIndex() == 0;
-  }
-
-  protected void positionAtLastRow() {
-    reInitFirstNode();
-    descendToLastRowFromCurrentPosition();
-  }
-
-  protected void descendToLastRowFromCurrentPosition() {
-    while (currentRowNode.hasChildren()) {
-      followLastFan();
-    }
-  }
-
-  protected void positionAtLastCell() {
-    positionAtLastRow();
-    populateLastNonRowFields();
-  }
-
-}
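
previous() above is a four-way state machine: past-the-end backs up onto the last cell, before-the-start fails, the first cell of a row steps to the previous row's last cell, and anything else steps back within the current row. A miniature of that state machine over a list of rows (illustrative only; it assumes every row holds at least one cell):

import java.util.List;

// Miniature of the reversible-cursor state machine in previous() above.
final class ReverseCursorSketch {
  private final List<List<String>> rows; // each inner list holds the cells of one row
  private int row;                       // rows.size() while afterLast, -1 when beforeFirst
  private int cell;
  private boolean afterLast;

  ReverseCursorSketch(List<List<String>> rows) {
    this.rows = rows;
    this.row = rows.size(); // start positioned after the last cell
    this.afterLast = true;
  }

  boolean previous() {
    if (afterLast) {                   // back up onto the very last cell
      afterLast = false;
      row = rows.size() - 1;
      if (row < 0) {
        return false;                  // no rows at all
      }
      cell = rows.get(row).size() - 1;
      return true;
    }
    if (row < 0) {
      return false;                    // beforeFirst: nothing earlier
    }
    if (cell == 0) {                   // first cell in row: step to the previous row
      row--;
      if (row < 0) {
        return false;                  // walked past the beginning
      }
      cell = rows.get(row).size() - 1; // ...and position on its last cell
      return true;
    }
    cell--;                            // step back within the current row
    return true;
  }

  String current() {
    return rows.get(row).get(cell);    // valid only after previous() returned true
  }
}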

