hbase-commits mailing list archives

From: jmhs...@apache.org
Subject: svn commit: r1518817 [2/2] - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-common/src/main/java/org/apache/hadoop/hbase/ hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/ hbase-common/src/main/java/org/...
Date: Thu, 29 Aug 2013 20:45:05 GMT
Modified: hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java (original)
+++ hbase/trunk/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java Thu Aug 29 20:45:04 2013
@@ -25,8 +25,9 @@ import java.nio.ByteBuffer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
-import org.apache.hadoop.hbase.KeyValue.MetaKeyComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.KeyValue.MetaComparator;
+import org.apache.hadoop.hbase.KeyValue.RawBytesComparator;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.DecoderFactory;
 import org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeArraySearcher;
@@ -189,11 +190,10 @@ public class PrefixTreeCodec implements 
    * the way to this point.
    */
   @Override
-  public EncodedSeeker createSeeker(RawComparator<byte[]> comparator, boolean includesMvccVersion) {
-    if(! (comparator instanceof KeyComparator)){
+  public EncodedSeeker createSeeker(KVComparator comparator, boolean includesMvccVersion) {
+    if (comparator instanceof RawBytesComparator){
       throw new IllegalArgumentException("comparator must be KeyValue.KVComparator");
-    }
-    if(comparator instanceof MetaKeyComparator){
+    } else if (comparator instanceof MetaComparator){
       throw new IllegalArgumentException("DataBlockEncoding.PREFIX_TREE not compatible with META "
           +"table");
     }

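For context, a minimal sketch of the validation the new createSeeker contract performs, assuming the 0.95-era HBase classes shown in this hunk (KVComparator, MetaComparator, RawBytesComparator) are on the classpath; the class and method names below are illustrative, not part of the patch:

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.KeyValue.KVComparator;
    import org.apache.hadoop.hbase.KeyValue.MetaComparator;
    import org.apache.hadoop.hbase.KeyValue.RawBytesComparator;

    public class SeekerPreconditionSketch {
      // Restates the instanceof checks from PrefixTreeCodec.createSeeker above.
      static void checkComparator(KVComparator comparator) {
        if (comparator instanceof RawBytesComparator) {
          throw new IllegalArgumentException("comparator must be KeyValue.KVComparator");
        } else if (comparator instanceof MetaComparator) {
          throw new IllegalArgumentException(
              "DataBlockEncoding.PREFIX_TREE not compatible with META table");
        }
      }

      public static void main(String[] args) {
        checkComparator(KeyValue.COMPARATOR);    // the default comparator passes
        try {
          checkComparator(new MetaComparator()); // the meta comparator is rejected
        } catch (IllegalArgumentException expected) {
          System.out.println(expected.getMessage());
        }
      }
    }
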
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java Thu Aug 29 20:45:04 2013
@@ -160,7 +160,7 @@ public class HalfStoreFileReader extends
         // constrain the bottom.
         if (!top) {
           ByteBuffer bb = getKey();
-          if (getComparator().compare(bb.array(), bb.arrayOffset(), bb.limit(),
+          if (getComparator().compareFlatKey(bb.array(), bb.arrayOffset(), bb.limit(),
               splitkey, 0, splitkey.length) >= 0) {
             atEnd = true;
             return false;
@@ -179,13 +179,13 @@ public class HalfStoreFileReader extends
           byte[] fk = getFirstKey();
          // This will be null when the file is empty, in which case we cannot seekBefore to any key
           if (fk == null) return false;
-          if (getComparator().compare(key, offset, length, fk, 0,
+          if (getComparator().compareFlatKey(key, offset, length, fk, 0,
               fk.length) <= 0) {
             return false;
           }
         } else {
          // The equals sign isn't strictly necessary; it's just here to be consistent with seekTo
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
               splitkey.length) >= 0) {
             return this.delegate.seekBefore(splitkey, 0, splitkey.length);
           }
@@ -216,7 +216,7 @@ public class HalfStoreFileReader extends
         // Check key.
         ByteBuffer k = this.delegate.getKey();
         return this.delegate.getReader().getComparator().
-          compare(k.array(), k.arrayOffset(), k.limit(),
+          compareFlatKey(k.array(), k.arrayOffset(), k.limit(),
             splitkey, 0, splitkey.length) < 0;
       }
 
@@ -226,12 +226,12 @@ public class HalfStoreFileReader extends
 
       public int seekTo(byte[] key, int offset, int length) throws IOException {
         if (top) {
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
               splitkey.length) < 0) {
             return -1;
           }
         } else {
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
               splitkey.length) >= 0) {
             // we would place the scanner in the second half.
             // it might be an error to return false here ever...
@@ -256,12 +256,12 @@ public class HalfStoreFileReader extends
         //This function is identical to the corresponding seekTo function except
         //that we call reseekTo (and not seekTo) on the delegate.
         if (top) {
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
               splitkey.length) < 0) {
             return -1;
           }
         } else {
-          if (getComparator().compare(key, offset, length, splitkey, 0,
+          if (getComparator().compareFlatKey(key, offset, length, splitkey, 0,
               splitkey.length) >= 0) {
             // we would place the scanner in the second half.
             // it might be an error to return false here ever...

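The HalfStoreFileReader hunks above all make the same substitution: key-versus-splitkey comparisons move from RawComparator.compare to KVComparator.compareFlatKey, which compares serialized (flat) HFile keys. A minimal sketch of the top/bottom gate, with a hypothetical helper name:

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.KeyValue.KVComparator;

    public class HalfFileGateSketch {
      // "top" selects the upper half of a split store file: the top half holds
      // keys >= splitkey, the bottom half holds keys < splitkey.
      static boolean keyBelongsToHalf(KVComparator c, boolean top,
          byte[] key, int offset, int length, byte[] splitkey) {
        int cmp = c.compareFlatKey(key, offset, length, splitkey, 0, splitkey.length);
        return top ? cmp >= 0 : cmp < 0;
      }

      public static void main(String[] args) {
        byte[] split = KeyValue.createFirstOnRow("m".getBytes()).getKey();
        byte[] probe = KeyValue.createFirstOnRow("q".getBytes()).getKey();
        // Row "q" sorts after the split row "m", so it lands in the top half.
        System.out.println(keyBelongsToHalf(KeyValue.COMPARATOR, true,
            probe, 0, probe.length, split));
      }
    }
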
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java Thu Aug 29 20:45:04 2013
@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -71,7 +72,7 @@ public abstract class AbstractHFileReade
   protected int avgValueLen = -1;
 
   /** Key comparator */
-  protected RawComparator<byte []> comparator;
+  protected KVComparator comparator;
 
   /** Size of this file. */
   protected final long fileSize;
@@ -206,7 +207,7 @@ public abstract class AbstractHFileReade
 
   /** @return comparator */
   @Override
-  public RawComparator<byte []> getComparator() {
+  public KVComparator getComparator() {
     return comparator;
   }
 

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java Thu Aug 29 20:45:04 2013
@@ -33,7 +33,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -77,7 +77,7 @@ public abstract class AbstractHFileWrite
   protected long totalUncompressedBytes = 0;
 
   /** Key comparator. Used to ensure we write in order. */
-  protected final KeyComparator comparator;
+  protected final KVComparator comparator;
 
   /** Meta block names. */
   protected List<byte[]> metaNames = new ArrayList<byte[]>();
@@ -114,7 +114,7 @@ public abstract class AbstractHFileWrite
       FSDataOutputStream outputStream, Path path, int blockSize,
       Compression.Algorithm compressAlgo,
       HFileDataBlockEncoder dataBlockEncoder,
-      KeyComparator comparator) {
+      KVComparator comparator) {
     this.outputStream = outputStream;
     this.path = path;
     this.name = path != null ? path.getName() : outputStream.toString();
@@ -124,7 +124,7 @@ public abstract class AbstractHFileWrite
     this.blockEncoder = dataBlockEncoder != null
         ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
     this.comparator = comparator != null ? comparator
-        : KeyValue.KEY_COMPARATOR;
+        : KeyValue.COMPARATOR;
 
     closeOutputStream = path != null;
     this.cacheConf = cacheConf;
@@ -198,8 +198,9 @@ public abstract class AbstractHFileWrite
       throw new IOException("Key cannot be null or empty");
     }
     if (lastKeyBuffer != null) {
-      int keyComp = comparator.compare(lastKeyBuffer, lastKeyOffset,
+      int keyComp = comparator.compareFlatKey(lastKeyBuffer, lastKeyOffset,
           lastKeyLength, key, offset, length);
+
       if (keyComp > 0) {
         throw new IOException("Added a key not lexically larger than"
             + " previous key="

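The writer-side change above keeps the same invariant, appends must arrive in ascending key order, but now checks it with compareFlatKey. A sketch of that check in isolation, simplified to whole arrays instead of (buffer, offset, length) triples; the helper name is hypothetical:

    import java.io.IOException;
    import org.apache.hadoop.hbase.KeyValue.KVComparator;

    public class AppendOrderSketch {
      // Mirrors the lastKeyBuffer check in AbstractHFileWriter above.
      static void checkKeyOrder(KVComparator comparator, byte[] lastKey, byte[] key)
          throws IOException {
        if (lastKey != null
            && comparator.compareFlatKey(lastKey, 0, lastKey.length,
                key, 0, key.length) > 0) {
          throw new IOException("Added a key not lexically larger than previous key");
        }
      }
    }
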
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java Thu Aug 29 20:45:04 2013
@@ -31,10 +31,10 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.protobuf.generated.HFileProtos;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.RawComparator;
 
 import com.google.common.io.NullOutputStream;
 
@@ -114,8 +114,8 @@ public class FixedFileTrailer {
    */
   private long lastDataBlockOffset;
 
-  /** Raw key comparator class name in version 2 */
-  private String comparatorClassName = KeyValue.KEY_COMPARATOR.getClass().getName();
+  /** Raw key comparator class name in version 3 */
+  private String comparatorClassName = KeyValue.COMPARATOR.getLegacyKeyComparatorName();
 
   /** The {@link HFile} format major version. */
   private final int majorVersion;
@@ -214,6 +214,8 @@ public class FixedFileTrailer {
       .setNumDataIndexLevels(numDataIndexLevels)
       .setFirstDataBlockOffset(firstDataBlockOffset)
       .setLastDataBlockOffset(lastDataBlockOffset)
+      // TODO this is a classname encoded into an HFile's trailer. We are going to need to have
+      // some compat code here.
       .setComparatorClassName(comparatorClassName)
       .setCompressionCodec(compressionCodec.ordinal())
       .build().writeDelimitedTo(baos);
@@ -324,6 +326,8 @@ public class FixedFileTrailer {
       lastDataBlockOffset = builder.getLastDataBlockOffset();
     }
     if (builder.hasComparatorClassName()) {
+      // TODO this is a classname encoded into an HFile's trailer. We are going to need to have
+      // some compat code here.
       setComparatorClass(getComparatorClass(builder.getComparatorClassName()));
     }
     if (builder.hasCompressionCodec()) {
@@ -351,6 +355,8 @@ public class FixedFileTrailer {
     numDataIndexLevels = input.readInt();
     firstDataBlockOffset = input.readLong();
     lastDataBlockOffset = input.readLong();
+    // TODO this is a classname encoded into an HFile's trailer. We are going to need to have
+    // some compat code here.
     setComparatorClass(getComparatorClass(Bytes.readStringFixedSize(input,
         MAX_COMPARATOR_NAME_LENGTH)));
   }
@@ -555,30 +561,53 @@ public class FixedFileTrailer {
     return minorVersion;
   }
 
-  @SuppressWarnings("rawtypes")
-  public void setComparatorClass(Class<? extends RawComparator> klass) {
-    // Is the comparator instantiable
+  public void setComparatorClass(Class<? extends KVComparator> klass) {
+    // Is the comparator instantiable?
     try {
-      klass.newInstance();
+      KVComparator comp = klass.newInstance();
+
+      // HFile V2 legacy comparator class names.
+      if (KeyValue.COMPARATOR.getClass().equals(klass)) {
+        comparatorClassName = KeyValue.COMPARATOR.getLegacyKeyComparatorName();
+      } else if (KeyValue.META_COMPARATOR.getClass().equals(klass)) {
+        comparatorClassName = KeyValue.META_COMPARATOR.getLegacyKeyComparatorName();
+      } else if (KeyValue.RAW_COMPARATOR.getClass().equals(klass)) {
+        comparatorClassName = KeyValue.RAW_COMPARATOR.getLegacyKeyComparatorName();
+      } else {
+        // If the name wasn't one of the legacy names, maybe it's a legitimate new kind of comparator.
+        comparatorClassName = klass.getName();
+      }
+
     } catch (Exception e) {
       throw new RuntimeException("Comparator class " + klass.getName() +
         " is not instantiable", e);
     }
-    comparatorClassName = klass.getName();
+
   }
 
   @SuppressWarnings("unchecked")
-  private static Class<? extends RawComparator<byte[]>> getComparatorClass(
+  private static Class<? extends KVComparator> getComparatorClass(
       String comparatorClassName) throws IOException {
     try {
-      return (Class<? extends RawComparator<byte[]>>)
+      // HFile V2 legacy comparator class names.
+      if (comparatorClassName.equals(KeyValue.COMPARATOR.getLegacyKeyComparatorName())) {
+        comparatorClassName = KeyValue.COMPARATOR.getClass().getName();
+      } else if (comparatorClassName.equals(KeyValue.META_COMPARATOR.getLegacyKeyComparatorName())) {
+        comparatorClassName = KeyValue.META_COMPARATOR.getClass().getName();
+      } else if (comparatorClassName.equals(KeyValue.RAW_COMPARATOR.getLegacyKeyComparatorName())) {
+        comparatorClassName = KeyValue.RAW_COMPARATOR.getClass().getName();
+      }
+
+      // If the name wasn't one of the legacy names, maybe it's a legitimate new kind of comparator.
+
+      return (Class<? extends KVComparator>)
           Class.forName(comparatorClassName);
     } catch (ClassNotFoundException ex) {
       throw new IOException(ex);
     }
   }
 
-  public static RawComparator<byte[]> createComparator(
+  public static KVComparator createComparator(
       String comparatorClassName) throws IOException {
     try {
       return getComparatorClass(comparatorClassName).newInstance();
@@ -591,7 +620,7 @@ public class FixedFileTrailer {
     }
   }
 
-  RawComparator<byte[]> createComparator() throws IOException {
+  KVComparator createComparator() throws IOException {
     expectAtLeastMajorVersion(2);
     return createComparator(comparatorClassName);
   }

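The FixedFileTrailer changes are the compatibility-sensitive part of this commit: the trailer persists a comparator class name, existing V2 files on disk carry the legacy KeyComparator names, and the TODOs flag that both read and write paths need the mapping. A sketch of the intended round trip, using only names that appear in the hunks above (the printout is illustrative):

    import org.apache.hadoop.hbase.KeyValue;

    public class TrailerCompatSketch {
      // Reading path: map a legacy V2 comparator name found in an old trailer
      // to the current class name, as getComparatorClass above does.
      static String resolve(String nameFromTrailer) {
        if (nameFromTrailer.equals(KeyValue.COMPARATOR.getLegacyKeyComparatorName())) {
          return KeyValue.COMPARATOR.getClass().getName();
        } else if (nameFromTrailer.equals(
            KeyValue.META_COMPARATOR.getLegacyKeyComparatorName())) {
          return KeyValue.META_COMPARATOR.getClass().getName();
        } else if (nameFromTrailer.equals(
            KeyValue.RAW_COMPARATOR.getLegacyKeyComparatorName())) {
          return KeyValue.RAW_COMPARATOR.getClass().getName();
        }
        return nameFromTrailer; // unknown: maybe a legitimate new comparator
      }

      public static void main(String[] args) {
        // Writing path stores the legacy name; reading path resolves it back.
        String stored = KeyValue.COMPARATOR.getLegacyKeyComparatorName();
        System.out.println(stored + " -> " + resolve(stored));
      }
    }
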
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java Thu Aug 29 20:45:04 2013
@@ -53,7 +53,7 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -336,7 +336,7 @@ public class HFile {
     protected Compression.Algorithm compression =
         HFile.DEFAULT_COMPRESSION_ALGORITHM;
     protected HFileDataBlockEncoder encoder = NoOpDataBlockEncoder.INSTANCE;
-    protected KeyComparator comparator = KeyValue.KEY_COMPARATOR;
+    protected KVComparator comparator = KeyValue.COMPARATOR;
     protected InetSocketAddress[] favoredNodes;
     protected ChecksumType checksumType = HFile.DEFAULT_CHECKSUM_TYPE;
     protected int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM;
@@ -384,7 +384,7 @@ public class HFile {
       return this;
     }
 
-    public WriterFactory withComparator(KeyComparator comparator) {
+    public WriterFactory withComparator(KVComparator comparator) {
       Preconditions.checkNotNull(comparator);
       this.comparator = comparator;
       return this;
@@ -432,7 +432,7 @@ public class HFile {
         FSDataOutputStream ostream, int blockSize,
         Compression.Algorithm compress,
         HFileDataBlockEncoder dataBlockEncoder,
-        KeyComparator comparator, ChecksumType checksumType,
+        KVComparator comparator, ChecksumType checksumType,
         int bytesPerChecksum, boolean includeMVCCReadpoint) throws IOException;
   }
 
@@ -489,7 +489,7 @@ public class HFile {
      */
     String getName();
 
-    RawComparator<byte []> getComparator();
+    KVComparator getComparator();
 
     HFileScanner getScanner(boolean cacheBlocks,
        final boolean pread, final boolean isCompaction);

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java Thu Aug 29 20:45:04 2013
@@ -38,13 +38,13 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.HFile.CachingBlockReader;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.CompoundBloomFilterWriter;
-import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.util.StringUtils;
 
@@ -106,7 +106,7 @@ public class HFileBlockIndex {
    */
   public static class BlockIndexReader implements HeapSize {
     /** Needed doing lookup on blocks. */
-    private final RawComparator<byte[]> comparator;
+    private final KVComparator comparator;
 
     // Root-level data.
     private byte[][] blockKeys;
@@ -132,13 +132,13 @@ public class HFileBlockIndex {
     /** A way to read {@link HFile} blocks at a given offset */
     private CachingBlockReader cachingBlockReader;
 
-    public BlockIndexReader(final RawComparator<byte[]> c, final int treeLevel,
+    public BlockIndexReader(final KVComparator c, final int treeLevel,
         final CachingBlockReader cachingBlockReader) {
       this(c, treeLevel);
       this.cachingBlockReader = cachingBlockReader;
     }
 
-    public BlockIndexReader(final RawComparator<byte[]> c, final int treeLevel)
+    public BlockIndexReader(final KVComparator c, final int treeLevel)
     {
       comparator = c;
       searchTreeLevel = treeLevel;
@@ -481,7 +481,7 @@ public class HFileBlockIndex {
      */
     static int binarySearchNonRootIndex(byte[] key, int keyOffset,
         int keyLength, ByteBuffer nonRootIndex,
-        RawComparator<byte[]> comparator) {
+        KVComparator comparator) {
 
       int numEntries = nonRootIndex.getInt(0);
       int low = 0;
@@ -516,7 +516,7 @@ public class HFileBlockIndex {
 
         // we have to compare in this order, because the comparator order
         // has special logic when the 'left side' is a special key.
-        int cmp = comparator.compare(key, keyOffset, keyLength,
+        int cmp = comparator.compareFlatKey(key, keyOffset, keyLength,
             nonRootIndex.array(), nonRootIndex.arrayOffset() + midKeyOffset,
             midLength);
 
@@ -568,7 +568,7 @@ public class HFileBlockIndex {
      *
      */
     static int locateNonRootIndexEntry(ByteBuffer nonRootBlock, byte[] key,
-        int keyOffset, int keyLength, RawComparator<byte[]> comparator) {
+        int keyOffset, int keyLength, KVComparator comparator) {
       int entryIndex = binarySearchNonRootIndex(key, keyOffset, keyLength,
           nonRootBlock, comparator);
 

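BlockIndexReader's binary search now takes a KVComparator and compares flat keys. The real method walks a ByteBuffer-encoded non-root index block; a simplified sketch over an in-memory array shows the same search logic (the array layout and names are assumptions, not the patch's data structures):

    import org.apache.hadoop.hbase.KeyValue.KVComparator;

    public class FlatKeySearchSketch {
      // Binary search over sorted flat keys, in the spirit of
      // binarySearchNonRootIndex above.
      static int search(byte[][] sortedFlatKeys, byte[] key, KVComparator c) {
        int low = 0, high = sortedFlatKeys.length - 1;
        while (low <= high) {
          int mid = (low + high) >>> 1;
          byte[] midKey = sortedFlatKeys[mid];
          int cmp = c.compareFlatKey(key, 0, key.length, midKey, 0, midKey.length);
          if (cmp > 0) {
            low = mid + 1;
          } else if (cmp < 0) {
            high = mid - 1;
          } else {
            return mid;
          }
        }
        return -(low + 1); // insertion point, java.util.Arrays style
      }
    }
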
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java Thu Aug 29 20:45:04 2013
@@ -120,7 +120,7 @@ public class HFileReaderV2 extends Abstr
     dataBlockIndexReader = new HFileBlockIndex.BlockIndexReader(comparator,
         trailer.getNumDataIndexLevels(), this);
     metaBlockIndexReader = new HFileBlockIndex.BlockIndexReader(
-        Bytes.BYTES_RAWCOMPARATOR, 1);
+        KeyValue.RAW_COMPARATOR, 1);
 
     // Parse load-on-open data.
 
@@ -500,7 +500,7 @@ public class HFileReaderV2 extends Abstr
       int compared;
       if (isSeeked()) {
         ByteBuffer bb = getKey();
-        compared = reader.getComparator().compare(key, offset,
+        compared = reader.getComparator().compareFlatKey(key, offset,
             length, bb.array(), bb.arrayOffset(), bb.limit());
         if (compared < 1) {
           // If the required key is less than or equal to current key, then
@@ -509,7 +509,7 @@ public class HFileReaderV2 extends Abstr
         } else {
           if (this.nextIndexedKey != null &&
               (this.nextIndexedKey == HConstants.NO_NEXT_INDEXED_KEY ||
-               reader.getComparator().compare(key, offset, length,
+               reader.getComparator().compareFlatKey(key, offset, length,
                    nextIndexedKey, 0, nextIndexedKey.length) < 0)) {
             // The reader shall continue to scan the current data block instead of querying the
             // block index as long as it knows the target key is strictly smaller than
@@ -535,7 +535,7 @@ public class HFileReaderV2 extends Abstr
       }
       ByteBuffer firstKey = getFirstKeyInBlock(seekToBlock);
 
-      if (reader.getComparator().compare(firstKey.array(),
+      if (reader.getComparator().compareFlatKey(firstKey.array(),
           firstKey.arrayOffset(), firstKey.limit(), key, offset, length) == 0)
       {
         long previousBlockOffset = seekToBlock.getPrevBlockOffset();
@@ -851,7 +851,7 @@ public class HFileReaderV2 extends Abstr
 
         int keyOffset = blockBuffer.arrayOffset() + blockBuffer.position()
             + KEY_VALUE_LEN_SIZE;
-        int comp = reader.getComparator().compare(key, offset, length,
+        int comp = reader.getComparator().compareFlatKey(key, offset, length,
             blockBuffer.array(), keyOffset, klen);
 
         if (comp == 0) {

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java Thu Aug 29 20:45:04 2013
@@ -33,7 +33,7 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
 import org.apache.hadoop.hbase.io.hfile.HFileBlock.BlockWritable;
@@ -100,7 +100,7 @@ public class HFileWriterV2 extends Abstr
     public Writer createWriter(FileSystem fs, Path path,
         FSDataOutputStream ostream, int blockSize,
         Compression.Algorithm compress, HFileDataBlockEncoder blockEncoder,
-        final KeyComparator comparator, final ChecksumType checksumType,
+        final KVComparator comparator, final ChecksumType checksumType,
         final int bytesPerChecksum, boolean includeMVCCReadpoint) throws IOException {
       return new HFileWriterV2(conf, cacheConf, fs, path, ostream, blockSize, compress,
           blockEncoder, comparator, checksumType, bytesPerChecksum, includeMVCCReadpoint);
@@ -111,7 +111,7 @@ public class HFileWriterV2 extends Abstr
   public HFileWriterV2(Configuration conf, CacheConfig cacheConf,
       FileSystem fs, Path path, FSDataOutputStream ostream, int blockSize,
       Compression.Algorithm compressAlgo, HFileDataBlockEncoder blockEncoder,
-      final KeyComparator comparator, final ChecksumType checksumType,
+      final KVComparator comparator, final ChecksumType checksumType,
       final int bytesPerChecksum, final boolean includeMVCCReadpoint) throws IOException {
     super(cacheConf,
         ostream == null ? createOutputStream(conf, fs, path, null) : ostream,

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java Thu Aug 29 20:45:04 2013
@@ -74,7 +74,7 @@ class GetClosestRowBeforeTracker {
     this.tablenamePlusDelimiterLength = metaregion? l + 1: -1;
     this.oldestts = System.currentTimeMillis() - ttl;
     this.kvcomparator = c;
-    KeyValue.RowComparator rc = new KeyValue.RowComparator(this.kvcomparator);
+    KeyValue.RowOnlyComparator rc = new KeyValue.RowOnlyComparator(this.kvcomparator);
     this.deletes = new TreeMap<KeyValue, NavigableSet<KeyValue>>(rc);
   }
 

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java Thu Aug 29 20:45:04 2013
@@ -529,7 +529,7 @@ public class HRegionFileSystem {
       byte[] lastKey = f.createReader().getLastKey();      
       // If lastKey is null means storefile is empty.
       if (lastKey == null) return null;
-      if (f.getReader().getComparator().compare(splitKey.getBuffer(), 
+      if (f.getReader().getComparator().compareFlatKey(splitKey.getBuffer(), 
           splitKey.getKeyOffset(), splitKey.getKeyLength(), lastKey, 0, lastKey.length) > 0) {
         return null;
       }
@@ -539,7 +539,7 @@ public class HRegionFileSystem {
       byte[] firstKey = f.createReader().getFirstKey();
       // If firstKey is null means storefile is empty.
       if (firstKey == null) return null;
-      if (f.getReader().getComparator().compare(splitKey.getBuffer(), 
+      if (f.getReader().getComparator().compareFlatKey(splitKey.getBuffer(), 
           splitKey.getKeyOffset(), splitKey.getKeyLength(), firstKey, 0, firstKey.length) < 0) {
         return null;
       }      

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java Thu Aug 29 20:45:04 2013
@@ -81,7 +81,7 @@ public class ScanQueryMatcher {
   private final KeyValue startKey;
 
   /** Row comparator for the region this query is for */
-  private final KeyValue.KeyComparator rowComparator;
+  private final KeyValue.KVComparator rowComparator;
 
   /* row is not private for tests */
   /** Row the query is on */
@@ -145,7 +145,7 @@ public class ScanQueryMatcher {
       NavigableSet<byte[]> columns, ScanType scanType,
       long readPointToUse, long earliestPutTs, long oldestUnexpiredTS) {
     this.tr = scan.getTimeRange();
-    this.rowComparator = scanInfo.getComparator().getRawComparator();
+    this.rowComparator = scanInfo.getComparator();
     this.deletes =  new ScanDeleteTracker();
     this.stopRow = scan.getStopRow();
     this.startKey = KeyValue.createFirstDeleteFamilyOnRow(scan.getStartRow(),

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java Thu Aug 29 20:45:04 2013
@@ -798,7 +798,7 @@ public class StoreFile {
           .withBlockSize(blocksize)
           .withCompression(compress)
           .withDataBlockEncoder(this.dataBlockEncoder)
-          .withComparator(comparator.getRawComparator())
+          .withComparator(comparator)
           .withChecksumType(checksumType)
           .withBytesPerChecksum(bytesPerChecksum)
           .withFavoredNodes(favoredNodes)
@@ -877,7 +877,7 @@ public class StoreFile {
      * @param kv
      */
     public void trackTimestamps(final KeyValue kv) {
-      if (KeyValue.Type.Put.getCode() == kv.getType()) {
+      if (KeyValue.Type.Put.getCode() == kv.getTypeByte()) {
         earliestPutTs = Math.min(earliestPutTs, kv.getTimestamp());
       }
       if (!isTimeRangeTrackerSet) {
@@ -939,7 +939,7 @@ public class StoreFile {
           }
           generalBloomFilterWriter.add(bloomKey, bloomKeyOffset, bloomKeyLen);
           if (lastBloomKey != null
-              && generalBloomFilterWriter.getComparator().compare(bloomKey,
+              && generalBloomFilterWriter.getComparator().compareFlatKey(bloomKey,
                   bloomKeyOffset, bloomKeyLen, lastBloomKey,
                   lastBloomKeyOffset, lastBloomKeyLen) <= 0) {
             throw new IOException("Non-increasing Bloom keys: "
@@ -1105,7 +1105,7 @@ public class StoreFile {
       this.reader = null;
     }
 
-    public RawComparator<byte []> getComparator() {
+    public KVComparator getComparator() {
       return reader.getComparator();
     }
 
@@ -1333,7 +1333,7 @@ public class StoreFile {
           // from the file info. For row-column Bloom filters this is not yet
           // a sufficient condition to return false.
           boolean keyIsAfterLast = lastBloomKey != null
-              && bloomFilter.getComparator().compare(key, lastBloomKey) > 0;
+              && bloomFilter.getComparator().compareFlatKey(key, lastBloomKey) > 0;
 
           if (bloomFilterType == BloomType.ROWCOL) {
             // Since a Row Delete is essentially a DeleteFamily applied to all
@@ -1344,7 +1344,7 @@ public class StoreFile {
                 null, 0, 0);
 
             if (keyIsAfterLast
-                && bloomFilter.getComparator().compare(rowBloomKey,
+                && bloomFilter.getComparator().compareFlatKey(rowBloomKey,
                     lastBloomKey) > 0) {
               exists = false;
             } else {
@@ -1388,9 +1388,9 @@ public class StoreFile {
       }
       KeyValue startKeyValue = KeyValue.createFirstOnRow(scan.getStartRow());
       KeyValue stopKeyValue = KeyValue.createLastOnRow(scan.getStopRow());
-      boolean nonOverLapping = (getComparator().compare(this.getFirstKey(),
+      boolean nonOverLapping = (getComparator().compareFlatKey(this.getFirstKey(),
         stopKeyValue.getKey()) > 0 && !Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW))
-          || getComparator().compare(this.getLastKey(), startKeyValue.getKey()) < 0;
+          || getComparator().compareFlatKey(this.getLastKey(), startKeyValue.getKey()) < 0;
       return !nonOverLapping;
     }
 

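StoreFile's scan-overlap test above now also goes through compareFlatKey. Restated with a hypothetical helper: a file overlaps the scan unless its first key is past the scan's stop key (when a stop row is set) or its last key is before the start key.

    import org.apache.hadoop.hbase.KeyValue.KVComparator;

    public class OverlapSketch {
      // Same logic as the nonOverLapping expression above, with the empty
      // stop-row check hoisted into a boolean parameter.
      static boolean overlaps(KVComparator c, byte[] fileFirstKey, byte[] fileLastKey,
          byte[] scanStartKey, byte[] scanStopKey, boolean stopRowIsOpenEnded) {
        boolean nonOverlapping =
            (c.compareFlatKey(fileFirstKey, scanStopKey) > 0 && !stopRowIsOpenEnded)
            || c.compareFlatKey(fileLastKey, scanStartKey) < 0;
        return !nonOverlapping;
      }
    }
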
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java Thu Aug 29 20:45:04 2013
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.util;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 
 /**
 * Common Bloom filter methods required at read and write time.
@@ -52,6 +52,6 @@ public interface BloomFilterBase {
   /**
    * @return Bloom key comparator
    */
-  RawComparator<byte[]> getComparator();
+  KVComparator getComparator();
 
 }

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java Thu Aug 29 20:45:04 2013
@@ -200,7 +200,7 @@ public final class BloomFilterFactory {
     // In case of compound Bloom filters we ignore the maxKeys hint.
     CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
         err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
-        bloomType == BloomType.ROWCOL ? KeyValue.KEY_COMPARATOR : Bytes.BYTES_RAWCOMPARATOR);
+        bloomType == BloomType.ROWCOL ? KeyValue.COMPARATOR : KeyValue.RAW_COMPARATOR);
     writer.addInlineBlockWriter(bloomWriter);
     return bloomWriter;
   }
@@ -231,7 +231,7 @@ public final class BloomFilterFactory {
     // In case of compound Bloom filters we ignore the maxKeys hint.
     CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
         err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
-        Bytes.BYTES_RAWCOMPARATOR);
+        KeyValue.RAW_COMPARATOR);
     writer.addInlineBlockWriter(bloomWriter);
     return bloomWriter;
   }

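The BloomFilterFactory hunks encode a rule worth stating: ROWCOL Bloom keys are serialized KeyValue keys and need the full comparator, while row-only and delete-family Bloom keys are raw bytes. A one-line sketch (the BloomType import path is an assumption):

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.KeyValue.KVComparator;
    import org.apache.hadoop.hbase.regionserver.BloomType;

    public class BloomComparatorChoiceSketch {
      // ROWCOL keys are flat KeyValue keys; everything else compares raw bytes.
      static KVComparator comparatorFor(BloomType bloomType) {
        return bloomType == BloomType.ROWCOL
            ? KeyValue.COMPARATOR : KeyValue.RAW_COMPARATOR;
      }
    }
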
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java Thu Aug 29 20:45:04 2013
@@ -20,6 +20,8 @@
 package org.apache.hadoop.hbase.util;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Writable;
 
@@ -625,8 +627,9 @@ public class ByteBloomFilter implements 
   }
 
   @Override
-  public RawComparator<byte[]> getComparator() {
-    return Bytes.BYTES_RAWCOMPARATOR;
+  public KVComparator getComparator() {
+    return KeyValue.RAW_COMPARATOR;
   }
 
   /**

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java Thu Aug 29 20:45:04 2013
@@ -24,12 +24,12 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.FixedFileTrailer;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileBlock;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex;
-import org.apache.hadoop.io.RawComparator;
 
 /**
  * A Bloom filter implementation built on top of {@link ByteBloomFilter},
@@ -131,7 +131,7 @@ public class CompoundBloomFilter extends
   }
 
   @Override
-  public RawComparator<byte[]> getComparator() {
+  public KVComparator getComparator() {
     return comparator;
   }
 

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterBase.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterBase.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterBase.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterBase.java Thu Aug 29 20:45:04 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.util;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.io.RawComparator;
 
 @InterfaceAudience.Private
@@ -51,7 +52,7 @@ public class CompoundBloomFilterBase imp
   protected int hashType;
   
   /** Comparator used to compare Bloom filter keys */
-  protected RawComparator<byte[]> comparator;
+  protected KVComparator comparator;
 
   @Override
   public long getMaxKeys() {
@@ -89,7 +90,7 @@ public class CompoundBloomFilterBase imp
   }
 
   @Override
-  public RawComparator<byte[]> getComparator() {
+  public KVComparator getComparator() {
     return comparator;
   }
 

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java Thu Aug 29 20:45:04 2013
@@ -28,10 +28,10 @@ import java.util.Queue;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex;
 import org.apache.hadoop.hbase.io.hfile.InlineBlockWriter;
-import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Writable;
 
 /**
@@ -89,7 +89,7 @@ public class CompoundBloomFilterWriter e
    */
   public CompoundBloomFilterWriter(int chunkByteSizeHint, float errorRate,
       int hashType, int maxFold, boolean cacheOnWrite,
-      RawComparator<byte[]> comparator) {
+      KVComparator comparator) {
     chunkByteSize = ByteBloomFilter.computeFoldableByteSize(
         chunkByteSizeHint * 8L, maxFold);
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java Thu Aug 29 20:45:04 2013
@@ -185,7 +185,7 @@ public class TestDataBlockEncoders {
           ByteBuffer.wrap(encodeBytes(encoding, originalBuffer));
       DataBlockEncoder encoder = encoding.getEncoder();
       DataBlockEncoder.EncodedSeeker seeker =
-          encoder.createSeeker(KeyValue.KEY_COMPARATOR, includesMemstoreTS);
+          encoder.createSeeker(KeyValue.COMPARATOR, includesMemstoreTS);
       seeker.setCurrentBuffer(encodedBuffer);
       encodedSeekers.add(seeker);
     }
@@ -240,7 +240,7 @@ public class TestDataBlockEncoders {
             "Bug while encoding using '%s'", encoder.toString()), e);
       }
       DataBlockEncoder.EncodedSeeker seeker =
-          encoder.createSeeker(KeyValue.KEY_COMPARATOR, includesMemstoreTS);
+          encoder.createSeeker(KeyValue.COMPARATOR, includesMemstoreTS);
       seeker.setCurrentBuffer(encodedBuffer);
       int i = 0;
       do {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java Thu Aug 29 20:45:04 2013
@@ -77,7 +77,7 @@ public class TestPrefixTreeEncoding {
     HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
         Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
     encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
-    EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false);
+    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
     byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
     ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
         DataBlockEncoding.ID_SIZE, onDiskBytes.length
@@ -117,7 +117,7 @@ public class TestPrefixTreeEncoding {
     HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
         Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
     encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
-    EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false);
+    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
     byte[] onDiskBytes=blkEncodingCtx.getOnDiskBytesWithHeader();
     ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
         DataBlockEncoding.ID_SIZE, onDiskBytes.length
@@ -143,7 +143,7 @@ public class TestPrefixTreeEncoding {
     HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
         Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
     encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
-    EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false);
+    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
     byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
     ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
         DataBlockEncoding.ID_SIZE, onDiskBytes.length
@@ -159,7 +159,7 @@ public class TestPrefixTreeEncoding {
     HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
         Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
     encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
-    EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR,
+    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR,
         false);
     byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
     ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java Thu Aug 29 20:45:04 2013
@@ -92,7 +92,7 @@ public class TestFixedFileTrailer {
 
     t.setLastDataBlockOffset(291);
     t.setNumDataIndexLevels(3);
-    t.setComparatorClass(KeyValue.KEY_COMPARATOR.getClass());
+    t.setComparatorClass(KeyValue.COMPARATOR.getClass());
     t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
     t.setUncompressedDataIndexSize(827398717L); // Something random.
 
@@ -209,7 +209,7 @@ public class TestFixedFileTrailer {
       assertEquals(expected.getFirstDataBlockOffset(),
           loaded.getFirstDataBlockOffset());
       assertTrue(
-          expected.createComparator() instanceof KeyValue.KeyComparator);
+          expected.createComparator() instanceof KeyValue.KVComparator);
       assertEquals(expected.getUncompressedDataIndexSize(),
           loaded.getUncompressedDataIndexSize());
     }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java Thu Aug 29 20:45:04 2013
@@ -35,7 +35,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
@@ -216,7 +215,7 @@ public class TestHFile extends HBaseTest
         .withBlockSize(minBlockSize)
         .withCompression(codec)
         // NOTE: This test is dependent on this deprecated nonstandard comparator
-        .withComparator(new KeyValue.RawKeyComparator())
+        .withComparator(new KeyValue.RawBytesComparator())
         .create();
     LOG.info(writer);
     writeRecords(writer);
@@ -350,36 +349,5 @@ public class TestHFile extends HBaseTest
     assertTrue(Compression.Algorithm.LZ4.ordinal() == 4);
   }
 
-  // This can't be an anonymous class because the compiler will not generate
-  // a nullary constructor for it.
-  static class CustomKeyComparator extends KeyComparator {
-    @Override
-    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2,
-        int l2) {
-      return -Bytes.compareTo(b1, s1, l1, b2, s2, l2);
-    }
-    @Override
-    public int compare(byte[] o1, byte[] o2) {
-      return compare(o1, 0, o1.length, o2, 0, o2.length);
-    }
-  }
-
-  public void testComparator() throws IOException {
-    if (cacheConf == null) cacheConf = new CacheConfig(conf);
-    Path mFile = new Path(ROOT_DIR, "meta.tfile");
-    FSDataOutputStream fout = createFSOutput(mFile);
-    KeyComparator comparator = new CustomKeyComparator();
-    Writer writer = HFile.getWriterFactory(conf, cacheConf)
-        .withOutputStream(fout)
-        .withBlockSize(minBlockSize)
-        .withComparator(comparator)
-        .create();
-    writer.append("3".getBytes(), "0".getBytes());
-    writer.append("2".getBytes(), "0".getBytes());
-    writer.append("1".getBytes(), "0".getBytes());
-    writer.close();
-  }
-
-
 }
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java Thu Aug 29 20:45:04 2013
@@ -42,11 +42,13 @@ import org.apache.hadoop.fs.FSDataInputS
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
-import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexChunk;
+import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.junit.Before;
@@ -173,7 +175,7 @@ public class TestHFileBlockIndex {
     BlockReaderWrapper brw = new BlockReaderWrapper(blockReader);
     HFileBlockIndex.BlockIndexReader indexReader =
         new HFileBlockIndex.BlockIndexReader(
-            Bytes.BYTES_RAWCOMPARATOR, numLevels, brw);
+            KeyValue.RAW_COMPARATOR, numLevels, brw);
 
     indexReader.readRootIndex(blockReader.blockRange(rootIndexOffset,
         fileSize).nextBlockWithBlockType(BlockType.ROOT_INDEX), numRootEntries);
@@ -355,7 +357,7 @@ public class TestHFileBlockIndex {
 
       int searchResult = BlockIndexReader.binarySearchNonRootIndex(
           arrayHoldingKey, searchKey.length / 2, searchKey.length, nonRootIndex,
-          Bytes.BYTES_RAWCOMPARATOR);
+          KeyValue.RAW_COMPARATOR);
       String lookupFailureMsg = "Failed to look up key #" + i + " ("
           + Bytes.toStringBinary(searchKey) + ")";
 
@@ -381,7 +383,7 @@ public class TestHFileBlockIndex {
       // higher-level API functions.
       boolean locateBlockResult =
         (BlockIndexReader.locateNonRootIndexEntry(nonRootIndex, arrayHoldingKey,
-            searchKey.length / 2, searchKey.length, Bytes.BYTES_RAWCOMPARATOR) != -1);
+            searchKey.length / 2, searchKey.length, KeyValue.RAW_COMPARATOR) != -1);
 
       if (i == 0) {
         assertFalse(locateBlockResult);
@@ -441,7 +443,7 @@ public class TestHFileBlockIndex {
     long expected = ClassSize.estimateBase(cl, false);
 
     HFileBlockIndex.BlockIndexReader bi =
-        new HFileBlockIndex.BlockIndexReader(Bytes.BYTES_RAWCOMPARATOR, 1);
+        new HFileBlockIndex.BlockIndexReader(KeyValue.RAW_COMPARATOR, 1);
     long actual = bi.heapSize();
 
     // Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
@@ -506,7 +508,7 @@ public class TestHFileBlockIndex {
           keyStrSet.add(Bytes.toStringBinary(k));
 
           if (i > 0) {
-            assertTrue(KeyValue.KEY_COMPARATOR.compare(keys[i - 1],
+            assertTrue(KeyValue.COMPARATOR.compareFlatKey(keys[i - 1],
                 keys[i]) < 0);
           }
         }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java Thu Aug 29 20:45:04 2013
@@ -166,7 +166,7 @@ public class TestHFilePerformance extend
             .withOutputStream(fout)
             .withBlockSize(minBlockSize)
             .withCompression(codecName)
-            .withComparator(new KeyValue.RawKeyComparator())
+            .withComparator(new KeyValue.RawBytesComparator())
             .create();
 
         // Writing value in one shot.

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java Thu Aug 29 20:45:04 2013
@@ -131,7 +131,7 @@ public class TestHFileSeek extends TestC
           .withOutputStream(fout)
           .withBlockSize(options.minBlockSize)
           .withCompression(options.compress)
-          .withComparator(new KeyValue.RawKeyComparator())
+          .withComparator(new KeyValue.RawBytesComparator())
           .create();
       try {
         BytesWritable key = new BytesWritable();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java Thu Aug 29 20:45:04 2013
@@ -39,13 +39,13 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
 import org.junit.Before;
@@ -137,13 +137,13 @@ public class TestHFileWriterV2 {
     HFileBlock.FSReader blockReader =
         new HFileBlock.FSReaderV2(fsdis, compressAlgo, fileSize);
     // Comparator class name is stored in the trailer in version 2.
-    RawComparator<byte []> comparator = trailer.createComparator();
+    KVComparator comparator = trailer.createComparator();
     HFileBlockIndex.BlockIndexReader dataBlockIndexReader =
         new HFileBlockIndex.BlockIndexReader(comparator,
             trailer.getNumDataIndexLevels());
     HFileBlockIndex.BlockIndexReader metaBlockIndexReader =
         new HFileBlockIndex.BlockIndexReader(
-            Bytes.BYTES_RAWCOMPARATOR, 1);
+            KeyValue.RAW_COMPARATOR, 1);
 
     HFileBlock.BlockIterator blockIter = blockReader.blockRange(
         trailer.getLoadOnOpenDataOffset(),
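
The version-2 trailer stores the comparator class name, and createComparator()
now instantiates it as a KVComparator rather than a raw RawComparator<byte[]>,
so both index readers take the same type. An annotated sketch of the lines
above (trailer is assumed to come from an opened HFile, as in the test):

    // was: RawComparator<byte[]> comparator = trailer.createComparator();
    KVComparator comparator = trailer.createComparator();
    HFileBlockIndex.BlockIndexReader dataIndex =
        new HFileBlockIndex.BlockIndexReader(comparator, trailer.getNumDataIndexLevels());
    // Meta blocks are keyed by plain byte[] names rather than serialized
    // KeyValue keys, so the raw-bytes singleton stands in for the old
    // Bytes.BYTES_RAWCOMPARATOR.
    HFileBlockIndex.BlockIndexReader metaIndex =
        new HFileBlockIndex.BlockIndexReader(KeyValue.RAW_COMPARATOR, 1);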

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java Thu Aug 29 20:45:04 2013
@@ -51,7 +51,7 @@ public class TestReseekTo {
             .withOutputStream(fout)
             .withBlockSize(4000)
             // NOTE: This test is dependent on this deprecated nonstandard comparator
-            .withComparator(new KeyValue.RawKeyComparator())
+            .withComparator(new KeyValue.RawBytesComparator())
             .create();
     int numberOfKeys = 1000;
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java Thu Aug 29 20:45:04 2013
@@ -23,7 +23,6 @@ import java.io.IOException;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.RawComparator;
 import org.junit.experimental.categories.Category;
@@ -51,7 +50,7 @@ public class TestSeekTo extends HBaseTes
         .withOutputStream(fout)
         .withBlockSize(blocksize)
         // NOTE: This test is dependent on this deprecated nonstandard comparator
-        .withComparator(new KeyValue.RawKeyComparator())
+        .withComparator(KeyValue.RAW_COMPARATOR)
         .create();
     // 4 bytes * 3 * 2 for each key/value +
     // 3 for keys, 15 for values = 42 (woot)
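
Unlike the writer tests above, this one passes the KeyValue.RAW_COMPARATOR
singleton instead of constructing a RawBytesComparator. Both spellings appear
in this commit; a sketch under the assumption (consistent with the hunks) that
the singleton is a shared RawBytesComparator instance:

    byte[] k = Bytes.toBytes("k");
    KeyValue.KVComparator viaSingleton = KeyValue.RAW_COMPARATOR;
    KeyValue.KVComparator viaNewInstance = new KeyValue.RawBytesComparator();
    // Identical ordering either way; the singleton just avoids an allocation.
    assert viaSingleton.compareFlatKey(k, k) == viaNewInstance.compareFlatKey(k, k);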

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java Thu Aug 29 20:45:04 2013
@@ -536,9 +536,10 @@ public class TestMemStore extends TestCa
       List<KeyValue> results = new ArrayList<KeyValue>();
       for (int i = 0; scanner.next(results); i++) {
         int rowId = startRowId + i;
+        KeyValue left = results.get(0);
+        byte[] row1 = Bytes.toBytes(rowId);
         assertTrue("Row name",
-          KeyValue.COMPARATOR.compareRows(results.get(0),
-          Bytes.toBytes(rowId)) == 0);
+          KeyValue.COMPARATOR.compareRows(left.getBuffer(), left.getRowOffset(),
+            (int) left.getRowLength(), row1, 0, row1.length) == 0);
         assertEquals("Count of columns", QUALIFIER_COUNT, results.size());
         List<KeyValue> row = new ArrayList<KeyValue>();
         for (KeyValue kv : results) {
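
The call site no longer uses the compareRows(KeyValue, byte[]) convenience
form; it now supplies an explicit buffer, offset, and length for the row. A
sketch of the six-argument call (the KeyValue contents are made up; the
signature is the one used above):

    KeyValue kv = new KeyValue(Bytes.toBytes("row-7"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));
    byte[] expectedRow = Bytes.toBytes("row-7");
    // Row-only comparison: family, qualifier, timestamp and type are ignored.
    boolean sameRow = KeyValue.COMPARATOR.compareRows(
        kv.getBuffer(), kv.getRowOffset(), kv.getRowLength(),
        expectedRow, 0, expectedRow.length) == 0; // true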

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1518817&r1=1518816&r2=1518817&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Thu Aug 29 20:45:04 2013
@@ -327,7 +327,7 @@ public class TestStoreFile extends HBase
              (topScanner.isSeeked() && topScanner.next())) {
         key = topScanner.getKey();
 
-        if (topScanner.getReader().getComparator().compare(key.array(),
+        if (topScanner.getReader().getComparator().compareFlatKey(key.array(),
           key.arrayOffset(), key.limit(), midkey, 0, midkey.length) < 0) {
           fail("key=" + Bytes.toStringBinary(key) + " < midkey=" +
               Bytes.toStringBinary(midkey));
@@ -377,7 +377,7 @@ public class TestStoreFile extends HBase
       while ((!topScanner.isSeeked() && topScanner.seekTo()) ||
           topScanner.next()) {
         key = topScanner.getKey();
-        assertTrue(topScanner.getReader().getComparator().compare(key.array(),
+        assertTrue(topScanner.getReader().getComparator().compareFlatKey(key.array(),
           key.arrayOffset(), key.limit(), badmidkey, 0, badmidkey.length) >= 0);
         if (first) {
           first = false;
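
Scanner keys come back as ByteBuffers, so the comparisons above unpack
array(), arrayOffset(), and limit() before calling compareFlatKey. A sketch of
that pattern with made-up keys (flat keys obtained via KeyValue.getKey()):

    byte[] mid = new KeyValue(Bytes.toBytes("m"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 0L, new byte[0]).getKey();
    ByteBuffer key = ByteBuffer.wrap(new KeyValue(Bytes.toBytes("z"),
        Bytes.toBytes("f"), Bytes.toBytes("q"), 0L, new byte[0]).getKey());
    // compareFlatKey parses and orders serialized keys field by field, row first.
    boolean atOrPastMid = KeyValue.COMPARATOR.compareFlatKey(
        key.array(), key.arrayOffset(), key.limit(),
        mid, 0, mid.length) >= 0; // true: row "z" sorts after row "m"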


