hbase-commits mailing list archives

From jmhs...@apache.org
Subject svn commit: r1515591 - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ hbase-common/src/main/java/org/apache/hadoop/hbase/ hbase-common/src/main/java/org/apache/hado...
Date Mon, 19 Aug 2013 20:30:03 GMT
Author: jmhsieh
Date: Mon Aug 19 20:30:02 2013
New Revision: 1515591

URL: http://svn.apache.org/r1515591
Log:
HBASE-9262 Make KeyValue.KEY_COMPARATOR default for HFileWriterFactory
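
In effect, HFile.WriterFactory now initializes its comparator to KeyValue.KEY_COMPARATOR,
so writers that previously passed it explicitly can drop the call. A minimal before/after
sketch (conf, cacheConf, fs, and path assumed in scope; the variable names are illustrative):

    // Before: KeyValue ordering had to be requested explicitly.
    HFile.Writer w1 = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, path)
        .withComparator(KeyValue.KEY_COMPARATOR)
        .create();

    // After: KEY_COMPARATOR is the factory default, so this is equivalent.
    HFile.Writer w2 = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, path)
        .create();

Tests that depend on raw byte ordering instead opt in via the deprecated, test-only
KeyValue.RawKeyComparator (see the TestHFile, TestSeekTo, and TestReseekTo hunks below).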

Modified:
    hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
    hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
    hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java

Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java Mon Aug 19 20:30:02 2013
@@ -1281,7 +1281,7 @@ public class HConnectionManager {
       // checking is actually the last region in the table.
       byte[] endKey = possibleRegion.getRegionInfo().getEndKey();
       if (Bytes.equals(endKey, HConstants.EMPTY_END_ROW) ||
-          KeyValue.getRowComparator(tableName).compareRows(
+          tableName.getRowComparator().compareRows(
               endKey, 0, endKey.length, row, 0, row.length) > 0) {
         return possibleRegion;
       }

Modified: hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java (original)
+++ hbase/trunk/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java Mon Aug 19 20:30:02 2013
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import static org.apache.hadoop.hbase.util.Bytes.len;
+
 import com.google.common.base.Preconditions;
 import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
@@ -214,8 +216,8 @@ public class ColumnRangeFilter extends F
   public KeyValue getNextKeyHint(KeyValue kv) {
     return KeyValue.createFirstOnRow(kv.getBuffer(), kv.getRowOffset(), kv
         .getRowLength(), kv.getBuffer(), kv.getFamilyOffset(), kv
-        .getFamilyLength(), this.minColumn, 0, this.minColumn == null ? 0
-        : this.minColumn.length);
+        .getFamilyLength(), this.minColumn, 0, len(this.minColumn));
+
   }
 
   @Override

Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java Mon Aug 19 20:30:02 2013
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase;
 
+import static org.apache.hadoop.hbase.util.Bytes.len;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
@@ -97,22 +99,6 @@ public class KeyValue implements Cell, H
    */
   public static final KVComparator META_COMPARATOR = new MetaComparator();
 
-  /**
-   * Get the appropriate row comparator for the specified table.
-   *
-   * Hopefully we can get rid of this, I added this here because it's replacing
-   * something in HSK.  We should move completely off of that.
-   *
-   * @param tableName  The table name.
-   * @return The comparator.
-   */
-  public static KeyComparator getRowComparator(TableName tableName) {
-     if(TableName.META_TABLE_NAME.equals(tableName)) {
-      return META_COMPARATOR.getRawComparator();
-    }
-    return COMPARATOR.getRawComparator();
-  }
-
   /** Size of the key length field in bytes*/
   public static final int KEY_LENGTH_SIZE = Bytes.SIZEOF_INT;
 
@@ -317,7 +303,7 @@ public class KeyValue implements Cell, H
    * @param timestamp
    */
   public KeyValue(final byte [] row, final long timestamp) {
-    this(row, timestamp, Type.Maximum);
+    this(row, null, null, timestamp, Type.Maximum, null);
   }
 
   /**
@@ -393,31 +379,8 @@ public class KeyValue implements Cell, H
   public KeyValue(final byte[] row, final byte[] family,
       final byte[] qualifier, final long timestamp, Type type,
       final byte[] value) {
-    this(row, family, qualifier, 0, qualifier==null ? 0 : qualifier.length,
-        timestamp, type, value, 0, value==null ? 0 : value.length);
-  }
-
-  /**
-   * Constructs KeyValue structure filled with specified values.
-   * @param row row key
-   * @param family family name
-   * @param qualifier column qualifier
-   * @param qoffset qualifier offset
-   * @param qlength qualifier length
-   * @param timestamp version timestamp
-   * @param type key type
-   * @param value column value
-   * @param voffset value offset
-   * @param vlength value length
-   * @throws IllegalArgumentException
-   */
-  public KeyValue(byte [] row, byte [] family,
-      byte [] qualifier, int qoffset, int qlength, long timestamp, Type type,
-      byte [] value, int voffset, int vlength) {
-    this(row, 0, row==null ? 0 : row.length,
-        family, 0, family==null ? 0 : family.length,
-        qualifier, qoffset, qlength, timestamp, type,
-        value, voffset, vlength);
+    this(row, 0, len(row),   family, 0, len(family),   qualifier, 0, len(qualifier),
+        timestamp, type,   value, 0, len(value));
   }
 
   /**
@@ -530,85 +493,6 @@ public class KeyValue implements Cell, H
   }
 
   /**
-   * Constructs KeyValue structure filled with specified values. Uses the provided buffer as its
-   * backing data buffer.
-   * <p>
-   * Column is split into two fields, family and qualifier.
-   *
-   * @param buffer the bytes buffer to use
-   * @param row row key
-   * @param roffset row offset
-   * @param rlength row length
-   * @param family family name
-   * @param foffset family offset
-   * @param flength family length
-   * @param qualifier column qualifier
-   * @param qoffset qualifier offset
-   * @param qlength qualifier length
-   * @param timestamp version timestamp
-   * @param type key type
-   * @param value column value
-   * @param voffset value offset
-   * @param vlength value length
-   * @throws IllegalArgumentException an illegal value was passed or there is insufficient space
-   * remaining in the buffer
-   */
-  public KeyValue(byte [] buffer,
-      final byte [] row, final int roffset, final int rlength,
-      final byte [] family, final int foffset, final int flength,
-      final byte [] qualifier, final int qoffset, final int qlength,
-      final long timestamp, final Type type,
-      final byte [] value, final int voffset, final int vlength) {
-
-    this(buffer, 0,
-        row, roffset, rlength,
-        family, foffset, flength,
-        qualifier, qoffset, qlength,
-        timestamp, type,
-        value, voffset, vlength);
-  }
-
-  /**
-   * Constructs KeyValue structure filled with specified values. Uses the provided buffer as the
-   * data buffer.
-   * <p>
-   * Column is split into two fields, family and qualifier.
-   *
-   * @param buffer the bytes buffer to use
-   * @param boffset buffer offset
-   * @param row row key
-   * @param roffset row offset
-   * @param rlength row length
-   * @param family family name
-   * @param foffset family offset
-   * @param flength family length
-   * @param qualifier column qualifier
-   * @param qoffset qualifier offset
-   * @param qlength qualifier length
-   * @param timestamp version timestamp
-   * @param type key type
-   * @param value column value
-   * @param voffset value offset
-   * @param vlength value length
-   * @throws IllegalArgumentException an illegal value was passed or there is insufficient space
-   * remaining in the buffer
-   */
-  public KeyValue(byte [] buffer, final int boffset,
-      final byte [] row, final int roffset, final int rlength,
-      final byte [] family, final int foffset, final int flength,
-      final byte [] qualifier, final int qoffset, final int qlength,
-      final long timestamp, final Type type,
-      final byte [] value, final int voffset, final int vlength) {
-
-    this.bytes  = buffer;
-    this.length = writeByteArray(buffer, boffset,
-        row, roffset, rlength,
-        family, foffset, flength, qualifier, qoffset, qlength,
-        timestamp, type, value, voffset, vlength);
-    this.offset = boffset;
-  }
-
-  /**
    * Checks the parameters passed to a constructor.
    *
    * @param row row key
@@ -683,7 +567,7 @@ public class KeyValue implements Cell, H
   * @throws IllegalArgumentException an illegal value was passed or there is insufficient space
    * remaining in the buffer
    */
-  static int writeByteArray(byte [] buffer, final int boffset,
+  private static int writeByteArray(byte [] buffer, final int boffset,
       final byte [] row, final int roffset, final int rlength,
       final byte [] family, final int foffset, int flength,
       final byte [] qualifier, final int qoffset, int qlength,
@@ -1083,7 +967,7 @@ public class KeyValue implements Cell, H
   /**
    * @return Family offset
    */
-  public int getFamilyOffset(int rlength) {
+  private int getFamilyOffset(int rlength) {
     return this.offset + ROW_OFFSET + Bytes.SIZEOF_SHORT + rlength + Bytes.SIZEOF_BYTE;
   }
 
@@ -1121,7 +1005,7 @@ public class KeyValue implements Cell, H
   /**
    * @return Qualifier offset
    */
-  public int getQualifierOffset(int foffset) {
+  private int getQualifierOffset(int foffset) {
     return foffset + getFamilyLength(foffset);
   }
 
@@ -1136,7 +1020,7 @@ public class KeyValue implements Cell, H
   /**
    * @return Qualifier length
    */
-  public int getQualifierLength(int rlength, int flength) {
+  private int getQualifierLength(int rlength, int flength) {
     return getKeyLength() - (int) getKeyDataStructureSize(rlength, flength, 0);
   }
 
@@ -1152,7 +1036,7 @@ public class KeyValue implements Cell, H
   /**
    * @return Column (family + qualifier) length
    */
-  public int getTotalColumnLength(int rlength, int foffset) {
+  private int getTotalColumnLength(int rlength, int foffset) {
     int flength = getFamilyLength(foffset);
     int qlength = getQualifierLength(rlength,flength);
     return flength + qlength;
@@ -1169,7 +1053,7 @@ public class KeyValue implements Cell, H
    * @param keylength Pass if you have it to save on a int creation.
    * @return Timestamp offset
    */
-  public int getTimestampOffset(final int keylength) {
+  private int getTimestampOffset(final int keylength) {
     return getKeyOffset() + keylength - TIMESTAMP_TYPE_SIZE;
   }
 
@@ -1298,8 +1182,9 @@ public class KeyValue implements Cell, H
   /**
    * @return Type of this KeyValue.
    */
+  @Deprecated
   public byte getType() {
-    return getType(getKeyLength());
+    return getTypeByte();
   }
 
   /**
@@ -1307,15 +1192,7 @@ public class KeyValue implements Cell, H
    */
   @Override
   public byte getTypeByte() {
-    return getType(getKeyLength());
-  }
-
-  /**
-   * @param keylength Pass if you have it to save on a int creation.
-   * @return Type of this KeyValue.
-   */
-  byte getType(final int keylength) {
-    return this.bytes[this.offset + keylength - 1 + ROW_OFFSET];
+    return this.bytes[this.offset + getKeyLength() - 1 + ROW_OFFSET];
   }
 
   /**
@@ -1332,21 +1209,21 @@ public class KeyValue implements Cell, H
    */
   public boolean isDeleteType() {
     // TODO: Fix this method name vis-a-vis isDelete!
-    return getType() == Type.Delete.getCode();
+    return getTypeByte() == Type.Delete.getCode();
   }
 
   /**
    * @return True if this KV is a delete family type.
    */
   public boolean isDeleteFamily() {
-    return getType() == Type.DeleteFamily.getCode();
+    return getTypeByte() == Type.DeleteFamily.getCode();
   }
 
   /**
    * @return True if this KV is a delete family-version type.
    */
   public boolean isDeleteFamilyVersion() {
-    return getType() == Type.DeleteFamilyVersion.getCode();
+    return getTypeByte() == Type.DeleteFamilyVersion.getCode();
   }
 
   /**
@@ -1354,7 +1231,7 @@ public class KeyValue implements Cell, H
    * @return True if this KV is a delete family or column type.
    */
   public boolean isDeleteColumnOrFamily() {
-    int t = getType();
+    int t = getTypeByte();
     return t == Type.DeleteColumn.getCode() || t == Type.DeleteFamily.getCode();
   }
 
@@ -1539,8 +1416,7 @@ public class KeyValue implements Cell, H
    * @return True if column matches
    */
   public boolean matchingColumn(final byte[] family, final byte[] qualifier) {
-    return matchingColumn(family, 0, family == null ? 0 : family.length,
-        qualifier, 0, qualifier == null ? 0 : qualifier.length);
+    return matchingColumn(family, 0, len(family), qualifier, 0, len(qualifier));
   }
 
   /**
@@ -2252,12 +2128,11 @@ public class KeyValue implements Cell, H
      throw new IllegalArgumentException("Buffer size " + (buffer.length - boffset) + " < " +
           iLength);
     }
-    return new KeyValue(buffer, boffset,
-        row, roffset, rlength,
-        family, foffset, flength,
-        qualifier, qoffset, qlength,
-        HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum,
+
+    int len = writeByteArray(buffer, boffset, row, roffset, rlength, family, foffset, flength,
+        qualifier, qoffset, qlength, HConstants.LATEST_TIMESTAMP, KeyValue.Type.Maximum,
         null, 0, 0);
+    return new KeyValue(buffer, boffset, len);
   }
 
   /**
@@ -2533,6 +2408,7 @@ public class KeyValue implements Cell, H
   public static class KeyComparator
       implements RawComparator<byte []>, SamePrefixComparator<byte[]> {
 
+    @Override
     public int compare(byte[] left, int loffset, int llength, byte[] right,
         int roffset, int rlength) {
       // Compare row
@@ -2706,7 +2582,7 @@ public class KeyValue implements Cell, H
      * @param rightKey the current block's real start key usually
      * @return newKey: the newly generated faked key
      */
-    public byte[] getShortMidpointKey(final byte[] leftKey, final byte[] rightKey) {
+    protected byte[] getShortMidpointKey(final byte[] leftKey, final byte[] rightKey) {
       if (rightKey == null) {
         throw new IllegalArgumentException("rightKey can not be null");
       }
@@ -2798,6 +2674,50 @@ public class KeyValue implements Cell, H
       }
       return 0;
     }
+
+    /**
+     * Generate a shorter faked key into index block. For example, consider a block boundary
+     * between the keys "the quick brown fox" and "the who test text".  We can use "the r" as the
+     * key for the index block entry since it is > all entries in the previous block and <= all
+     * entries in subsequent blocks.
+     *
+     * @param lastKeyOfPreviousBlock
+     * @param firstKeyInBlock
+     * @return a shortened null key, or if there are unexpected results, the firstKeyIn (new) Block
+     */
+    public byte[] calcIndexKey(byte[] lastKeyOfPreviousBlock, byte[] firstKeyInBlock) {
+      byte[] fakeKey = getShortMidpointKey(lastKeyOfPreviousBlock, firstKeyInBlock);
+      if (compare(fakeKey, firstKeyInBlock) > 0) {
+        LOG.error("Unexpected getShortMidpointKey result, fakeKey:"
+            + Bytes.toStringBinary(fakeKey) + ", firstKeyInBlock:"
+            + Bytes.toStringBinary(firstKeyInBlock));
+        return firstKeyInBlock;
+      }
+      if (lastKeyOfPreviousBlock != null && compare(lastKeyOfPreviousBlock, fakeKey) >= 0) {
+        LOG.error("Unexpected getShortMidpointKey result, lastKeyOfPreviousBlock:" +
+            Bytes.toStringBinary(lastKeyOfPreviousBlock) + ", fakeKey:" +
+            Bytes.toStringBinary(fakeKey));
+        return firstKeyInBlock;
+      }
+      return fakeKey;
+    }
+  }
+
+  /**
+   * This is a TEST only Comparator used in TestSeekTo and TestReseekTo.
+   */
+  @Deprecated
+  public static class RawKeyComparator extends KeyComparator {
+    RawComparator<byte []> getRawComparator() { return Bytes.BYTES_RAWCOMPARATOR; }
+    
+    public int compare(byte[] left, int loffset, int llength, byte[] right,
+        int roffset, int rlength) {
+      return getRawComparator().compare(left,  loffset, llength, right, roffset, rlength);
+    }
+    
+    public byte[] calcIndexKey(byte[] lastKeyOfPreviousBlock, byte[] firstKeyInBlock) {
+      return firstKeyInBlock;
+    }
   }
 
   /**

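For reference, the shortened-key idea behind getShortMidpointKey()/calcIndexKey() can be
sketched over plain byte arrays. This is a simplified illustration only, assuming raw
lexicographic ordering; it is not the committed implementation, which operates on full
KeyValue-encoded keys (row/family/qualifier/timestamp/type):

    // Find a short byte[] that is > left and <= right, e.g.
    // "the quick brown fox" vs "the who test text" yields "the r".
    static byte[] shortSeparator(byte[] left, byte[] right) {
      int i = 0;
      int minLen = Math.min(left.length, right.length);
      while (i < minLen && left[i] == right[i]) {
        i++;  // skip the common prefix
      }
      if (i < minLen && (right[i] & 0xff) > (left[i] & 0xff) + 1) {
        byte[] sep = java.util.Arrays.copyOf(left, i + 1);
        sep[i] = (byte) ((left[i] & 0xff) + 1);  // bump the first differing byte
        return sep;                              // short key: > left and <= right
      }
      return right;  // no room to shorten; right itself is always a valid bound
    }
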
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java Mon Aug 19 20:30:02 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -306,4 +307,16 @@ public final class TableName implements 
   public int compareTo(TableName tableName) {
     return this.nameAsString.compareTo(tableName.getNameAsString());
   }
+
+  /**
+   * Get the appropriate row comparator for this table.
+   *
+   * @return The comparator.
+   */
+  public KeyComparator getRowComparator() {
+     if(TableName.META_TABLE_NAME.equals(this)) {
+      return KeyValue.META_COMPARATOR.getRawComparator();
+    }
+    return KeyValue.COMPARATOR.getRawComparator();
+  }
 }

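As the HConnectionManager hunk above shows, call sites move from the static
KeyValue.getRowComparator(tableName) to this instance method. A sketch of the new call
shape, assuming endKey and row byte arrays in scope:

    // META_COMPARATOR's raw comparator for the meta table, COMPARATOR's otherwise.
    KeyValue.KeyComparator cmp = tableName.getRowComparator();
    int c = cmp.compareRows(endKey, 0, endKey.length, row, 0, row.length);
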
Modified: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java (original)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java Mon Aug 19 20:30:02 2013
@@ -123,6 +123,17 @@ public class Bytes {
   // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
   public static final int ESTIMATED_HEAP_TAX = 16;
 
+  
+  /**
+   * Returns length of the byte array, returning 0 if the array is null.
+   * Useful for calculating sizes.
+   * @param b byte array, which can be null
+   * @return 0 if b is null, otherwise returns length
+   */
+  final public static int len(byte[] b) {
+    return b == null ? 0 : b.length;
+  }
+
   /**
    * Byte array comparator class.
    */

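The new helper collapses the repeated "b == null ? 0 : b.length" ternaries seen in the
KeyValue and ColumnRangeFilter hunks above. Usage is trivial:

    import static org.apache.hadoop.hbase.util.Bytes.len;

    int a = len(null);                 // 0; no NullPointerException
    int n = len(new byte[] {1, 2, 3}); // 3; same as the array's length
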
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java Mon Aug 19 20:30:02 2013
@@ -32,12 +32,12 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Writable;
 
 /**
@@ -77,7 +77,7 @@ public abstract class AbstractHFileWrite
   protected long totalUncompressedBytes = 0;
 
   /** Key comparator. Used to ensure we write in order. */
-  protected final RawComparator<byte[]> comparator;
+  protected final KeyComparator comparator;
 
   /** Meta block names. */
   protected List<byte[]> metaNames = new ArrayList<byte[]>();
@@ -124,7 +124,7 @@ public abstract class AbstractHFileWrite
     this.blockEncoder = dataBlockEncoder != null
         ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
     this.comparator = comparator != null ? comparator
-        : Bytes.BYTES_RAWCOMPARATOR;
+        : KeyValue.KEY_COMPARATOR;
 
     closeOutputStream = path != null;
     this.cacheConf = cacheConf;

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java Mon Aug 19 20:30:02 2013
@@ -336,7 +336,7 @@ public class HFile {
     protected Compression.Algorithm compression =
         HFile.DEFAULT_COMPRESSION_ALGORITHM;
     protected HFileDataBlockEncoder encoder = NoOpDataBlockEncoder.INSTANCE;
-    protected KeyComparator comparator;
+    protected KeyComparator comparator = KeyValue.KEY_COMPARATOR;
     protected InetSocketAddress[] favoredNodes;
     protected ChecksumType checksumType = HFile.DEFAULT_CHECKSUM_TYPE;
     protected int bytesPerChecksum = DEFAULT_BYTES_PER_CHECKSUM;

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java Mon Aug 19 20:30:02 2013
@@ -173,28 +173,9 @@ public class HFileWriterV2 extends Abstr
     lastDataBlockOffset = outputStream.getPos();
     fsBlockWriter.writeHeaderAndData(outputStream);
     int onDiskSize = fsBlockWriter.getOnDiskSizeWithHeader();
-    // Generate a shorter faked key into index block. For example, consider a block boundary
-    // between the keys "the quick brown fox" and "the who test text".  We can use "the r" as the
-    // key for the index block entry since it is > all entries in the previous block and <= all
-    // entries in subsequent blocks.
-    if (comparator instanceof KeyComparator) {
-      byte[] fakeKey = ((KeyComparator) comparator).getShortMidpointKey(
-        lastKeyOfPreviousBlock, firstKeyInBlock);
-      if (comparator.compare(fakeKey, firstKeyInBlock) > 0) {
-        throw new IOException("Unexpected getShortMidpointKey result, fakeKey:"
-            + Bytes.toStringBinary(fakeKey) + ", firstKeyInBlock:"
-            + Bytes.toStringBinary(firstKeyInBlock));
-      }
-      if (lastKeyOfPreviousBlock != null && comparator.compare(lastKeyOfPreviousBlock,
-        fakeKey) >= 0) {
-        throw new IOException("Unexpected getShortMidpointKey result, lastKeyOfPreviousBlock:" +
-            Bytes.toStringBinary(lastKeyOfPreviousBlock) + ", fakeKey:" +
-            Bytes.toStringBinary(fakeKey));
-      }
-      dataBlockIndexWriter.addEntry(fakeKey, lastDataBlockOffset,onDiskSize);
-    } else {
-      dataBlockIndexWriter.addEntry(firstKeyInBlock, lastDataBlockOffset,onDiskSize);
-    }
+
+    byte[] indexKey = comparator.calcIndexKey(lastKeyOfPreviousBlock, firstKeyInBlock);
+    dataBlockIndexWriter.addEntry(indexKey, lastDataBlockOffset, onDiskSize);
     totalUncompressedBytes += fsBlockWriter.getUncompressedSizeWithHeader();
     HFile.offerWriteLatency(System.nanoTime() - startTimeNs);
     if (cacheConf.shouldCacheDataOnWrite()) {

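The instanceof branch disappears because index-key generation now lives in the comparator:
the default KeyValue.KEY_COMPARATOR shortens the key (logging and falling back to
firstKeyInBlock instead of throwing IOException as the removed code did), while the
test-only RawKeyComparator returns firstKeyInBlock unchanged. A sketch of the dispatch,
with the two block-boundary keys assumed in scope as byte arrays:

    KeyValue.KeyComparator kc = KeyValue.KEY_COMPARATOR;
    byte[] indexKey = kc.calcIndexKey(lastKeyOfPreviousBlock, firstKeyInBlock);
    // With new KeyValue.RawKeyComparator(), indexKey is just firstKeyInBlock.
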
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java Mon Aug 19 20:30:02 2013
@@ -519,7 +519,6 @@ public class TestRegionObserverInterface
       byte[] family, byte[] qualifier) throws IOException {
     HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
         .withPath(fs, path)
-        .withComparator(KeyValue.KEY_COMPARATOR)
         .create();
     long now = System.currentTimeMillis();
     try {
@@ -532,14 +531,4 @@ public class TestRegionObserverInterface
     }
   }
 
-  private static byte [][] makeN(byte [] base, int n) {
-    byte [][] ret = new byte[n][];
-    for(int i=0;i<n;i++) {
-      ret[i] = Bytes.add(base, Bytes.toBytes(String.format("%02d", i)));
-    }
-    return ret;
-  }
-
 }
-
-

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java Mon Aug 19 20:30:02 2013
@@ -30,9 +30,11 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -83,7 +85,6 @@ public class TestHalfStoreFileReader {
     HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
         .withPath(fs, p)
         .withBlockSize(1024)
-        .withComparator(KeyValue.KEY_COMPARATOR)
         .create();
 
     // write some things.
@@ -149,7 +150,6 @@ public class TestHalfStoreFileReader {
       HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
               .withPath(fs, p)
               .withBlockSize(1024)
-              .withComparator(KeyValue.KEY_COMPARATOR)
               .create();
 
       // write some things.

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java Mon Aug 19 20:30:02 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -214,6 +215,8 @@ public class TestHFile extends HBaseTest
         .withOutputStream(fout)
         .withBlockSize(minBlockSize)
         .withCompression(codec)
+        // NOTE: This test is dependent on this deprecated nonstandard comparator
+        .withComparator(new KeyValue.RawKeyComparator())
         .create();
     LOG.info(writer);
     writeRecords(writer);

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java Mon Aug 19 20:30:02 2013
@@ -489,7 +489,6 @@ public class TestHFileBlockIndex {
                 .withPath(fs, hfilePath)
                 .withBlockSize(SMALL_BLOCK_SIZE)
                 .withCompression(compr)
-                .withComparator(KeyValue.KEY_COMPARATOR)
                 .create();
         Random rand = new Random(19231737);
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java Mon Aug 19 20:30:02 2013
@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.SequenceFile;
@@ -165,6 +166,7 @@ public class TestHFilePerformance extend
             .withOutputStream(fout)
             .withBlockSize(minBlockSize)
             .withCompression(codecName)
+            .withComparator(new KeyValue.RawKeyComparator())
             .create();
 
         // Writing value in one shot.

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java Mon Aug 19 20:30:02 2013
@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
 import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
@@ -130,6 +131,7 @@ public class TestHFileSeek extends TestC
           .withOutputStream(fout)
           .withBlockSize(options.minBlockSize)
           .withCompression(options.compress)
+          .withComparator(new KeyValue.RawKeyComparator())
           .create();
       try {
         BytesWritable key = new BytesWritable();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java Mon Aug 19 20:30:02 2013
@@ -98,7 +98,6 @@ public class TestHFileWriterV2 {
             .withPath(fs, hfilePath)
             .withBlockSize(4096)
             .withCompression(compressAlgo)
-            .withComparator(KeyValue.KEY_COMPARATOR)
             .create();
 
     Random rand = new Random(9713312); // Just a fixed seed.

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java Mon Aug 19 20:30:02 2013
@@ -18,21 +18,20 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
+import static org.junit.Assert.assertEquals;
+
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.*;
-
 /**
  * Test {@link HFileScanner#reseekTo(byte[])}
  */
@@ -51,6 +50,8 @@ public class TestReseekTo {
         TEST_UTIL.getConfiguration(), cacheConf)
             .withOutputStream(fout)
             .withBlockSize(4000)
+            // NOTE: This test is dependent on this deprecated nonstandard comparator
+            .withComparator(new KeyValue.RawKeyComparator())
             .create();
     int numberOfKeys = 1000;
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java Mon Aug 19 20:30:02 2013
@@ -23,7 +23,9 @@ import java.io.IOException;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.RawComparator;
 import org.junit.experimental.categories.Category;
 
 /**
@@ -48,6 +50,8 @@ public class TestSeekTo extends HBaseTes
     HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
         .withOutputStream(fout)
         .withBlockSize(blocksize)
+        // NOTE: This test is dependent on this deprecated nonstandard comparator
+        .withComparator(new KeyValue.RawKeyComparator())
         .create();
     // 4 bytes * 3 * 2 for each key/value +
     // 3 for keys, 15 for values = 42 (woot)

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java Mon Aug 19 20:30:02 2013
@@ -264,7 +264,6 @@ public class TestLoadIncrementalHFiles {
         .withPath(fs, path)
         .withBlockSize(BLOCKSIZE)
         .withCompression(COMPRESSION)
-        .withComparator(KeyValue.KEY_COMPARATOR)
         .create();
     long now = System.currentTimeMillis();
     try {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java Mon Aug 19 20:30:02 2013
@@ -92,7 +92,6 @@ public class TestHRegionServerBulkLoad {
         .withPath(fs, path)
         .withBlockSize(BLOCKSIZE)
         .withCompression(COMPRESSION)
-        .withComparator(KeyValue.KEY_COMPARATOR)
         .create();
     long now = System.currentTimeMillis();
     try {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java?rev=1515591&r1=1515590&r2=1515591&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java Mon Aug 19 20:30:02 2013
@@ -39,7 +39,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hbase.Coprocessor;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -49,6 +48,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
@@ -85,7 +85,6 @@ import org.apache.hadoop.hbase.security.
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.util.TestTableName;
-
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -845,7 +844,6 @@ public class TestAccessController extend
       try {
         writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
             .withPath(fs, path)
-            .withComparator(KeyValue.KEY_COMPARATOR)
             .create();
         // subtract 2 since numRows doesn't include boundary keys
         for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, true, numRows-2)) {


