hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From anoopsamj...@apache.org
Subject svn commit: r1511023 - in /hbase/trunk/hbase-server/src: main/java/org/apache/hadoop/hbase/io/hfile/ main/java/org/apache/hadoop/hbase/util/ test/java/org/apache/hadoop/hbase/io/hfile/
Date Tue, 06 Aug 2013 16:31:37 GMT
Author: anoopsamjohn
Date: Tue Aug  6 16:31:37 2013
New Revision: 1511023

URL: http://svn.apache.org/r1511023
Log:
HBASE-9126 Make HFile MIN VERSION as 2

Modified:
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java?rev=1511023&r1=1511022&r2=1511023&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java Tue Aug  6 16:31:37 2013
@@ -241,29 +241,17 @@ public class FixedFileTrailer {
     output.writeLong(loadOnOpenDataOffset);
     output.writeInt(dataIndexCount);
 
-    if (majorVersion == 1) {
-      // This used to be metaIndexOffset, but it was not used in version 1.
-      output.writeLong(0);
-    } else {
-      output.writeLong(uncompressedDataIndexSize);
-    }
+    output.writeLong(uncompressedDataIndexSize);
 
     output.writeInt(metaIndexCount);
     output.writeLong(totalUncompressedBytes);
-    if (majorVersion == 1) {
-      output.writeInt((int) Math.min(Integer.MAX_VALUE, entryCount));
-    } else {
-      // This field is long from version 2 onwards.
-      output.writeLong(entryCount);
-    }
+    output.writeLong(entryCount);
     output.writeInt(compressionCodec.ordinal());
 
-    if (majorVersion > 1) {
-      output.writeInt(numDataIndexLevels);
-      output.writeLong(firstDataBlockOffset);
-      output.writeLong(lastDataBlockOffset);
-      Bytes.writeStringFixedSize(output, comparatorClassName, MAX_COMPARATOR_NAME_LENGTH);
-    }
+    output.writeInt(numDataIndexLevels);
+    output.writeLong(firstDataBlockOffset);
+    output.writeLong(lastDataBlockOffset);
+    Bytes.writeStringFixedSize(output, comparatorClassName, MAX_COMPARATOR_NAME_LENGTH);
   }
 
   /**
@@ -354,23 +342,17 @@ public class FixedFileTrailer {
     fileInfoOffset = input.readLong();
     loadOnOpenDataOffset = input.readLong();
     dataIndexCount = input.readInt();
-    if (majorVersion == 1) {
-      input.readLong(); // Read and skip metaIndexOffset.
-    } else {
-      uncompressedDataIndexSize = input.readLong();
-    }
+    uncompressedDataIndexSize = input.readLong();
     metaIndexCount = input.readInt();
 
     totalUncompressedBytes = input.readLong();
-    entryCount = majorVersion == 1 ? input.readInt() : input.readLong();
+    entryCount = input.readLong();
     compressionCodec = Compression.Algorithm.values()[input.readInt()];
-    if (majorVersion > 1) {
-      numDataIndexLevels = input.readInt();
-      firstDataBlockOffset = input.readLong();
-      lastDataBlockOffset = input.readLong();
-      setComparatorClass(getComparatorClass(Bytes.readStringFixedSize(input,
+    numDataIndexLevels = input.readInt();
+    firstDataBlockOffset = input.readLong();
+    lastDataBlockOffset = input.readLong();
+    setComparatorClass(getComparatorClass(Bytes.readStringFixedSize(input,
         MAX_COMPARATOR_NAME_LENGTH)));
-    }
   }
   
   private void append(StringBuilder sb, String s) {
@@ -389,13 +371,11 @@ public class FixedFileTrailer {
     append(sb, "totalUncomressedBytes=" + totalUncompressedBytes);
     append(sb, "entryCount=" + entryCount);
     append(sb, "compressionCodec=" + compressionCodec);
-    if (majorVersion == 2) {
-      append(sb, "uncompressedDataIndexSize=" + uncompressedDataIndexSize);
-      append(sb, "numDataIndexLevels=" + numDataIndexLevels);
-      append(sb, "firstDataBlockOffset=" + firstDataBlockOffset);
-      append(sb, "lastDataBlockOffset=" + lastDataBlockOffset);
-      append(sb, "comparatorClassName=" + comparatorClassName);
-    }
+    append(sb, "uncompressedDataIndexSize=" + uncompressedDataIndexSize);
+    append(sb, "numDataIndexLevels=" + numDataIndexLevels);
+    append(sb, "firstDataBlockOffset=" + firstDataBlockOffset);
+    append(sb, "lastDataBlockOffset=" + lastDataBlockOffset);
+    append(sb, "comparatorClassName=" + comparatorClassName);
     append(sb, "majorVersion=" + majorVersion);
     append(sb, "minorVersion=" + minorVersion);
 
@@ -516,15 +496,6 @@ public class FixedFileTrailer {
   }
 
   public void setEntryCount(long newEntryCount) {
-    if (majorVersion == 1) {
-      int intEntryCount = (int) Math.min(Integer.MAX_VALUE, newEntryCount);
-      if (intEntryCount != newEntryCount) {
-        LOG.info("Warning: entry count is " + newEntryCount + " but writing "
-            + intEntryCount + " into the version " + majorVersion + " trailer");
-      }
-      entryCount = intEntryCount;
-      return;
-    }
     entryCount = newEntryCount;
   }
 
@@ -626,8 +597,6 @@ public class FixedFileTrailer {
   }
 
   public long getUncompressedDataIndexSize() {
-    if (majorVersion == 1)
-      return 0;
     return uncompressedDataIndexSize;
   }
 

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=1511023&r1=1511022&r2=1511023&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java Tue Aug  6 16:31:37 2013
@@ -62,7 +62,6 @@ import org.apache.hadoop.hbase.protobuf.
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
 import org.apache.hadoop.hbase.protobuf.generated.HFileProtos;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-import org.apache.hadoop.hbase.regionserver.StoreFile.WriterBuilder;
 import org.apache.hadoop.hbase.util.BloomFilterWriter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
@@ -153,7 +152,7 @@ public class HFile {
     Compression.Algorithm.NONE;
 
   /** Minimum supported HFile format version */
-  public static final int MIN_FORMAT_VERSION = 1;
+  public static final int MIN_FORMAT_VERSION = 2;
 
   /** Maximum supported HFile format version
    */

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java?rev=1511023&r1=1511022&r2=1511023&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java Tue Aug  6 16:31:37 2013
@@ -197,35 +197,12 @@ public final class BloomFilterFactory {
         MAX_ALLOWED_FOLD_FACTOR);
 
     // Do we support compound bloom filters?
-    if (HFile.getFormatVersion(conf) > HFile.MIN_FORMAT_VERSION) {
-      // In case of compound Bloom filters we ignore the maxKeys hint.
-      CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(
-          getBloomBlockSize(conf), err, Hash.getHashType(conf), maxFold,
-          cacheConf.shouldCacheBloomsOnWrite(), bloomType == BloomType.ROWCOL
-              ? KeyValue.KEY_COMPARATOR : Bytes.BYTES_RAWCOMPARATOR);
-      writer.addInlineBlockWriter(bloomWriter);
-      return bloomWriter;
-    } else {
-      // A single-block Bloom filter. Only used when testing HFile format
-      // version 1.
-      int tooBig = conf.getInt(IO_STOREFILE_BLOOM_MAX_KEYS,
-          128 * 1000 * 1000);
-
-      if (maxKeys <= 0) {
-        LOG.warn("Invalid maximum number of keys specified: " + maxKeys
-            + ", not using Bloom filter");
-        return null;
-      } else if (maxKeys < tooBig) {
-        BloomFilterWriter bloom = new ByteBloomFilter((int) maxKeys, err,
-            Hash.getHashType(conf), maxFold);
-        bloom.allocBloom();
-        return bloom;
-      } else {
-        LOG.debug("Skipping bloom filter because max keysize too large: "
-            + maxKeys);
-      }
-    }
-    return null;
+    // In case of compound Bloom filters we ignore the maxKeys hint.
+    CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
+        err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
+        bloomType == BloomType.ROWCOL ? KeyValue.KEY_COMPARATOR : Bytes.BYTES_RAWCOMPARATOR);
+    writer.addInlineBlockWriter(bloomWriter);
+    return bloomWriter;
   }
 
   /**
@@ -250,18 +227,12 @@ public final class BloomFilterFactory {
 
     float err = getErrorRate(conf);
 
-    if (HFile.getFormatVersion(conf) > HFile.MIN_FORMAT_VERSION) {
-      int maxFold = getMaxFold(conf);
-      // In case of compound Bloom filters we ignore the maxKeys hint.
-      CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(
-          getBloomBlockSize(conf), err, Hash.getHashType(conf),
-          maxFold,
-          cacheConf.shouldCacheBloomsOnWrite(), Bytes.BYTES_RAWCOMPARATOR);
-      writer.addInlineBlockWriter(bloomWriter);
-      return bloomWriter;
-    } else {
-      LOG.info("Delete Family Bloom filter is not supported in HFile V1");
-      return null;
-    }
+    int maxFold = getMaxFold(conf);
+    // In case of compound Bloom filters we ignore the maxKeys hint.
+    CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
+        err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
+        Bytes.BYTES_RAWCOMPARATOR);
+    writer.addInlineBlockWriter(bloomWriter);
+    return bloomWriter;
   }
 };

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java?rev=1511023&r1=1511022&r2=1511023&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java Tue Aug  6 16:31:37 2013
@@ -50,8 +50,11 @@ public class TestFixedFileTrailer {
 
   private static final Log LOG = LogFactory.getLog(TestFixedFileTrailer.class);
 
-  /** The number of used fields by version. Indexed by version minus one. */
-  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 9, 14 };
+  /**
+   * The number of used fields by version. Indexed by version minus two. 
+   * Min version that we support is V2
+   */
+  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 14 };
 
   private HBaseTestingUtility util = new HBaseTestingUtility();
   private FileSystem fs;
@@ -87,17 +90,11 @@ public class TestFixedFileTrailer {
     t.setDataIndexCount(3);
     t.setEntryCount(((long) Integer.MAX_VALUE) + 1);
 
-    if (version == 1) {
-      t.setFileInfoOffset(876);
-    }
-
-    if (version == 2) {
-      t.setLastDataBlockOffset(291);
-      t.setNumDataIndexLevels(3);
-      t.setComparatorClass(KeyValue.KEY_COMPARATOR.getClass());
-      t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
-      t.setUncompressedDataIndexSize(827398717L); // Something random.
-    }
+    t.setLastDataBlockOffset(291);
+    t.setNumDataIndexLevels(3);
+    t.setComparatorClass(KeyValue.KEY_COMPARATOR.getClass());
+    t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
+    t.setUncompressedDataIndexSize(827398717L); // Something random.
 
     t.setLoadOnOpenOffset(128);
     t.setMetaIndexCount(7);
@@ -162,7 +159,7 @@ public class TestFixedFileTrailer {
 
     String trailerStr = t.toString();
     assertEquals("Invalid number of fields in the string representation "
-        + "of the trailer: " + trailerStr, NUM_FIELDS_BY_VERSION[version - 1],
+        + "of the trailer: " + trailerStr, NUM_FIELDS_BY_VERSION[version - 2],
         trailerStr.split(", ").length);
     assertEquals(trailerStr, t4.toString());
   }



Mime
View raw message