hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From apurt...@apache.org
Subject hbase git commit: HBASE-12270 A bug in the bucket cache, with cache blocks on write enabled (Liu Shaohui)
Date Thu, 15 Jan 2015 16:54:43 GMT
Repository: hbase
Updated Branches:
  refs/heads/0.98 cca511752 -> 6314063e7


HBASE-12270 A bug in the bucket cache, with cache blocks on write enabled (Liu Shaohui)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6314063e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6314063e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6314063e

Branch: refs/heads/0.98
Commit: 6314063e7475e9863806b7fba92304ca54083ab6
Parents: cca5117
Author: Andrew Purtell <apurtell@apache.org>
Authored: Thu Jan 15 08:31:39 2015 -0800
Committer: Andrew Purtell <apurtell@apache.org>
Committed: Thu Jan 15 08:31:39 2015 -0800

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/HFileBlock.java       | 11 +++-
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 63 +++++++++++++-------
 2 files changed, 51 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/6314063e/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 9108465..5e151fc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -216,11 +216,11 @@ public class HFileBlock implements Cacheable {
     this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;
     this.prevBlockOffset = prevBlockOffset;
     this.buf = buf;
-    if (fillHeader)
-      overwriteHeader();
     this.offset = offset;
     this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;
     this.fileContext = fileContext;
+    if (fillHeader)
+      overwriteHeader();
     this.buf.rewind();
   }
 
@@ -322,6 +322,11 @@ public class HFileBlock implements Cacheable {
     buf.putInt(onDiskSizeWithoutHeader);
     buf.putInt(uncompressedSizeWithoutHeader);
     buf.putLong(prevBlockOffset);
+    if (this.fileContext.isUseHBaseChecksum()) {
+      buf.put(fileContext.getChecksumType().getCode());
+      buf.putInt(fileContext.getBytesPerChecksum());
+      buf.putInt(onDiskDataSizeWithHeader);
+    }
   }
 
   /**
@@ -1124,7 +1129,7 @@ public class HFileBlock implements Cacheable {
           cacheConf.shouldCacheCompressed(blockType.getCategory()) ?
             getOnDiskBufferWithHeader() :
             getUncompressedBufferWithHeader(),
-          DONT_FILL_HEADER, startOffset,
+          FILL_HEADER, startOffset,
           onDiskBytesWithHeader.length + onDiskChecksum.length, newContext);
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6314063e/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index e3b09c5..7a4e9e0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -37,6 +37,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -48,6 +49,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
@@ -158,27 +160,49 @@ public class TestCacheOnWrite {
   }
 
   public TestCacheOnWrite(CacheOnWriteType cowType, Compression.Algorithm compress,
-      BlockEncoderTestType encoderType, boolean cacheCompressedData) {
+      BlockEncoderTestType encoderType, boolean cacheCompressedData, BlockCache blockCache) {
     this.cowType = cowType;
     this.compress = compress;
     this.encoderType = encoderType;
     this.encoder = encoderType.getEncoder();
     this.cacheCompressedData = cacheCompressedData;
+    this.blockCache = blockCache;
     testDescription = "[cacheOnWrite=" + cowType + ", compress=" + compress +
-        ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData + "]";
+        ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData +
+        ", blockCache=" + blockCache.getClass().getSimpleName() + "]";
     System.out.println(testDescription);
   }
 
+  private static List<BlockCache> getBlockCaches() throws IOException {
+    Configuration conf = TEST_UTIL.getConfiguration();
+    List<BlockCache> blockcaches = new ArrayList<BlockCache>();
+    // default
+    blockcaches.add(new CacheConfig(conf).getBlockCache());
+
+    // memory
+    BlockCache lru = new LruBlockCache(128 * 1024 * 1024, 64 * 1024, TEST_UTIL.getConfiguration());
+    blockcaches.add(lru);
+
+    // bucket cache
+    FileSystem.get(conf).mkdirs(TEST_UTIL.getDataTestDir());
+    int[] bucketSizes = {INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024 };
+    BlockCache bucketcache =
+        new BucketCache("file:" + TEST_UTIL.getDataTestDir() + "/bucket.data",
+            256 * 1024 * 1024, 64 * 1024, bucketSizes, 3, 256 * 1024, null);
+    blockcaches.add(bucketcache);
+    return blockcaches;
+  }
+
   @Parameters
-  public static Collection<Object[]> getParameters() {
+  public static Collection<Object[]> getParameters() throws IOException {
     List<Object[]> cowTypes = new ArrayList<Object[]>();
-    for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
-      for (Compression.Algorithm compress :
-           HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
-        for (BlockEncoderTestType encoderType :
-             BlockEncoderTestType.values()) {
-          for (boolean cacheCompressedData : new boolean[] { false, true }) {
-            cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData });
+    for (BlockCache blockache : getBlockCaches()) {
+      for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
+        for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
+          for (BlockEncoderTestType encoderType : BlockEncoderTestType.values()) {
+            for (boolean cacheCompressedData : new boolean[] { false, true }) {
+              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData, blockache});
+            }
           }
         }
       }
@@ -194,23 +218,17 @@ public class TestCacheOnWrite {
     conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, INDEX_BLOCK_SIZE);
     conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,
         BLOOM_BLOCK_SIZE);
-    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY,
-      cowType.shouldBeCached(BlockType.DATA));
-    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY,
-        cowType.shouldBeCached(BlockType.LEAF_INDEX));
-    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY,
-        cowType.shouldBeCached(BlockType.BLOOM_CHUNK));
     conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, cacheCompressedData);
     cowType.modifyConf(conf);
     fs = HFileSystem.get(conf);
-    cacheConf = new CacheConfig(conf);
-    blockCache = cacheConf.getBlockCache();
+    cacheConf =
+        new CacheConfig(blockCache, true, true, cowType.shouldBeCached(BlockType.DATA),
+        cowType.shouldBeCached(BlockType.LEAF_INDEX),
+        cowType.shouldBeCached(BlockType.BLOOM_CHUNK), false, cacheCompressedData, true);
   }
 
   @After
   public void tearDown() {
-    cacheConf = new CacheConfig(conf);
-    blockCache = cacheConf.getBlockCache();
   }
 
   @Test
@@ -308,6 +326,11 @@ public class TestCacheOnWrite {
       assertEquals("{" + cachedDataBlockType
           + "=1379, LEAF_INDEX=154, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=18}", countByType);
     }
+
+    // iterate all the keyvalue from hfile
+    while (scanner.next()) {
+      Cell cell = scanner.getKeyValue();
+    }
     reader.close();
   }
 


Mime
View raw message