hbase-commits mailing list archives

From: st...@apache.org
Subject: hbase git commit: HBASE-12270 A bug in the bucket cache, with cache blocks on write enabled (Liu Shaohui)
Date: Tue, 30 Dec 2014 22:43:31 GMT
Repository: hbase
Updated Branches:
  refs/heads/master baa2d0537 -> 305267b8e


HBASE-12270 A bug in the bucket cache, with cache blocks on write enabled (Liu Shaohui)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/305267b8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/305267b8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/305267b8

Branch: refs/heads/master
Commit: 305267b8e2c68b6103eb389587394d19d2be3e82
Parents: baa2d05
Author: stack <stack@apache.org>
Authored: Tue Dec 30 14:43:19 2014 -0800
Committer: stack <stack@apache.org>
Committed: Tue Dec 30 14:43:19 2014 -0800

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/HFileBlock.java       | 11 +++-
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 58 ++++++++++++++------
 2 files changed, 49 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/305267b8/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 667873b..b096185 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -216,11 +216,11 @@ public class HFileBlock implements Cacheable {
     this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;
     this.prevBlockOffset = prevBlockOffset;
     this.buf = buf;
-    if (fillHeader)
-      overwriteHeader();
     this.offset = offset;
     this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;
     this.fileContext = fileContext;
+    if (fillHeader)
+      overwriteHeader();
     this.buf.rewind();
   }
 
@@ -322,6 +322,11 @@ public class HFileBlock implements Cacheable {
     buf.putInt(onDiskSizeWithoutHeader);
     buf.putInt(uncompressedSizeWithoutHeader);
     buf.putLong(prevBlockOffset);
+    if (this.fileContext.isUseHBaseChecksum()) {
+      buf.put(fileContext.getChecksumType().getCode());
+      buf.putInt(fileContext.getBytesPerChecksum());
+      buf.putInt(onDiskDataSizeWithHeader);
+    }
   }
 
   /**
@@ -1175,7 +1180,7 @@ public class HFileBlock implements Cacheable {
           cacheConf.shouldCacheCompressed(blockType.getCategory()) ?
             getOnDiskBufferWithHeader() :
             getUncompressedBufferWithHeader(),
-          DONT_FILL_HEADER, startOffset,
+          FILL_HEADER, startOffset,
           onDiskBytesWithHeader.length + onDiskChecksum.length, newContext);
     }
   }
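
For context on the hunks above: getBlockForCaching() now passes FILL_HEADER, so overwriteHeader() rewrites the header bytes of the buffer being cached, and with HBase checksums enabled it now also writes the checksum-type code, bytesPerChecksum, and onDiskDataSizeWithHeader. That keeps the serialized header consistent with the block metadata when a cache such as the bucket cache later deserializes the block from its raw bytes; it is also why the constructor reorder in the first hunk matters, since fileContext and onDiskDataSizeWithHeader must be assigned before overwriteHeader() runs. Below is a minimal standalone sketch of that header write, assuming the usual layout of an 8-byte block-type magic followed by two ints and a long, plus the three checksum fields; the class name, magic string, and sizes are illustrative, not copied from HBase internals.

import java.nio.ByteBuffer;

// Sketch of the header write performed by overwriteHeader() above.
// Field order mirrors the diff; constants are example values only.
public class HeaderWriteSketch {

  static void writeHeader(ByteBuffer buf, byte[] blockTypeMagic,
      int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
      long prevBlockOffset, boolean useHBaseChecksum, byte checksumTypeCode,
      int bytesPerChecksum, int onDiskDataSizeWithHeader) {
    buf.rewind();
    buf.put(blockTypeMagic);                   // 8-byte magic, e.g. "DATABLK*"
    buf.putInt(onDiskSizeWithoutHeader);
    buf.putInt(uncompressedSizeWithoutHeader);
    buf.putLong(prevBlockOffset);
    if (useHBaseChecksum) {
      // The three fields the patch adds; without them the cached copy's
      // header disagreed with the rest of the serialized block.
      buf.put(checksumTypeCode);
      buf.putInt(bytesPerChecksum);
      buf.putInt(onDiskDataSizeWithHeader);
    }
  }

  public static void main(String[] args) {
    ByteBuffer buf = ByteBuffer.allocate(33);  // 8 + 4 + 4 + 8 + 1 + 4 + 4
    writeHeader(buf, "DATABLK*".getBytes(), 100, 200, -1L, true, (byte) 1, 16384, 133);
    System.out.println("header bytes written: " + buf.position());
  }
}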

http://git-wip-us.apache.org/repos/asf/hbase/blob/305267b8/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index d3d8159..b13c076 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -37,6 +37,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -47,6 +48,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
@@ -159,27 +161,48 @@ public class TestCacheOnWrite {
   }
 
   public TestCacheOnWrite(CacheOnWriteType cowType, Compression.Algorithm compress,
-      BlockEncoderTestType encoderType, boolean cacheCompressedData) {
+      BlockEncoderTestType encoderType, boolean cacheCompressedData, BlockCache blockCache) {
     this.cowType = cowType;
     this.compress = compress;
     this.encoderType = encoderType;
     this.encoder = encoderType.getEncoder();
     this.cacheCompressedData = cacheCompressedData;
+    this.blockCache = blockCache;
     testDescription = "[cacheOnWrite=" + cowType + ", compress=" + compress +
         ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData + "]";
     System.out.println(testDescription);
   }
 
+  private static List<BlockCache> getBlockCaches() throws IOException {
+    Configuration conf = TEST_UTIL.getConfiguration();
+    List<BlockCache> blockcaches = new ArrayList<BlockCache>();
+    // default
+    blockcaches.add(new CacheConfig(conf).getBlockCache());
+
+    // memory
+    BlockCache lru = new LruBlockCache(128 * 1024 * 1024, 64 * 1024, TEST_UTIL.getConfiguration());
+    blockcaches.add(lru);
+
+    // bucket cache
+    FileSystem.get(conf).mkdirs(TEST_UTIL.getDataTestDir());
+    int[] bucketSizes = {INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024 };
+    BlockCache bucketcache =
+        new BucketCache("file:" + TEST_UTIL.getDataTestDir() + "/bucket.data",
+            128 * 1024 * 1024, 64 * 1024, bucketSizes, 5, 64 * 100, null);
+    blockcaches.add(bucketcache);
+    return blockcaches;
+  }
+
   @Parameters
-  public static Collection<Object[]> getParameters() {
+  public static Collection<Object[]> getParameters() throws IOException {
     List<Object[]> cowTypes = new ArrayList<Object[]>();
-    for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
-      for (Compression.Algorithm compress :
-           HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
-        for (BlockEncoderTestType encoderType :
-             BlockEncoderTestType.values()) {
-          for (boolean cacheCompressedData : new boolean[] { false, true }) {
-            cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData });
+    for (BlockCache blockCache : getBlockCaches()) {
+      for (CacheOnWriteType cowType : CacheOnWriteType.values()) {
+        for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
+          for (BlockEncoderTestType encoderType : BlockEncoderTestType.values()) {
+            for (boolean cacheCompressedData : new boolean[] { false, true }) {
+              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData, blockCache });
+            }
           }
         }
       }
@@ -195,17 +218,13 @@ public class TestCacheOnWrite {
     conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, INDEX_BLOCK_SIZE);
     conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,
         BLOOM_BLOCK_SIZE);
-    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY,
-      cowType.shouldBeCached(BlockType.DATA));
-    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY,
-        cowType.shouldBeCached(BlockType.LEAF_INDEX));
-    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY,
-        cowType.shouldBeCached(BlockType.BLOOM_CHUNK));
     conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, cacheCompressedData);
     cowType.modifyConf(conf);
     fs = HFileSystem.get(conf);
-    cacheConf = new CacheConfig(conf);
-    blockCache = cacheConf.getBlockCache();
+    cacheConf =
+        new CacheConfig(blockCache, true, true, cowType.shouldBeCached(BlockType.DATA),
+        cowType.shouldBeCached(BlockType.LEAF_INDEX),
+        cowType.shouldBeCached(BlockType.BLOOM_CHUNK), false, cacheCompressedData, true, false);
   }
 
   @After
@@ -309,6 +328,11 @@ public class TestCacheOnWrite {
       assertEquals("{" + cachedDataBlockType
           + "=1379, LEAF_INDEX=154, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=18}", countByType);
     }
+
+    // iterate over all the KeyValues in the hfile
+    while (scanner.next()) {
+      Cell cell = scanner.getKeyValue();
+    }
     reader.close();
   }
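
The test refactor above is the stock JUnit 4 Parameterized pattern with the new BlockCache dimension folded into the existing cross-product, so every cache implementation returned by getBlockCaches() is exercised against every cache-on-write, compression, encoding, and cacheCompressedData combination. A stripped-down sketch of the same enumeration shape follows; the enum and class names here are placeholders, not HBase types.

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

// Placeholder sketch of the @Parameters cross-product used by TestCacheOnWrite.
@RunWith(Parameterized.class)
public class CrossProductSketch {

  enum CacheKind { DEFAULT, LRU, BUCKET }  // stands in for getBlockCaches()

  private final CacheKind cache;
  private final boolean cacheCompressedData;

  public CrossProductSketch(CacheKind cache, boolean cacheCompressedData) {
    this.cache = cache;
    this.cacheCompressedData = cacheCompressedData;
  }

  @Parameters
  public static Collection<Object[]> parameters() {
    List<Object[]> params = new ArrayList<Object[]>();
    // Each added dimension multiplies the number of test instances,
    // just as the BlockCache loop does in the patch above.
    for (CacheKind kind : CacheKind.values()) {
      for (boolean compressed : new boolean[] { false, true }) {
        params.add(new Object[] { kind, compressed });
      }
    }
    return params;
  }

  @Test
  public void runsOncePerCombination() {
    System.out.println(cache + " / " + cacheCompressedData);
  }
}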
 

