Return-Path: X-Original-To: archive-asf-public-internal@cust-asf2.ponee.io Delivered-To: archive-asf-public-internal@cust-asf2.ponee.io Received: from cust-asf.ponee.io (cust-asf.ponee.io [163.172.22.183]) by cust-asf2.ponee.io (Postfix) with ESMTP id 1D16C200CDA for ; Fri, 21 Jul 2017 02:56:01 +0200 (CEST) Received: by cust-asf.ponee.io (Postfix) id 1BC9116C663; Fri, 21 Jul 2017 00:56:01 +0000 (UTC) Delivered-To: archive-asf-public@cust-asf.ponee.io Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by cust-asf.ponee.io (Postfix) with SMTP id 3B26D16C662 for ; Fri, 21 Jul 2017 02:56:00 +0200 (CEST) Received: (qmail 77769 invoked by uid 500); 21 Jul 2017 00:55:57 -0000 Mailing-List: contact commits-help@hbase.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: dev@hbase.apache.org Delivered-To: mailing list commits@hbase.apache.org Received: (qmail 77760 invoked by uid 99); 21 Jul 2017 00:55:57 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Fri, 21 Jul 2017 00:55:57 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id 98909F3278; Fri, 21 Jul 2017 00:55:57 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: apurtell@apache.org To: commits@hbase.apache.org Date: Fri, 21 Jul 2017 00:55:57 -0000 Message-Id: X-Mailer: ASF-Git Admin Mailer Subject: [01/23] hbase git commit: HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when configuring hbase.bucketcache.bucket.sizes. [Forced Update!] archived-at: Fri, 21 Jul 2017 00:56:01 -0000 Repository: hbase Updated Branches: refs/heads/HBASE-15631-branch-1 d0c72847a -> f7c378935 (forced update) HBASE-16993 BucketCache throw java.io.IOException: Invalid HFile block magic when configuring hbase.bucketcache.bucket.sizes. 
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9036556a Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9036556a Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9036556a Branch: refs/heads/HBASE-15631-branch-1 Commit: 9036556a33c356225814c3ca50ecc09997269ea5 Parents: cfd5b6b Author: anoopsamjohn Authored: Thu Jul 20 23:00:48 2017 +0530 Committer: anoopsamjohn Committed: Thu Jul 20 23:00:48 2017 +0530 ---------------------------------------------------------------------- hbase-common/src/main/resources/hbase-default.xml | 2 +- .../apache/hadoop/hbase/io/hfile/CacheConfig.java | 17 +++++++++++++---- .../hadoop/hbase/io/hfile/TestCacheConfig.java | 14 ++++++++++++++ 3 files changed, 28 insertions(+), 5 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-common/src/main/resources/hbase-default.xml ---------------------------------------------------------------------- diff --git a/hbase-common/src/main/resources/hbase-default.xml b/hbase-common/src/main/resources/hbase-default.xml index 3c4044d..a29e9c8 100644 --- a/hbase-common/src/main/resources/hbase-default.xml +++ b/hbase-common/src/main/resources/hbase-default.xml @@ -848,7 +848,7 @@ possible configurations would overwhelm and obscure the important. A comma-separated list of sizes for buckets for the bucketcache. Can be multiple sizes. List block sizes in order from smallest to largest. The sizes you use will depend on your data access patterns. - Must be a multiple of 1024 else you will run into + Must be a multiple of 256 else you will run into 'java.io.IOException: Invalid HFile block magic' when you go to read from cache. If you specify no values here, then you pick up the default bucketsizes set in code (See BucketAllocator#DEFAULT_BUCKET_SIZES). 
http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java index 340236b..57d2057 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java @@ -551,7 +551,8 @@ public class CacheConfig { * @return Returns L2 block cache instance (for now it is BucketCache BlockCache all the time) * or null if not supposed to be a L2. */ - private static BlockCache getL2(final Configuration c) { + @VisibleForTesting + static BlockCache getL2(final Configuration c) { final boolean useExternal = c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT); if (LOG.isDebugEnabled()) { LOG.debug("Trying to use " + (useExternal?" External":" Internal") + " l2 cache"); @@ -561,10 +562,8 @@ public class CacheConfig { if (useExternal) { return getExternalBlockcache(c); } - // otherwise use the bucket cache. return getBucketCache(c); - } private static BlockCache getExternalBlockcache(Configuration c) { @@ -618,7 +617,17 @@ public class CacheConfig { if (configuredBucketSizes != null) { bucketSizes = new int[configuredBucketSizes.length]; for (int i = 0; i < configuredBucketSizes.length; i++) { - bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim()); + int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim()); + if (bucketSize % 256 != 0) { + // We need all the bucket sizes to be multiples of 256. Having all the configured bucket + // sizes to be multiples of 256 will ensure that the block offsets within buckets, + // that are calculated, will also be multiples of 256. 
+ // See BucketEntry where offset to each block is represented using 5 bytes (instead of 8 + // bytes long). We would like to keep heap overhead as low as possible. throw new IllegalArgumentException("Illegal value: " + bucketSize + " configured for '" + + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be multiples of 256"); + } + bucketSizes[i] = bucketSize; } } BucketCache bucketCache = null; http://git-wip-us.apache.org/repos/asf/hbase/blob/9036556a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java index 0175224..3f60cd7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.io.hfile; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import java.io.IOException; import java.lang.management.ManagementFactory; @@ -336,6 +337,19 @@ public class TestCacheConfig { assertDataBlockCount(lrubc, 1); } + @Test + public void testL2CacheWithInvalidBucketSize() { + Configuration c = new Configuration(this.conf); + c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap"); + c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096"); + c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024); + try { + CacheConfig.getL2(c); + fail("Should throw IllegalArgumentException when passing illegal value for bucket size"); + } catch (IllegalArgumentException e) { + } + } + private void assertDataBlockCount(final LruBlockCache bc, final int expected) { Map blocks = bc.getBlockTypeCountsForTest(); assertEquals(expected, blocks == null? 0: