From: mbertozzi@apache.org
To: commits@hbase.apache.org
Date: Wed, 16 Mar 2016 16:34:13 -0000
Subject: [7/7] hbase git commit: HBASE-15430 Failed taking snapshot - Manifest proto-message too large (JunHo Cho)

HBASE-15430 Failed taking snapshot - Manifest proto-message too large (JunHo Cho)

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/46253a43
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/46253a43
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/46253a43

Branch: refs/heads/0.98
Commit: 46253a4388b6d4e3f05d612f09abf944970d8e6a
Parents: 3dd3658
Author: Matteo Bertozzi
Authored: Wed Mar 16 08:52:02 2016 -0700
Committer: Matteo Bertozzi
Committed: Wed Mar 16 09:33:43 2016 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/snapshot/SnapshotManifest.java |  17 ++-
 .../hbase/snapshot/SnapshotTestingUtils.java    |  19 ++-
 .../hbase/snapshot/TestSnapshotManifest.java    | 143 +++++++++++++++++++
 3 files changed, 172 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/46253a43/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 38ccf08..f688e79 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -18,8 +18,10 @@ package org.apache.hadoop.hbase.snapshot;
 
-import java.io.IOException;
+import com.google.protobuf.CodedInputStream;
+
 import java.io.FileNotFoundException;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -30,7 +32,6 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -38,6 +39,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest;
@@ -64,7 +66,9 @@ import org.apache.hadoop.hbase.util.Threads;
 public class SnapshotManifest {
   private static final Log LOG = LogFactory.getLog(SnapshotManifest.class);
 
-  private static final String DATA_MANIFEST_NAME = "data.manifest";
+  public static final String SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY = "snapshot.manifest.size.limit";
+
+  public static final String DATA_MANIFEST_NAME = "data.manifest";
 
   private List<SnapshotRegionManifest> regionManifests;
   private SnapshotDescription desc;
@@ -74,6 +78,7 @@ public class SnapshotManifest {
   private final Configuration conf;
   private final Path workingDir;
   private final FileSystem fs;
+  private int manifestSizeLimit;
 
   private SnapshotManifest(final Configuration conf, final FileSystem fs,
       final Path workingDir, final SnapshotDescription desc,
@@ -83,6 +88,8 @@ public class SnapshotManifest {
     this.workingDir = workingDir;
     this.conf = conf;
     this.fs = fs;
+
+    this.manifestSizeLimit = conf.getInt(SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY, 64 * 1024 * 1024);
   }
 
   /**
@@ -430,7 +437,9 @@ public class SnapshotManifest {
     FSDataInputStream in = null;
     try {
       in = fs.open(new Path(workingDir, DATA_MANIFEST_NAME));
-      return SnapshotDataManifest.parseFrom(in);
+      CodedInputStream cin = CodedInputStream.newInstance(in);
+      cin.setSizeLimit(manifestSizeLimit);
+      return SnapshotDataManifest.parseFrom(cin);
     } catch (FileNotFoundException e) {
       return null;
     } finally {
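The change above works around the hard per-message size cap protobuf enforces when parsing directly from an InputStream: readDataManifest() now wraps the manifest stream in a CodedInputStream and raises the limit before parsing, so a data.manifest larger than the default no longer fails with "Manifest proto-message too large". For reference, a minimal standalone sketch of that pattern, assuming a hypothetical readManifest() helper; only the config key "snapshot.manifest.size.limit", its 64MB default, and the protobuf/FileSystem calls are taken from the patch:

  import java.io.IOException;

  import com.google.protobuf.CodedInputStream;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FSDataInputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest;

  public class ManifestReadSketch {
    // Hypothetical helper mirroring what SnapshotManifest.readDataManifest() now does.
    static SnapshotDataManifest readManifest(Configuration conf, FileSystem fs, Path workingDir)
        throws IOException {
      // Config key and 64MB default come from the patch above.
      int sizeLimit = conf.getInt("snapshot.manifest.size.limit", 64 * 1024 * 1024);
      FSDataInputStream in = fs.open(new Path(workingDir, "data.manifest"));
      try {
        CodedInputStream cin = CodedInputStream.newInstance(in);
        // Raise protobuf's per-message size cap before parsing a potentially huge manifest.
        cin.setSizeLimit(sizeLimit);
        return SnapshotDataManifest.parseFrom(cin);
      } finally {
        in.close();
      }
    }
  }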
http://git-wip-us.apache.org/repos/asf/hbase/blob/46253a43/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index 9261b78..431b01e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -545,17 +545,30 @@ public class SnapshotTestingUtils {
       return createSnapshot(snapshotName, tableName, SnapshotManifestV1.DESCRIPTOR_VERSION);
     }
 
+    public SnapshotBuilder createSnapshotV1(final String snapshotName, final String tableName,
+        final int numRegions) throws IOException {
+      return createSnapshot(snapshotName, tableName, numRegions, SnapshotManifestV1.DESCRIPTOR_VERSION);
+    }
+
     public SnapshotBuilder createSnapshotV2(final String snapshotName, final String tableName)
         throws IOException {
       return createSnapshot(snapshotName, tableName, SnapshotManifestV2.DESCRIPTOR_VERSION);
     }
 
+    public SnapshotBuilder createSnapshotV2(final String snapshotName, final String tableName,
+        final int numRegions) throws IOException {
+      return createSnapshot(snapshotName, tableName, numRegions, SnapshotManifestV2.DESCRIPTOR_VERSION);
+    }
+
     private SnapshotBuilder createSnapshot(final String snapshotName, final String tableName,
         final int version) throws IOException {
-      HTableDescriptor htd = createHtd(tableName);
-      htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
+      return createSnapshot(snapshotName, tableName, TEST_NUM_REGIONS, version);
+    }
 
-      RegionData[] regions = createTable(htd, TEST_NUM_REGIONS);
+    private SnapshotBuilder createSnapshot(final String snapshotName, final String tableName,
+        final int numRegions, final int version) throws IOException {
+      HTableDescriptor htd = createHtd(tableName);
+      RegionData[] regions = createTable(htd, numRegions);
       SnapshotDescription desc = SnapshotDescription.newBuilder()
         .setTable(htd.getNameAsString())
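The new numRegions overloads in SnapshotMock let a test build a snapshot skeleton with an explicit region count (including zero) and then attach its own, arbitrarily large data.manifest. A condensed usage sketch mirroring the setup of the test added below; conf, fs and rootDir are assumed to come from an HBaseTestingUtility exactly as in that test:

  SnapshotTestingUtils.SnapshotMock snapshotMock =
    new SnapshotTestingUtils.SnapshotMock(conf, fs, rootDir);
  // Build a v2 snapshot with no regions; the test writes its own oversized manifest afterwards.
  SnapshotTestingUtils.SnapshotMock.SnapshotBuilder builder =
    snapshotMock.createSnapshotV2("snapshot", "testSnapshotManifest", 0);
  Path snapshotDir = builder.commit();
  SnapshotDescription snapshotDesc = builder.getSnapshotDescription();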
http://git-wip-us.apache.org/repos/asf/hbase/blob/46253a43/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
new file mode 100644
index 0000000..870bfd9
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.snapshot;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDataManifest;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.ByteStringer;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.IOException;
+
+import static org.junit.Assert.fail;
+
+@Category({MasterTests.class, SmallTests.class})
+public class TestSnapshotManifest {
+  private final Log LOG = LogFactory.getLog(getClass());
+
+  private static final String TABLE_NAME_STR = "testSnapshotManifest";
+  private static final TableName TABLE_NAME = TableName.valueOf(TABLE_NAME_STR);
+  private static final int TEST_NUM_REGIONS = 16000;
+
+  private static HBaseTestingUtility TEST_UTIL;
+  private Configuration conf;
+  private FileSystem fs;
+  private Path rootDir;
+  private Path snapshotDir;
+  private SnapshotDescription snapshotDesc;
+
+  @Before
+  public void setup() throws Exception {
+    TEST_UTIL = HBaseTestingUtility.createLocalHTU();
+
+    rootDir = TEST_UTIL.getDataTestDir(TABLE_NAME_STR);
+    fs = TEST_UTIL.getTestFileSystem();
+    conf = TEST_UTIL.getConfiguration();
+
+    SnapshotTestingUtils.SnapshotMock snapshotMock =
+      new SnapshotTestingUtils.SnapshotMock(conf, fs, rootDir);
+    SnapshotTestingUtils.SnapshotMock.SnapshotBuilder builder =
+      snapshotMock.createSnapshotV2("snapshot", TABLE_NAME_STR, 0);
+    snapshotDir = builder.commit();
+    snapshotDesc = builder.getSnapshotDescription();
+
+    SnapshotDataManifest.Builder dataManifestBuilder =
+      SnapshotDataManifest.newBuilder();
+    byte[] startKey = null;
+    byte[] stopKey = null;
+    for (int i = 1; i <= TEST_NUM_REGIONS; i++) {
+      stopKey = Bytes.toBytes(String.format("%016d", i));
+      HRegionInfo regionInfo = new HRegionInfo(TABLE_NAME, startKey, stopKey, false);
+      SnapshotRegionManifest.Builder dataRegionManifestBuilder =
+        SnapshotRegionManifest.newBuilder();
+
+      for (HColumnDescriptor hcd: builder.getTableDescriptor().getFamilies()) {
+        SnapshotRegionManifest.FamilyFiles.Builder family =
+          SnapshotRegionManifest.FamilyFiles.newBuilder();
+        family.setFamilyName(ByteStringer.wrap(hcd.getName()));
+        for (int j = 0; j < 100; ++j) {
+          SnapshotRegionManifest.StoreFile.Builder sfManifest =
+            SnapshotRegionManifest.StoreFile.newBuilder();
+          sfManifest.setName(String.format("%032d", i));
+          sfManifest.setFileSize((1 + i) * (1 + i) * 1024);
+          family.addStoreFiles(sfManifest.build());
+        }
+        dataRegionManifestBuilder.addFamilyFiles(family.build());
+      }
+
+      dataRegionManifestBuilder.setRegionInfo(HRegionInfo.convert(regionInfo));
+      dataManifestBuilder.addRegionManifests(dataRegionManifestBuilder.build());
+
+      startKey = stopKey;
+    }
+
+    dataManifestBuilder.setTableSchema(builder.getTableDescriptor().convert());
+
+    SnapshotDataManifest dataManifest = dataManifestBuilder.build();
+    writeDataManifest(dataManifest);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    fs.delete(rootDir,true);
+  }
+
+  @Test
+  public void testReadSnapshotManifest() throws IOException {
+    try {
+      SnapshotManifest.open(conf, fs, snapshotDir, snapshotDesc);
+      fail("fail to test snapshot manifest because message size is too small.");
+    } catch (InvalidProtocolBufferException ipbe) {
+      try {
+        conf.setInt(SnapshotManifest.SNAPSHOT_MANIFEST_SIZE_LIMIT_CONF_KEY, 128 * 1024 * 1024);
+        SnapshotManifest.open(conf, fs, snapshotDir, snapshotDesc);
+        LOG.info("open snapshot manifest succeed.");
+      } catch (InvalidProtocolBufferException ipbe2) {
+        fail("fail to take snapshot because Manifest proto-message too large.");
+      }
+    }
+  }
+
+  private void writeDataManifest(final SnapshotDataManifest manifest)
+      throws IOException {
+    FSDataOutputStream stream = fs.create(new Path(snapshotDir, SnapshotManifest.DATA_MANIFEST_NAME));
+    try {
+      manifest.writeTo(stream);
+    } finally {
+      stream.close();
+    }
+  }
+}