From: arp@apache.org
To: common-commits@hadoop.apache.org
Reply-To: common-dev@hadoop.apache.org
Date: Thu, 11 Dec 2014 02:26:14 -0000
Subject: [2/2] hadoop git commit: HDFS-7475. Make TestLazyPersistFiles#testLazyPersistBlocksAreSaved deterministic. (Contributed by Xiaoyu Yao)

HDFS-7475. Make TestLazyPersistFiles#testLazyPersistBlocksAreSaved deterministic. (Contributed by Xiaoyu Yao)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d3980901
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d3980901
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d3980901

Branch: refs/heads/branch-2
Commit: d39809016df7620ef6e1679e75c222fd34524353
Parents: 87b3fc8
Author: arp
Authored: Wed Dec 10 18:24:22 2014 -0800
Committer: arp
Committed: Wed Dec 10 18:24:50 2014 -0800

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 ++
 .../fsdataset/impl/LazyPersistTestCase.java     | 44 ++++++++++++++++++++
 .../fsdataset/impl/TestLazyPersistFiles.java    | 32 +-------
 3 files changed, 48 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d3980901/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index eb0ff86..68271f7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -310,6 +310,9 @@ Release 2.7.0 - UNRELEASED
     HDFS-5578. [JDK8] Fix Javadoc errors caused by incorrect or illegal tags
     in doc comments. (Andrew Purtell via wheat9)
 
+    HDFS-7475. Make TestLazyPersistFiles#testLazyPersistBlocksAreSaved
+    deterministic. (Xiaoyu Yao via Arpit Agarwal)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d3980901/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java
index c762849..2de5bb7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/LazyPersistTestCase.java
@@ -50,6 +50,8 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.List;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.UUID;
 
 import static org.apache.hadoop.fs.CreateFlag.CREATE;
@@ -131,6 +133,48 @@ public abstract class LazyPersistTestCase {
     return locatedBlocks;
   }
 
+  /**
+   * Make sure at least one non-transient volume has a saved copy of the replica.
+   * An infinite loop is used to ensure the async lazy persist tasks are completely
+   * done before verification. Caller of ensureLazyPersistBlocksAreSaved expects
+   * either a successful pass or timeout failure.
+   */
+  protected final void ensureLazyPersistBlocksAreSaved(
+      LocatedBlocks locatedBlocks) throws IOException, InterruptedException {
+    final String bpid = cluster.getNamesystem().getBlockPoolId();
+    List<? extends FsVolumeSpi> volumes =
+        cluster.getDataNodes().get(0).getFSDataset().getVolumes();
+    final Set<Long> persistedBlockIds = new HashSet<Long>();
+
+    while (persistedBlockIds.size() < locatedBlocks.getLocatedBlocks().size()) {
+      // Take 1 second sleep before each verification iteration
+      Thread.sleep(1000);
+
+      for (LocatedBlock lb : locatedBlocks.getLocatedBlocks()) {
+        for (FsVolumeSpi v : volumes) {
+          if (v.isTransientStorage()) {
+            continue;
+          }
+
+          FsVolumeImpl volume = (FsVolumeImpl) v;
+          File lazyPersistDir = volume.getBlockPoolSlice(bpid).getLazypersistDir();
+
+          long blockId = lb.getBlock().getBlockId();
+          File targetDir =
+              DatanodeUtil.idToBlockDir(lazyPersistDir, blockId);
+          File blockFile = new File(targetDir, lb.getBlock().getBlockName());
+          if (blockFile.exists()) {
+            // Found a persisted copy for this block and added to the Set
+            persistedBlockIds.add(blockId);
+          }
+        }
+      }
+    }
+
+    // We should have found a persisted copy for each located block.
+    assertThat(persistedBlockIds.size(), is(locatedBlocks.getLocatedBlocks().size()));
+  }
+
   protected final void makeRandomTestFile(Path path, long length,
       boolean isLazyPersist, long seed) throws IOException {
     DFSTestUtil.createFile(fs, path, isLazyPersist, BUFFER_LENGTH, length,


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d3980901/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java
index 771609c..49d3c6d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestLazyPersistFiles.java
@@ -304,37 +304,7 @@ public class TestLazyPersistFiles extends LazyPersistTestCase {
 
     // Make sure that there is a saved copy of the replica on persistent
     // storage.
-    final String bpid = cluster.getNamesystem().getBlockPoolId();
-    List<? extends FsVolumeSpi> volumes =
-        cluster.getDataNodes().get(0).getFSDataset().getVolumes();
-
-    final Set<Long> persistedBlockIds = new HashSet<Long>();
-
-    // Make sure at least one non-transient volume has a saved copy of
-    // the replica.
-    for (FsVolumeSpi v : volumes) {
-      if (v.isTransientStorage()) {
-        continue;
-      }
-
-      FsVolumeImpl volume = (FsVolumeImpl) v;
-      File lazyPersistDir = volume.getBlockPoolSlice(bpid).getLazypersistDir();
-
-      for (LocatedBlock lb : locatedBlocks.getLocatedBlocks()) {
-        File targetDir = DatanodeUtil.idToBlockDir(lazyPersistDir, lb.getBlock().getBlockId());
-        File blockFile = new File(targetDir, lb.getBlock().getBlockName());
-        if (blockFile.exists()) {
-          // Found a persisted copy for this block!
-          boolean added = persistedBlockIds.add(lb.getBlock().getBlockId());
-          assertThat(added, is(true));
-        } else {
-          LOG.error(blockFile + " not found");
-        }
-      }
-    }
-
-    // We should have found a persisted copy for each located block.
-    assertThat(persistedBlockIds.size(), is(locatedBlocks.getLocatedBlocks().size()));
+    ensureLazyPersistBlocksAreSaved(locatedBlocks);
   }
 
   /**
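----------------------------------------------------------------------

Usage sketch (editor's illustration, not part of the commit): a test in a
subclass of LazyPersistTestCase would call the new helper after writing a
LAZY_PERSIST file and fetching its block locations. Because the DataNode
persists RAM_DISK replicas asynchronously, a single pass over the volumes can
race with the lazy writer; the helper instead polls once per second until
every block shows up under a non-transient volume's lazyPersist directory,
relying on the test timeout to turn a never-finishing loop into a failure.
The test name, file path, length, and seed below are made up for illustration,
and the fragment assumes the usual JUnit/HDFS imports plus the fs and cluster
fields that LazyPersistTestCase provides.

  @Test
  public void lazyPersistReplicasEventuallyReachDisk() throws Exception {
    final long fileLength = 5 * 1024 * 1024;                    // illustrative size
    final Path path = new Path("/tmp/lazyPersistExample.dat");  // illustrative path

    // Write a file with the LAZY_PERSIST hint so replicas land on RAM_DISK first.
    makeRandomTestFile(path, fileLength, true, 0xCAFE);

    // Ask the NameNode for the file's block locations.
    LocatedBlocks locatedBlocks =
        fs.getClient().getLocatedBlocks(path.toString(), 0, fileLength);

    // Poll until every block has a persisted copy on a non-transient volume,
    // or let the test timeout report a failure.
    ensureLazyPersistBlocksAreSaved(locatedBlocks);
  }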