From: wheat9@apache.org
To: common-commits@hadoop.apache.org
Date: Thu, 12 Nov 2015 18:22:16 -0000
Subject: [04/50] [abbrv] hadoop git commit: HDFS-9363. Add fetchReplica to
 FsDatasetTestUtils to return FsDataset-agnostic replica. (Tony Wu via lei)

HDFS-9363. Add fetchReplica to FsDatasetTestUtils to return FsDataset-agnostic
replica. (Tony Wu via lei)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/56671292
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/56671292
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/56671292

Branch: refs/heads/HDFS-8707
Commit: 5667129276c3123ecb0a96b78d5897431c47a9d5
Parents: 0fb1867
Author: Lei Xu
Authored: Wed Nov 4 10:46:19 2015 -0800
Committer: Lei Xu
Committed: Wed Nov 4 10:49:28 2015 -0800

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt                  | 3 +++
 .../src/test/java/org/apache/hadoop/hdfs/TestPipelines.java  | 6 ++----
 .../hadoop/hdfs/server/datanode/FsDatasetTestUtils.java      | 7 +++++++
 .../datanode/fsdataset/impl/FsDatasetImplTestUtils.java      | 5 +++++
 .../datanode/fsdataset/impl/TestInterDatanodeProtocol.java   | 5 +++--
 5 files changed, 20 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/56671292/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 5f3ff11..ef1152e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1623,6 +1623,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9331. Modify TestNameNodeMXBean#testNameNodeMXBeanInfo() to account
     for filesystem entirely allocated for DFS use. (Tony Wu via lei)
 
+    HDFS-9363. Add fetchReplica() to FsDatasetTestUtils to return FsDataset-agnostic
+    replica. (Tony Wu via lei)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/56671292/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPipelines.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPipelines.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPipelines.java
index e4fea60..c9831b0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPipelines.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPipelines.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.datanode.Replica;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
@@ -102,10 +101,9 @@ public class TestPipelines {
     List<LocatedBlock> lb = cluster.getNameNodeRpc().getBlockLocations(
       filePath.toString(), FILE_SIZE - 1, FILE_SIZE).getLocatedBlocks();
 
-    String bpid = cluster.getNamesystem().getBlockPoolId();
     for (DataNode dn : cluster.getDataNodes()) {
-      Replica r = DataNodeTestUtils.fetchReplicaInfo(dn, bpid, lb.get(0)
-          .getBlock().getBlockId());
+      Replica r =
+          cluster.getFsDatasetTestUtils(dn).fetchReplica(lb.get(0).getBlock());
 
       assertTrue("Replica on DN " + dn + " shouldn't be null", r != null);
       assertEquals("Should be RBW replica on " + dn

http://git-wip-us.apache.org/repos/asf/hadoop/blob/56671292/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/FsDatasetTestUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/FsDatasetTestUtils.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/FsDatasetTestUtils.java
index 40c4438..02af467 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/FsDatasetTestUtils.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/FsDatasetTestUtils.java
@@ -206,4 +206,11 @@ public interface FsDatasetTestUtils {
    * @throws IOException on I/O error.
    */
   void injectCorruptReplica(ExtendedBlock block) throws IOException;
+
+  /**
+   * Get the replica of a block. Returns null if it does not exist.
+   * @param block the block whose replica will be returned.
+   * @return Replica for the block.
+   */
+  Replica fetchReplica(ExtendedBlock block);
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/56671292/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImplTestUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImplTestUtils.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImplTestUtils.java
index e8e4532..1ce6b11 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImplTestUtils.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImplTestUtils.java
@@ -317,4 +317,9 @@ public class FsDatasetImplTestUtils implements FsDatasetTestUtils {
       }
     }
   }
+
+  @Override
+  public Replica fetchReplica(ExtendedBlock block) {
+    return dataset.fetchReplicaInfo(block.getBlockPoolId(), block.getBlockId());
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/56671292/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestInterDatanodeProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestInterDatanodeProtocol.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestInterDatanodeProtocol.java
index 8581807..b7efdf6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestInterDatanodeProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestInterDatanodeProtocol.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.datanode.FinalizedReplica;
+import org.apache.hadoop.hdfs.server.datanode.Replica;
 import org.apache.hadoop.hdfs.server.datanode.ReplicaInfo;
 import org.apache.hadoop.hdfs.server.datanode.ReplicaUnderRecovery;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
@@ -354,8 +355,8 @@ public class TestInterDatanodeProtocol {
         new RecoveringBlock(b, null, recoveryid));
 
     //check replica
-    final ReplicaInfo replica = FsDatasetTestUtil.fetchReplicaInfo(
-        fsdataset, bpid, b.getBlockId());
+    final Replica replica =
+        cluster.getFsDatasetTestUtils(datanode).fetchReplica(b);
     Assert.assertEquals(ReplicaState.RUR, replica.getState());
 
     //check meta data before update
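
[Editor's note, not part of the commit above] A minimal sketch of how a test might drive the new FsDataset-agnostic fetchReplica() helper end to end. The class name FetchReplicaExample and the file path are illustrative only; the MiniDFSCluster, DFSTestUtil, and getFsDatasetTestUtils() calls mirror the pattern the patch introduces in TestPipelines and TestInterDatanodeProtocol.

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.Replica;

public class FetchReplicaExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    MiniDFSCluster cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    try {
      cluster.waitActive();
      DistributedFileSystem fs = cluster.getFileSystem();

      // Write a small single-block file so the DataNode holds a replica.
      Path file = new Path("/fetchReplicaExample");
      DFSTestUtil.createFile(fs, file, 1024L, (short) 1, 0L);

      // Locate the file's first (and only) block via the NameNode RPC.
      List<LocatedBlock> blocks = cluster.getNameNodeRpc()
          .getBlockLocations(file.toString(), 0, 1024).getLocatedBlocks();
      ExtendedBlock block = blocks.get(0).getBlock();

      // Look up the replica through the FsDataset-agnostic test utility;
      // a null return means the DataNode has no replica of this block.
      for (DataNode dn : cluster.getDataNodes()) {
        Replica r = cluster.getFsDatasetTestUtils(dn).fetchReplica(block);
        System.out.println("Replica state on " + dn + ": "
            + (r == null ? "absent" : r.getState()));
      }
    } finally {
      cluster.shutdown();
    }
  }
}

Returning the Replica interface rather than a concrete ReplicaInfo keeps callers independent of FsDatasetImpl, which is the point of routing the lookup through FsDatasetTestUtils instead of DataNodeTestUtils.fetchReplicaInfo().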