From: virajith@apache.org
To: common-commits@hadoop.apache.org
Subject: hadoop git commit: HDFS-12093. [READ] Share remoteFS between ProvidedReplica instances.
Date: Mon, 7 Aug 2017 21:31:28 +0000 (UTC)
Message-Id: <47b8d4fef5d84de0ae5c227e62f6f133@git.apache.org>

Repository: hadoop
Updated Branches:
  refs/heads/HDFS-9806 77b671cf4 -> 5c2a0a1c5


HDFS-12093. [READ] Share remoteFS between ProvidedReplica instances.
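In short: each ProvidedReplica previously opened its own handle to the remote store (FileSystem.get() in the constructor, FileSystem.newInstance() in the copy constructor). With this change, ProvidedVolumeImpl resolves the remote FileSystem once and hands the same instance to every replica through ReplicaBuilder.setRemoteFS(). The sketch below illustrates only the sharing pattern; SharedRemoteFsSketch, SketchReplica and the file:///tmp/provided-store URI are made-up names for illustration, not part of the patch.

import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

/**
 * Illustrative only (not the actual HDFS classes): a volume-like owner
 * resolves one FileSystem handle and passes the same instance to every
 * replica it builds, instead of each replica calling FileSystem.get()
 * on its own.
 */
public class SharedRemoteFsSketch {

  /** Stand-in for a provided replica; it only stores the shared handle. */
  static class SketchReplica {
    private final URI blockURI;
    private final FileSystem remoteFS; // shared, owned by the "volume"

    SketchReplica(URI blockURI, FileSystem remoteFS) {
      this.blockURI = blockURI;
      this.remoteFS = remoteFS;
    }
  }

  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Hypothetical remote store backing the provided volume.
    URI base = URI.create("file:///tmp/provided-store");

    // Resolve the remote FileSystem once, at "volume" construction time...
    FileSystem remoteFS = FileSystem.get(base, conf);

    // ...and reuse the same handle for every block in the volume map.
    List<SketchReplica> replicas = new ArrayList<>();
    for (int i = 0; i < 4; i++) {
      replicas.add(new SketchReplica(URI.create(base + "/block-" + i), remoteFS));
    }
    System.out.println("Built " + replicas.size()
        + " replicas sharing one FileSystem instance");
  }
}

The patch keeps the old per-replica lookup as a fallback when no shared FileSystem is supplied (remoteFS == null), as the diff below shows.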
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5c2a0a1c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5c2a0a1c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5c2a0a1c

Branch: refs/heads/HDFS-9806
Commit: 5c2a0a1c5b3c5eb3be561513aebab39f7f6d30db
Parents: 77b671c
Author: Virajith Jalaparti
Authored: Mon Aug 7 14:31:15 2017 -0700
Committer: Virajith Jalaparti
Committed: Mon Aug 7 14:31:15 2017 -0700

----------------------------------------------------------------------
 .../datanode/FinalizedProvidedReplica.java      |  6 +++--
 .../hdfs/server/datanode/ProvidedReplica.java   | 25 +++++++++++---------
 .../hdfs/server/datanode/ReplicaBuilder.java    | 11 +++++++--
 .../fsdataset/impl/ProvidedVolumeImpl.java      | 17 +++++++++----
 .../datanode/TestProvidedReplicaImpl.java       |  2 +-
 5 files changed, 40 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c2a0a1c/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FinalizedProvidedReplica.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FinalizedProvidedReplica.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FinalizedProvidedReplica.java
index 722d573..e23d6be 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FinalizedProvidedReplica.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FinalizedProvidedReplica.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.datanode;
 import java.net.URI;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
@@ -31,8 +32,9 @@ public class FinalizedProvidedReplica extends ProvidedReplica {
 
   public FinalizedProvidedReplica(long blockId, URI fileURI,
       long fileOffset, long blockLen, long genStamp,
-      FsVolumeSpi volume, Configuration conf) {
-    super(blockId, fileURI, fileOffset, blockLen, genStamp, volume, conf);
+      FsVolumeSpi volume, Configuration conf, FileSystem remoteFS) {
+    super(blockId, fileURI, fileOffset, blockLen, genStamp, volume, conf,
+        remoteFS);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c2a0a1c/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProvidedReplica.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProvidedReplica.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProvidedReplica.java
index 946ab5a..2b3bd13 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProvidedReplica.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ProvidedReplica.java
@@ -65,16 +65,23 @@ public abstract class ProvidedReplica extends ReplicaInfo {
    * @param volume the volume this block belongs to
    */
   public ProvidedReplica(long blockId, URI fileURI, long fileOffset,
-      long blockLen, long genStamp, FsVolumeSpi volume, Configuration conf) {
+      long blockLen, long genStamp, FsVolumeSpi volume, Configuration conf,
+      FileSystem remoteFS) {
     super(volume, blockId, blockLen, genStamp);
     this.fileURI = fileURI;
     this.fileOffset = fileOffset;
     this.conf = conf;
-    try {
-      this.remoteFS = FileSystem.get(fileURI, this.conf);
-    } catch (IOException e) {
-      LOG.warn("Failed to obtain filesystem for " + fileURI);
-      this.remoteFS = null;
+    if (remoteFS != null) {
+      this.remoteFS = remoteFS;
+    } else {
+      LOG.warn(
+          "Creating an reference to the remote FS for provided block " + this);
+      try {
+        this.remoteFS = FileSystem.get(fileURI, this.conf);
+      } catch (IOException e) {
+        LOG.warn("Failed to obtain filesystem for " + fileURI);
+        this.remoteFS = null;
+      }
     }
   }
 
@@ -83,11 +90,7 @@ public abstract class ProvidedReplica extends ReplicaInfo {
     this.fileURI = r.fileURI;
     this.fileOffset = r.fileOffset;
     this.conf = r.conf;
-    try {
-      this.remoteFS = FileSystem.newInstance(fileURI, this.conf);
-    } catch (IOException e) {
-      this.remoteFS = null;
-    }
+    this.remoteFS = r.remoteFS;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c2a0a1c/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBuilder.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBuilder.java
index 639467f..c5cb6a5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBuilder.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBuilder.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.datanode;
 import java.io.File;
 import java.net.URI;
 
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
@@ -50,6 +51,7 @@ public class ReplicaBuilder {
   private long offset;
   private Configuration conf;
   private FileRegion fileRegion;
+  private FileSystem remoteFS;
 
   public ReplicaBuilder(ReplicaState state) {
     volume = null;
@@ -138,6 +140,11 @@ public class ReplicaBuilder {
     return this;
   }
 
+  public ReplicaBuilder setRemoteFS(FileSystem remoteFS) {
+    this.remoteFS = remoteFS;
+    return this;
+  }
+
   public LocalReplicaInPipeline buildLocalReplicaInPipeline()
       throws IllegalArgumentException {
     LocalReplicaInPipeline info = null;
@@ -275,14 +282,14 @@ public class ReplicaBuilder {
       }
       if (fileRegion == null) {
         info = new FinalizedProvidedReplica(blockId, uri, offset,
-            length, genStamp, volume, conf);
+            length, genStamp, volume, conf, remoteFS);
       } else {
         info = new FinalizedProvidedReplica(fileRegion.getBlock().getBlockId(),
             fileRegion.getPath().toUri(),
             fileRegion.getOffset(),
             fileRegion.getBlock().getNumBytes(),
             fileRegion.getBlock().getGenerationStamp(),
-            volume, conf);
+            volume, conf, remoteFS);
       }
       return info;
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c2a0a1c/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
index 5cd28c7..d1a7015 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
@@ -28,6 +28,7 @@ import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -96,7 +97,8 @@ public class ProvidedVolumeImpl extends FsVolumeImpl {
     }
 
     public void getVolumeMap(ReplicaMap volumeMap,
-        RamDiskReplicaTracker ramDiskReplicaMap) throws IOException {
+        RamDiskReplicaTracker ramDiskReplicaMap, FileSystem remoteFS)
+        throws IOException {
       Iterator iter = provider.iterator();
       while (iter.hasNext()) {
         FileRegion region = iter.next();
@@ -112,9 +114,10 @@ public class ProvidedVolumeImpl extends FsVolumeImpl {
           .setGenerationStamp(region.getBlock().getGenerationStamp())
           .setFsVolume(providedVolume)
           .setConf(conf)
+          .setRemoteFS(remoteFS)
           .build();
-      // check if the replica already exists
-      ReplicaInfo oldReplica = volumeMap.get(bpid, newReplica.getBlockId());
+      ReplicaInfo oldReplica =
+          volumeMap.get(bpid, newReplica.getBlockId());
       if (oldReplica == null) {
         volumeMap.add(bpid, newReplica);
         bpVolumeMap.add(bpid, newReplica);
@@ -163,6 +166,8 @@ public class ProvidedVolumeImpl extends FsVolumeImpl {
       new ConcurrentHashMap();
 
   private ProvidedVolumeDF df;
+  //the remote FileSystem to which this ProvidedVolume points to.
+  private FileSystem remoteFS;
 
   ProvidedVolumeImpl(FsDatasetImpl dataset, String storageID,
       StorageDirectory sd, FileIoProvider fileIoProvider,
@@ -176,6 +181,7 @@ public class ProvidedVolumeImpl extends FsVolumeImpl {
         conf.getClass(DFSConfigKeys.DFS_PROVIDER_DF_CLASS,
             DefaultProvidedVolumeDF.class, ProvidedVolumeDF.class);
     df = ReflectionUtils.newInstance(dfClass, conf);
+    remoteFS = FileSystem.get(baseURI, conf);
   }
 
   @Override
@@ -397,7 +403,7 @@ public class ProvidedVolumeImpl extends FsVolumeImpl {
       throws IOException {
     LOG.info("Creating volumemap for provided volume " + this);
     for(ProvidedBlockPoolSlice s : bpSlices.values()) {
-      s.getVolumeMap(volumeMap, ramDiskReplicaMap);
+      s.getVolumeMap(volumeMap, ramDiskReplicaMap, remoteFS);
     }
   }
 
@@ -414,7 +420,8 @@ public class ProvidedVolumeImpl extends FsVolumeImpl {
   void getVolumeMap(String bpid, ReplicaMap volumeMap,
       final RamDiskReplicaTracker ramDiskReplicaMap)
       throws IOException {
-    getProvidedBlockPoolSlice(bpid).getVolumeMap(volumeMap, ramDiskReplicaMap);
+    getProvidedBlockPoolSlice(bpid).getVolumeMap(volumeMap, ramDiskReplicaMap,
+        remoteFS);
   }
 
   @VisibleForTesting

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c2a0a1c/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestProvidedReplicaImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestProvidedReplicaImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestProvidedReplicaImpl.java
index 8258c21..967e94d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestProvidedReplicaImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestProvidedReplicaImpl.java
@@ -87,7 +87,7 @@ public class TestProvidedReplicaImpl {
           FILE_LEN >= (i+1)*BLK_LEN ? BLK_LEN : FILE_LEN - i*BLK_LEN;
       replicas.add(
           new FinalizedProvidedReplica(i, providedFile.toURI(), i*BLK_LEN,
-          currentReplicaLength, 0, null, conf));
+          currentReplicaLength, 0, null, conf, null));
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org