From: asuresh@apache.org
To: common-commits@hadoop.apache.org
Date: Mon, 12 Dec 2016 16:09:17 -0000
Subject: [41/50] [abbrv] hadoop git commit: Revert "HDFS-11094. Send back HAState along with NamespaceInfo during a versionRequest as an optional parameter. Contributed by Eric Badger"

Revert "HDFS-11094. Send back HAState along with NamespaceInfo during a
versionRequest as an optional parameter. Contributed by Eric Badger"

This reverts commit 8c4680852b20ad0e65e77dd123c9ba5bb6f2fa39.
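
[Editor's note] For context, the reverted change (HDFS-11094) had let the NameNode piggyback its HA state on the NamespaceInfo returned from versionRequest, so a DataNode could mark the active NameNode during the initial handshake instead of waiting for the first heartbeat. A minimal, self-contained sketch of that idea follows. The types and names below are simplified stand-ins for illustration only, not Hadoop's real classes; the real ones (NamespaceInfo.getState(), BPOfferService.verifyAndSetNamespaceInfo(actor, nsInfo)) appear in the diff further down.

// Simplified model of the reverted behavior; stand-in types, not Hadoop classes.
public class HandshakeSketch {

  enum HAState { INITIALIZING, ACTIVE, STANDBY }

  /** Stand-in for NamespaceInfo: HDFS-11094 added an optional HA state field. */
  static class NsInfo {
    final String blockPoolId;
    final HAState state;          // null when the NameNode did not send a state
    NsInfo(String blockPoolId, HAState state) {
      this.blockPoolId = blockPoolId;
      this.state = state;
    }
  }

  /** Stand-in for BPOfferService: tracks which NameNode is currently active. */
  static class OfferService {
    private String activeNn;      // stand-in for bpServiceToActive

    // With HDFS-11094: an ACTIVE state piggybacked on versionRequest lets the
    // DataNode acknowledge the active NameNode during the very first handshake.
    void verifyAndSetNamespaceInfo(String nnId, NsInfo nsInfo) {
      if (nsInfo.state == HAState.ACTIVE && activeNn == null) {
        activeNn = nnId;
      }
      // ... block pool / cluster id verification would follow here ...
    }

    String getActiveNn() { return activeNn; }
  }

  public static void main(String[] args) {
    OfferService bpos = new OfferService();

    // Post-revert behavior: no state in the handshake, so nothing is learned.
    bpos.verifyAndSetNamespaceInfo("nn1", new NsInfo("bp-1", null));
    System.out.println("after plain handshake:    " + bpos.getActiveNn());   // null

    // Reverted (HDFS-11094) behavior: the active NameNode is known immediately.
    bpos.verifyAndSetNamespaceInfo("nn1", new NsInfo("bp-1", HAState.ACTIVE));
    System.out.println("after stateful handshake: " + bpos.getActiveNn());   // nn1
  }
}
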
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9f8344db
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9f8344db
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9f8344db

Branch: refs/heads/YARN-5085
Commit: 9f8344db502b833efe0f2c554b67098e77063d20
Parents: 401c731
Author: Kihwal Lee
Authored: Thu Dec 8 15:36:08 2016 -0600
Committer: Kihwal Lee
Committed: Thu Dec 8 15:36:08 2016 -0600

----------------------------------------------------------------------
 .../apache/hadoop/hdfs/protocolPB/PBHelper.java | 76 +++++++-------------
 .../hdfs/server/datanode/BPOfferService.java    | 10 +--
 .../hdfs/server/datanode/BPServiceActor.java    |  4 +-
 .../hdfs/server/namenode/FSNamesystem.java      |  8 +--
 .../hdfs/server/protocol/NamespaceInfo.java     | 26 -------
 .../hadoop-hdfs/src/main/proto/HdfsServer.proto |  2 -
 .../server/datanode/TestBPOfferService.java     | 31 --------
 .../hdfs/server/namenode/TestFSNamesystem.java  | 21 ------
 8 files changed, 30 insertions(+), 148 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f8344db/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
index 1e6d882..78371f5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
@@ -26,7 +26,7 @@
 import com.google.protobuf.ByteString;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
-import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto;
+import org.apache.hadoop.ha.proto.HAServiceProtocolProtos;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -338,8 +338,7 @@ public class PBHelper {
     StorageInfoProto storage = info.getStorageInfo();
     return new NamespaceInfo(storage.getNamespceID(), storage.getClusterID(),
         info.getBlockPoolID(), storage.getCTime(), info.getBuildVersion(),
-        info.getSoftwareVersion(), info.getCapabilities(),
-        convert(info.getState()));
+        info.getSoftwareVersion(), info.getCapabilities());
   }
 
   public static NamenodeCommand convert(NamenodeCommandProto cmd) {
@@ -745,68 +744,43 @@ public class PBHelper {
   }
 
   public static NamespaceInfoProto convert(NamespaceInfo info) {
-    NamespaceInfoProto.Builder builder = NamespaceInfoProto.newBuilder();
-    builder.setBlockPoolID(info.getBlockPoolID())
+    return NamespaceInfoProto.newBuilder()
+        .setBlockPoolID(info.getBlockPoolID())
         .setBuildVersion(info.getBuildVersion())
         .setUnused(0)
         .setStorageInfo(PBHelper.convert((StorageInfo)info))
         .setSoftwareVersion(info.getSoftwareVersion())
-        .setCapabilities(info.getCapabilities());
-    HAServiceState state = info.getState();
-    if(state != null) {
-      builder.setState(convert(info.getState()));
-    }
-    return builder.build();
+        .setCapabilities(info.getCapabilities())
+        .build();
   }
 
-  public static HAServiceState convert(HAServiceStateProto s) {
-    if (s == null) {
-      return null;
-    }
-    switch (s) {
-    case INITIALIZING:
-      return HAServiceState.INITIALIZING;
-    case ACTIVE:
-      return HAServiceState.ACTIVE;
-    case STANDBY:
-      return HAServiceState.STANDBY;
-    default:
-      throw new IllegalArgumentException("Unexpected HAServiceStateProto:"
-          + s);
-    }
-  }
-
-  public static HAServiceStateProto convert(HAServiceState s) {
-    if (s == null) {
-      return null;
-    }
-    switch (s) {
-    case INITIALIZING:
-      return HAServiceStateProto.INITIALIZING;
+  public static NNHAStatusHeartbeat convert(NNHAStatusHeartbeatProto s) {
+    if (s == null) return null;
+    switch (s.getState()) {
     case ACTIVE:
-      return HAServiceStateProto.ACTIVE;
+      return new NNHAStatusHeartbeat(HAServiceState.ACTIVE, s.getTxid());
     case STANDBY:
-      return HAServiceStateProto.STANDBY;
+      return new NNHAStatusHeartbeat(HAServiceState.STANDBY, s.getTxid());
     default:
-      throw new IllegalArgumentException("Unexpected HAServiceState:"
-          + s);
-    }
-  }
-
-  public static NNHAStatusHeartbeat convert(NNHAStatusHeartbeatProto s) {
-    if (s == null) {
-      return null;
+      throw new IllegalArgumentException("Unexpected NNHAStatusHeartbeat.State:" + s.getState());
     }
-    return new NNHAStatusHeartbeat(convert(s.getState()), s.getTxid());
   }
 
   public static NNHAStatusHeartbeatProto convert(NNHAStatusHeartbeat hb) {
-    if (hb == null) {
-      return null;
-    }
+    if (hb == null) return null;
     NNHAStatusHeartbeatProto.Builder builder =
-        NNHAStatusHeartbeatProto.newBuilder();
-    builder.setState(convert(hb.getState()));
+      NNHAStatusHeartbeatProto.newBuilder();
+    switch (hb.getState()) {
+    case ACTIVE:
+      builder.setState(HAServiceProtocolProtos.HAServiceStateProto.ACTIVE);
+      break;
+    case STANDBY:
+      builder.setState(HAServiceProtocolProtos.HAServiceStateProto.STANDBY);
+      break;
+    default:
+      throw new IllegalArgumentException("Unexpected NNHAStatusHeartbeat.State:" +
+          hb.getState());
+    }
     builder.setTxid(hb.getTxId());
     return builder.build();
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f8344db/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java
index 00e6b3e..00102eb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java
@@ -307,16 +307,8 @@ class BPOfferService {
    * verifies that this namespace matches (eg to prevent a misconfiguration
    * where a StandbyNode from a different cluster is specified)
    */
-  void verifyAndSetNamespaceInfo(BPServiceActor actor, NamespaceInfo nsInfo)
-      throws IOException {
+  void verifyAndSetNamespaceInfo(NamespaceInfo nsInfo) throws IOException {
     writeLock();
-
-    if(nsInfo.getState() == HAServiceState.ACTIVE
-        && bpServiceToActive == null) {
-      LOG.info("Acknowledging ACTIVE Namenode during handshake" + actor);
-      bpServiceToActive = actor;
-    }
-
     try {
       if (this.bpNSInfo == null) {
         this.bpNSInfo = nsInfo;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f8344db/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
index dffe14f..f3247fc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPServiceActor.java
@@ -269,11 +269,11 @@ class BPServiceActor implements Runnable {
     // First phase of the handshake with NN - get the namespace
     // info.
     NamespaceInfo nsInfo = retrieveNamespaceInfo();
-    
+
     // Verify that this matches the other NN in this HA pair.
     // This also initializes our block pool in the DN if we are
     // the first NN connection for this BP.
-    bpos.verifyAndSetNamespaceInfo(this, nsInfo);
+    bpos.verifyAndSetNamespaceInfo(nsInfo);
 
     // Second phase of the handshake with the NN.
     register(nsInfo);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f8344db/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 90fb924..8a750a0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -1594,7 +1594,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
   NamespaceInfo unprotectedGetNamespaceInfo() {
     return new NamespaceInfo(getFSImage().getStorage().getNamespaceID(),
         getClusterId(), getBlockPoolId(),
-        getFSImage().getStorage().getCTime(), getState());
+        getFSImage().getStorage().getCTime());
   }
 
   /**
@@ -4531,16 +4531,12 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
       return 0;
     }
   }
-  
+
   @Metric
   public int getBlockCapacity() {
     return blockManager.getCapacity();
   }
 
-  public HAServiceState getState() {
-    return haContext == null ? null : haContext.getState().getServiceState();
-  }
-
   @Override // FSNamesystemMBean
   public String getFSState() {
"safeMode" : "Operational"; http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f8344db/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java index 66ce9ee..90d0aac 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamespaceInfo.java @@ -22,7 +22,6 @@ import java.io.IOException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.common.Storage; import org.apache.hadoop.hdfs.server.common.StorageInfo; @@ -45,7 +44,6 @@ public class NamespaceInfo extends StorageInfo { String blockPoolID = ""; // id of the block pool String softwareVersion; long capabilities; - HAServiceState state; // only authoritative on the server-side to determine advertisement to // clients. enum will update the supported values @@ -90,14 +88,6 @@ public class NamespaceInfo extends StorageInfo { CAPABILITIES_SUPPORTED); } - public NamespaceInfo(int nsID, String clusterID, String bpID, - long cT, String buildVersion, String softwareVersion, - long capabilities, HAServiceState st) { - this(nsID, clusterID, bpID, cT, buildVersion, softwareVersion, - capabilities); - this.state = st; - } - // for use by server and/or client public NamespaceInfo(int nsID, String clusterID, String bpID, long cT, String buildVersion, String softwareVersion, @@ -115,13 +105,6 @@ public class NamespaceInfo extends StorageInfo { this(nsID, clusterID, bpID, cT, Storage.getBuildVersion(), VersionInfo.getVersion()); } - - public NamespaceInfo(int nsID, String clusterID, String bpID, - long cT, HAServiceState st) { - this(nsID, clusterID, bpID, cT, Storage.getBuildVersion(), - VersionInfo.getVersion()); - this.state = st; - } public long getCapabilities() { return capabilities; @@ -132,11 +115,6 @@ public class NamespaceInfo extends StorageInfo { this.capabilities = capabilities; } - @VisibleForTesting - public void setState(HAServiceState state) { - this.state = state; - } - public boolean isCapabilitySupported(Capability capability) { Preconditions.checkArgument(capability != Capability.UNKNOWN, "cannot test for unknown capability"); @@ -156,10 +134,6 @@ public class NamespaceInfo extends StorageInfo { return softwareVersion; } - public HAServiceState getState() { - return state; - } - @Override public String toString(){ return super.toString() + ";bpid=" + blockPoolID; http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f8344db/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto index d7deebf..910e03b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto @@ -32,7 +32,6 @@ option java_generate_equals_and_hash = 
@@ -32,7 +32,6 @@ option java_generate_equals_and_hash = true;
 package hadoop.hdfs;
 
 import "hdfs.proto";
-import "HAServiceProtocol.proto";
 
 /**
  * Block access token information
  */
@@ -102,7 +101,6 @@ message NamespaceInfoProto {
   required StorageInfoProto storageInfo = 4;// Node information
   required string softwareVersion = 5;      // Software version number (e.g. 2.0.0)
   optional uint64 capabilities = 6 [default = 0]; // feature flags
-  optional hadoop.common.HAServiceStateProto state = 7;
 }
 
 /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f8344db/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
index f8f0a3c..2d50c75 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.server.datanode;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 
@@ -800,34 +799,4 @@ public class TestBPOfferService {
     }
     return -1;
   }
-
-  /*
-   *
-   */
-  @Test
-  public void testNNHAStateUpdateFromVersionRequest() throws Exception {
-    final BPOfferService bpos = setupBPOSForNNs(mockNN1, mockNN2);
-    BPServiceActor actor = bpos.getBPServiceActors().get(0);
-    bpos.start();
-    waitForInitialization(bpos);
-    // Should start with neither NN as active.
-    assertNull(bpos.getActiveNN());
-
-    // getNamespaceInfo() will not include HAServiceState
-    NamespaceInfo nsInfo = mockNN1.versionRequest();
-    bpos.verifyAndSetNamespaceInfo(actor, nsInfo);
-
-    assertNull(bpos.getActiveNN());
-
-    // Change mock so getNamespaceInfo() will include HAServiceState
-    Mockito.doReturn(new NamespaceInfo(1, FAKE_CLUSTERID, FAKE_BPID, 0,
-        HAServiceState.ACTIVE)).when(mockNN1).versionRequest();
-
-    // Update the bpos NamespaceInfo
-    nsInfo = mockNN1.versionRequest();
-    bpos.verifyAndSetNamespaceInfo(actor, nsInfo);
-
-    assertNotNull(bpos.getActiveNN());
-
-  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f8344db/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java
index 6a0dd6f..f02c679 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSNamesystem.java
@@ -33,7 +33,6 @@ import java.util.Collection;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.ha.HAServiceProtocol;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -44,7 +43,6 @@ import org.apache.hadoop.hdfs.server.namenode.ha.HAContext;
 import org.apache.hadoop.hdfs.server.namenode.ha.HAState;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 import org.apache.hadoop.hdfs.server.namenode.top.TopAuditLogger;
-import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.junit.After;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -157,25 +155,6 @@ public class TestFSNamesystem {
   }
 
   @Test
-  public void testHAStateInNamespaceInfo() throws IOException {
-    Configuration conf = new Configuration();
-
-    FSEditLog fsEditLog = Mockito.mock(FSEditLog.class);
-    FSImage fsImage = Mockito.mock(FSImage.class);
-    Mockito.when(fsImage.getEditLog()).thenReturn(fsEditLog);
-    NNStorage nnStorage = Mockito.mock(NNStorage.class);
-    Mockito.when(fsImage.getStorage()).thenReturn(nnStorage);
-
-    FSNamesystem fsNamesystem = new FSNamesystem(conf, fsImage);
-    FSNamesystem fsn = Mockito.spy(fsNamesystem);
-    Mockito.when(fsn.getState()).thenReturn(
-        HAServiceProtocol.HAServiceState.ACTIVE);
-
-    NamespaceInfo nsInfo = fsn.unprotectedGetNamespaceInfo();
-    assertNotNull(nsInfo.getState());
-  }
-
-  @Test
   public void testReset() throws Exception {
     Configuration conf = new Configuration();
     FSEditLog fsEditLog = Mockito.mock(FSEditLog.class);

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org
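
[Editor's note] With this revert in place, the DataNode learns which NameNode is active only through the NNHAStatusHeartbeat carried in heartbeat responses (the switch-based conversions restored in PBHelper above), not through the versionRequest handshake. A rough sketch of that remaining path follows; as before, these are simplified stand-in types for illustration, not Hadoop's real classes, and the staleness guard is only a loose approximation of the real heartbeat handling in BPOfferService.

// Post-revert picture, modeled with stand-in types (not Hadoop classes):
// the HA state reaches the DataNode only inside heartbeat responses.
public class HeartbeatSketch {

  enum HAState { ACTIVE, STANDBY }

  /** Stand-in for NNHAStatusHeartbeat: HA state plus the NameNode's latest txid. */
  static class NnHaStatus {
    final HAState state;
    final long txid;
    NnHaStatus(HAState state, long txid) {
      this.state = state;
      this.txid = txid;
    }
  }

  /** Stand-in for the heartbeat handling in BPOfferService. */
  static class OfferService {
    private String activeNn;
    private long lastActiveClaimTxid = Long.MIN_VALUE;

    // The NameNode that reports ACTIVE with the most recent txid claim becomes
    // the active one; the real code also guards against stale or split-brain claims.
    void updateFromHeartbeat(String nnId, NnHaStatus status) {
      if (status.state == HAState.ACTIVE && status.txid >= lastActiveClaimTxid) {
        activeNn = nnId;
        lastActiveClaimTxid = status.txid;
      }
    }

    String getActiveNn() { return activeNn; }
  }

  public static void main(String[] args) {
    OfferService bpos = new OfferService();
    // The handshake no longer carries HA state, so the DataNode waits for heartbeats:
    bpos.updateFromHeartbeat("nn2", new NnHaStatus(HAState.STANDBY, 40L));
    bpos.updateFromHeartbeat("nn1", new NnHaStatus(HAState.ACTIVE, 42L));
    System.out.println("active NameNode: " + bpos.getActiveNn()); // nn1
  }
}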