From: nanda@apache.org
To: common-commits@hadoop.apache.org
Date: Fri, 26 Oct 2018 12:51:09 -0000
Message-Id: <95b9badd41c945f48cf8b6adfbb0bf62@git.apache.org>
Subject: [1/4] hadoop git commit: HDDS-694. Plugin new Pipeline management code in SCM. Contributed by Lokesh Jain.

Repository: hadoop
Updated Branches:
  refs/heads/trunk e28c00c29 -> dce4ebe81


http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
index e260924..52340a9 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java
@@ -140,7 +140,7 @@ public class TestStorageContainerManager {
       if (expectPermissionDenied) {
         fail("Operation should fail, expecting an IOException here.");
       } else {
-        Assert.assertEquals(1, container2.getPipeline().getMachines().size());
+        Assert.assertEquals(1, container2.getPipeline().getNodes().size());
       }
     } catch (Exception e) {
       verifyPermissionDeniedException(e, fakeRemoteUsername);
@@ -153,7 +153,7 @@ public class TestStorageContainerManager {
       if (expectPermissionDenied) {
         fail("Operation should fail, expecting an IOException here.");
       } else {
-        Assert.assertEquals(1, container3.getPipeline().getMachines().size());
+        Assert.assertEquals(1, container3.getPipeline().getNodes().size());
       }
     } catch (Exception e) {
       verifyPermissionDeniedException(e, fakeRemoteUsername);
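The hunks above, and most of the test changes below, replace Pipeline#getMachines() with Pipeline#getNodes() on the relocated org.apache.hadoop.hdds.scm.pipeline.Pipeline class. A minimal sketch of the lookup the tests now rely on; the wrapper class and method are illustrative only and not part of the patch:

    import java.util.List;

    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.scm.pipeline.Pipeline;

    // Illustrative helper, not from the patch: reads replica locations
    // through the new Pipeline API (getNodes() replaces getMachines()).
    final class PipelineNodesSketch {
      static DatanodeDetails firstReplica(Pipeline pipeline) {
        List<DatanodeDetails> nodes = pipeline.getNodes();
        return nodes.get(0);
      }
    }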
http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManagerHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManagerHelper.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManagerHelper.java
index 217d3f4..0051ecb 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManagerHelper.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManagerHelper.java
@@ -162,9 +162,10 @@ public class TestStorageContainerManagerHelper {
         .getStorageContainerManager().getClientProtocolServer()
         .getContainerWithPipeline(containerID);
-    DatanodeDetails leadDN = containerWithPipeline.getPipeline().getLeader();
+    DatanodeDetails dn =
+        containerWithPipeline.getPipeline().getFirstNode();
     OzoneContainer containerServer =
-        getContainerServerByDatanodeUuid(leadDN.getUuidString());
+        getContainerServerByDatanodeUuid(dn.getUuidString());
     KeyValueContainerData containerData =
         (KeyValueContainerData) containerServer.getContainerSet()
             .getContainer(containerID).getContainerData();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/TestOzoneRestClient.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/TestOzoneRestClient.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/TestOzoneRestClient.java
index 26ece8b..fe060a6 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/TestOzoneRestClient.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rest/TestOzoneRestClient.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.hdds.protocol.StorageType;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.scm.container.ContainerID;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.ozone.*;
 import org.apache.hadoop.ozone.client.*;
 import org.apache.hadoop.hdds.client.OzoneQuota;
@@ -449,7 +449,7 @@ public class TestOzoneRestClient {
         .getContainerManager().getContainerWithPipeline(
             ContainerID.valueof(containerID))
         .getPipeline();
-    List<DatanodeDetails> datanodes = pipeline.getMachines();
+    List<DatanodeDetails> datanodes = pipeline.getNodes();
     Assert.assertEquals(datanodes.size(), 1);
     DatanodeDetails datanodeDetails = datanodes.get(0);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestCloseContainerHandlingByClient.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestCloseContainerHandlingByClient.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestCloseContainerHandlingByClient.java
index ee9919d..76f6f8c 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestCloseContainerHandlingByClient.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestCloseContainerHandlingByClient.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hdds.scm.container.common.helpers.
     StorageContainerException;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.scm.ScmConfigKeys;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.ozone.OzoneConsts;
@@ -306,7 +306,7 @@ public class TestCloseContainerHandlingByClient {
           .getContainerWithPipeline(ContainerID.valueof(containerID))
           .getPipeline();
       pipelineList.add(pipeline);
-      List<DatanodeDetails> datanodes = pipeline.getMachines();
+      List<DatanodeDetails> datanodes = pipeline.getNodes();
       for (DatanodeDetails details : datanodes) {
         Assert.assertFalse(ContainerTestHelper
             .isContainerClosed(cluster, containerID, details));
@@ -319,7 +319,7 @@ public class TestCloseContainerHandlingByClient {
     int index = 0;
     for (long containerID : containerIdList) {
       Pipeline pipeline = pipelineList.get(index);
-      List<DatanodeDetails> datanodes = pipeline.getMachines();
+      List<DatanodeDetails> datanodes = pipeline.getNodes();
       for (DatanodeDetails datanodeDetails : datanodes) {
         GenericTestUtils.waitFor(() -> ContainerTestHelper
             .isContainerClosed(cluster, containerID, datanodeDetails), 500,
@@ -352,7 +352,7 @@ public class TestCloseContainerHandlingByClient {
     List<DatanodeDetails> datanodes =
         cluster.getStorageContainerManager().getContainerManager()
             .getContainerWithPipeline(ContainerID.valueof(containerID))
-            .getPipeline().getMachines();
+            .getPipeline().getNodes();
     Assert.assertEquals(1, datanodes.size());
     waitForContainerClose(keyName, key, HddsProtos.ReplicationType.STAND_ALONE);
     dataString = fixedLengthString(keyString, (1 * blockSize));
@@ -455,7 +455,7 @@ public class TestCloseContainerHandlingByClient {
     List<DatanodeDetails> datanodes =
         cluster.getStorageContainerManager().getContainerManager()
            .getContainerWithPipeline(ContainerID.valueof(containerID))
-            .getPipeline().getMachines();
+            .getPipeline().getNodes();
     Assert.assertEquals(1, datanodes.size());
     // move the container on the datanode to Closing state, this will ensure
     // closing the key will hit BLOCK_NOT_COMMITTED_EXCEPTION while trying

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneRpcClient.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneRpcClient.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneRpcClient.java
index d507303..0b51bb3 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneRpcClient.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneRpcClient.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hdds.scm.XceiverClientRatis;
 import org.apache.hadoop.hdds.scm.XceiverClientSpi;
 import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.ozone.*;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.ozone.client.*;
@@ -649,7 +649,7 @@ public class TestOzoneRpcClient {
         cluster.getStorageContainerManager().getContainerManager()
             .getContainerWithPipeline(new ContainerID(containerID));
     Pipeline pipeline = container.getPipeline();
-    List<DatanodeDetails> datanodes = pipeline.getMachines();
+    List<DatanodeDetails> datanodes = pipeline.getNodes();
     DatanodeDetails datanodeDetails = datanodes.get(0);
     Assert.assertNotNull(datanodeDetails);
@@ -754,7 +754,7 @@ public class TestOzoneRpcClient {
         .getContainerManager().getContainerWithPipeline(
             ContainerID.valueof(containerID))
         .getPipeline();
-    List<DatanodeDetails> datanodes = pipeline.getMachines();
+    List<DatanodeDetails> datanodes = pipeline.getNodes();
     Assert.assertEquals(datanodes.size(), 1);
     DatanodeDetails datanodeDetails = datanodes.get(0);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
index 3969ddd..bde3bc9 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
@@ -21,7 +21,8 @@ package org.apache.hadoop.ozone.container;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.StorageUnit;
 import org.apache.hadoop.hdds.HddsUtils;
-import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineID;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
 import org.apache.hadoop.ozone.HddsDatanodeService;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.container.common.impl.ContainerData;
@@ -35,14 +36,12 @@ import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandRequestProto;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.KeyValue;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
 import org.apache.hadoop.ozone.container.common.helpers.BlockData;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.junit.Assert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -128,17 +127,16 @@ public final class ContainerTestHelper {
   public static Pipeline createPipeline(
       Iterable<DatanodeDetails> ids) throws IOException {
     Objects.requireNonNull(ids, "ids == null");
-    final Iterator<DatanodeDetails> i = ids.iterator();
-    Preconditions.checkArgument(i.hasNext());
-    final DatanodeDetails leader = i.next();
-    final Pipeline pipeline =
-        new Pipeline(leader.getUuidString(), LifeCycleState.OPEN,
-            ReplicationType.STAND_ALONE, ReplicationFactor.ONE,
-            PipelineID.randomId());
-    pipeline.addMember(leader);
-    for(; i.hasNext();) {
-      pipeline.addMember(i.next());
-    }
+    Preconditions.checkArgument(ids.iterator().hasNext());
+    List<DatanodeDetails> dns = new ArrayList<>();
+    ids.forEach(dns::add);
+    Pipeline pipeline = Pipeline.newBuilder()
+        .setState(Pipeline.PipelineState.OPEN)
+        .setId(PipelineID.randomId())
+        .setType(HddsProtos.ReplicationType.STAND_ALONE)
+        .setFactor(ReplicationFactor.ONE)
+        .setNodes(dns)
+        .build();
     return pipeline;
   }
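The createPipeline hunk above swaps the removed Pipeline constructor and addMember() calls for the new builder. A self-contained sketch of the same construction, limited to the builder calls visible in the hunk; the wrapper class is illustrative only:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
    import org.apache.hadoop.hdds.scm.pipeline.PipelineID;

    // Illustrative helper, not from the patch: builds a standalone pipeline
    // with the builder API that replaces the old Pipeline constructor.
    final class PipelineBuilderSketch {
      static Pipeline newStandalonePipeline(Iterable<DatanodeDetails> ids) {
        List<DatanodeDetails> dns = new ArrayList<>();
        ids.forEach(dns::add);
        return Pipeline.newBuilder()
            .setId(PipelineID.randomId())
            .setState(Pipeline.PipelineState.OPEN)
            .setType(HddsProtos.ReplicationType.STAND_ALONE)
            .setFactor(HddsProtos.ReplicationFactor.ONE)
            .setNodes(dns)              // replaces pipeline.addMember(...)
            .build();
      }
    }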
@@ -218,7 +216,7 @@ public final class ContainerTestHelper {
     request.setContainerID(blockID.getContainerID());
     request.setWriteChunk(writeRequest);
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return request.build();
   }
@@ -260,7 +258,7 @@ public final class ContainerTestHelper {
     request.setContainerID(blockID.getContainerID());
     request.setPutSmallFile(smallFileRequest);
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return request.build();
   }
@@ -279,7 +277,7 @@ public final class ContainerTestHelper {
     request.setContainerID(getKey.getGetBlock().getBlockID().getContainerID());
     request.setGetSmallFile(smallFileRequest);
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return request.build();
   }
@@ -309,7 +307,7 @@ public final class ContainerTestHelper {
     newRequest.setContainerID(readRequest.getBlockID().getContainerID());
     newRequest.setReadChunk(readRequest);
     newRequest.setTraceID(UUID.randomUUID().toString());
-    newRequest.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    newRequest.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return newRequest.build();
   }
@@ -342,7 +340,7 @@ public final class ContainerTestHelper {
     request.setContainerID(writeRequest.getBlockID().getContainerID());
     request.setDeleteChunk(deleteRequest);
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return request.build();
   }
@@ -363,7 +361,7 @@ public final class ContainerTestHelper {
     request.setCreateContainer(
         ContainerProtos.CreateContainerRequestProto.getDefaultInstance());
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return request.build();
   }
@@ -398,7 +396,7 @@ public final class ContainerTestHelper {
     request.setContainerID(containerID);
     request.setUpdateContainer(updateRequestBuilder.build());
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return request.build();
   }
   /**
@@ -427,7 +425,8 @@ public final class ContainerTestHelper {
    * @return - Request
    */
   public static ContainerCommandRequestProto getPutBlockRequest(
-      Pipeline pipeline, ContainerProtos.WriteChunkRequestProto writeRequest) {
+      Pipeline pipeline, ContainerProtos.WriteChunkRequestProto writeRequest)
+      throws IOException {
     LOG.trace("putBlock: {} to pipeline={}",
         writeRequest.getBlockID());
@@ -448,7 +447,7 @@ public final class ContainerTestHelper {
     request.setContainerID(blockData.getContainerID());
     request.setPutBlock(putRequest);
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return request.build();
   }
@@ -460,7 +459,8 @@ public final class ContainerTestHelper {
    * immediately.
    */
   public static ContainerCommandRequestProto getBlockRequest(
-      Pipeline pipeline, ContainerProtos.PutBlockRequestProto putBlockRequest) {
+      Pipeline pipeline, ContainerProtos.PutBlockRequestProto putBlockRequest)
+      throws IOException {
     ContainerProtos.DatanodeBlockID blockID =
         putBlockRequest.getBlockData().getBlockID();
     LOG.trace("getKey: blockID={}", blockID);
@@ -475,7 +475,7 @@ public final class ContainerTestHelper {
     request.setContainerID(blockID.getContainerID());
     request.setGetBlock(getRequest);
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return request.build();
   }
@@ -499,7 +499,8 @@ public final class ContainerTestHelper {
    * @return - Request
    */
   public static ContainerCommandRequestProto getDeleteBlockRequest(
-      Pipeline pipeline, ContainerProtos.PutBlockRequestProto putBlockRequest) {
+      Pipeline pipeline, ContainerProtos.PutBlockRequestProto putBlockRequest)
+      throws IOException {
     ContainerProtos.DatanodeBlockID blockID = putBlockRequest.getBlockData()
         .getBlockID();
     LOG.trace("deleteBlock: name={}", blockID);
@@ -512,7 +513,7 @@ public final class ContainerTestHelper {
     request.setContainerID(blockID.getContainerID());
     request.setDeleteBlock(delRequest);
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     return request.build();
   }
@@ -523,7 +524,7 @@ public final class ContainerTestHelper {
    * @return ContainerCommandRequestProto.
    */
   public static ContainerCommandRequestProto getCloseContainer(
-      Pipeline pipeline, long containerID) {
+      Pipeline pipeline, long containerID) throws IOException {
     ContainerProtos.ContainerCommandRequestProto cmd =
         ContainerCommandRequestProto.newBuilder()
             .setCmdType(ContainerProtos.Type.CloseContainer)
@@ -531,7 +532,7 @@ public final class ContainerTestHelper {
             .setCloseContainer(
                 ContainerProtos.CloseContainerRequestProto.getDefaultInstance())
             .setTraceID(UUID.randomUUID().toString())
-            .setDatanodeUuid(pipeline.getLeader().getUuidString())
+            .setDatanodeUuid(pipeline.getFirstNode().getUuidString())
             .build();
     return cmd;
@@ -544,7 +545,7 @@ public final class ContainerTestHelper {
    * @return ContainerCommandRequestProto without traceId.
    */
   public static ContainerCommandRequestProto getRequestWithoutTraceId(
-      Pipeline pipeline, long containerID) {
+      Pipeline pipeline, long containerID) throws IOException {
     Preconditions.checkNotNull(pipeline);
     ContainerProtos.ContainerCommandRequestProto cmd =
         ContainerCommandRequestProto.newBuilder()
@@ -552,7 +553,7 @@ public final class ContainerTestHelper {
             .setContainerID(containerID)
             .setCloseContainer(
                 ContainerProtos.CloseContainerRequestProto.getDefaultInstance())
-            .setDatanodeUuid(pipeline.getLeader().getUuidString())
+            .setDatanodeUuid(pipeline.getFirstNode().getUuidString())
             .build();
     return cmd;
   }
@@ -563,7 +564,8 @@ public final class ContainerTestHelper {
    * @return ContainerCommandRequestProto.
    */
   public static ContainerCommandRequestProto getDeleteContainer(
-      Pipeline pipeline, long containerID, boolean forceDelete) {
+      Pipeline pipeline, long containerID, boolean forceDelete)
+      throws IOException {
     Preconditions.checkNotNull(pipeline);
     ContainerProtos.DeleteContainerRequestProto deleteRequest =
         ContainerProtos.DeleteContainerRequestProto.newBuilder().
@@ -575,7 +577,7 @@ public final class ContainerTestHelper {
             ContainerProtos.DeleteContainerRequestProto.getDefaultInstance())
         .setDeleteContainer(deleteRequest)
         .setTraceID(UUID.randomUUID().toString())
-        .setDatanodeUuid(pipeline.getLeader().getUuidString())
+        .setDatanodeUuid(pipeline.getFirstNode().getUuidString())
        .build();
   }
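Throughout ContainerTestHelper above, the request builders now take the target datanode from Pipeline#getFirstNode() instead of the removed getLeader(), and the helper signatures gain throws IOException. A sketch of the pattern; that getFirstNode() can fail (for example on an empty node list) is an assumption inferred from the added throws clauses, not something the patch states:

    import java.io.IOException;

    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.scm.pipeline.Pipeline;

    // Illustrative helper, not from the patch.
    final class FirstNodeSketch {
      // getFirstNode() replaces getLeader(); callers now declare IOException,
      // presumably because the call can fail when the pipeline has no nodes.
      static String targetDatanodeUuid(Pipeline pipeline) throws IOException {
        DatanodeDetails dn = pipeline.getFirstNode();
        return dn.getUuidString();
      }
    }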
http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/TestContainerReplication.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/TestContainerReplication.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/TestContainerReplication.java
index d8a7d53..1789e55 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/TestContainerReplication.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/TestContainerReplication.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .DatanodeBlockID;
 import org.apache.hadoop.hdds.scm.XceiverClientGrpc;
 import org.apache.hadoop.hdds.scm.XceiverClientSpi;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.ozone.HddsDatanodeService;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.container.common.helpers.BlockData;
@@ -123,7 +123,7 @@ public class TestContainerReplication {
     cluster.getStorageContainerManager().getScmNodeManager()
         .addDatanodeCommand(destinationDatanode.getDatanodeDetails().getUuid(),
             new ReplicateContainerCommand(containerId,
-                sourcePipelines.getMachines()));
+                sourcePipelines.getNodes()));
     Thread.sleep(3000);
@@ -163,7 +163,7 @@ public class TestContainerReplication {
   private HddsDatanodeService chooseDatanodeWithoutContainer(Pipeline pipeline,
       List<HddsDatanodeService> dataNodes) {
     for (HddsDatanodeService datanode : dataNodes) {
-      if (!pipeline.getMachines().contains(datanode.getDatanodeDetails())) {
+      if (!pipeline.getNodes().contains(datanode.getDatanodeDetails())) {
         return datanode;
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestCloseContainerHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestCloseContainerHandler.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestCloseContainerHandler.java
index 85148e1..360b683 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestCloseContainerHandler.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestCloseContainerHandler.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.ozone.container.common.volume.HddsVolume;
 import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
 import org.apache.hadoop.ozone.container.keyvalue.KeyValueHandler;
 import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -144,7 +144,7 @@ public class TestCloseContainerHandler {
       request.setContainerID(blockID.getContainerID());
       request.setWriteChunk(writeRequest);
       request.setTraceID(UUID.randomUUID().toString());
-      request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+      request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
       dispatcher.dispatch(request.build());
       chunkList.add(info);
     }
@@ -179,7 +179,7 @@ public class TestCloseContainerHandler {
     request.setContainerID(blockID.getContainerID());
     request.setPutBlock(putBlockRequestProto);
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     dispatcher.dispatch(request.build());
     //the open block should be removed from Map
@@ -217,7 +217,7 @@ public class TestCloseContainerHandler {
     request.setDeleteChunk(deleteChunkProto);
     request.setWriteChunk(writeRequest);
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     dispatcher.dispatch(request.build());
     Assert.assertTrue(
         openContainerBlockMap.getBlockDataMap(testContainerID)
@@ -250,7 +250,7 @@ public class TestCloseContainerHandler {
     request.setCloseContainer(
         ContainerProtos.CloseContainerRequestProto.getDefaultInstance());
     request.setTraceID(UUID.randomUUID().toString());
-    request.setDatanodeUuid(pipeline.getLeader().getUuidString());
+    request.setDatanodeUuid(pipeline.getFirstNode().getUuidString());
     dispatcher.dispatch(request.build());
     Assert.assertNull(
         openContainerBlockMap.getBlockDataMap(testContainerID));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerByPipeline.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerByPipeline.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerByPipeline.java
index 62cc5b2..aada723 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerByPipeline.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerByPipeline.java
@@ -23,7 +23,7 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.container.ContainerID;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.ozone.HddsDatanodeService;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.client.ObjectStore;
@@ -106,7 +106,7 @@ public class TestCloseContainerByPipeline {
         .getContainerManager().getContainerWithPipeline(
             ContainerID.valueof(containerID))
         .getPipeline();
-    List<DatanodeDetails> datanodes = pipeline.getMachines();
+    List<DatanodeDetails> datanodes = pipeline.getNodes();
     Assert.assertEquals(datanodes.size(), 1);
     DatanodeDetails datanodeDetails = datanodes.get(0);
@@ -162,7 +162,7 @@ public class TestCloseContainerByPipeline {
         .getContainerManager().getContainerWithPipeline(
             ContainerID.valueof(containerID))
         .getPipeline();
-    List<DatanodeDetails> datanodes = pipeline.getMachines();
+    List<DatanodeDetails> datanodes = pipeline.getNodes();
     Assert.assertEquals(datanodes.size(), 1);
     DatanodeDetails datanodeDetails = datanodes.get(0);
@@ -220,7 +220,7 @@ public class TestCloseContainerByPipeline {
         .getContainerManager().getContainerWithPipeline(
             ContainerID.valueof(containerID))
         .getPipeline();
-    List<DatanodeDetails> datanodes = pipeline.getMachines();
+    List<DatanodeDetails> datanodes = pipeline.getNodes();
     Assert.assertEquals(3, datanodes.size());
     GenericTestUtils.LogCapturer logCapturer =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerHandler.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerHandler.java
index 4cd42ab..f3ce899 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerHandler.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/TestCloseContainerHandler.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.scm.container.ContainerID;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.client.ObjectStore;
 import org.apache.hadoop.ozone.client.OzoneClient;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/TestCSMMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/TestCSMMetrics.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/TestCSMMetrics.java
index 2c94f3b..a5a9641 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/TestCSMMetrics.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/TestCSMMetrics.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
     .ContainerCommandResponseProto;
 import org.apache.hadoop.hdds.scm.*;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.ozone.RatisTestHelper;
@@ -93,7 +93,7 @@ public class TestCSMMetrics {
       final OzoneConfiguration conf = new OzoneConfiguration();
       initConf.accept(pipeline, conf);
-      for (DatanodeDetails dn : pipeline.getMachines()) {
+      for (DatanodeDetails dn : pipeline.getNodes()) {
         final XceiverServerSpi s = createServer.apply(dn, conf);
         servers.add(s);
         s.start();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/metrics/TestContainerMetrics.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/metrics/TestContainerMetrics.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/metrics/TestContainerMetrics.java
index aac908d..d4f7ae5 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/metrics/TestContainerMetrics.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/metrics/TestContainerMetrics.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.ozone.container.common.impl.HddsDispatcher;
 import org.apache.hadoop.ozone.container.common.transport.server.XceiverServerGrpc;
 import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
 import org.apache.hadoop.hdds.scm.TestUtils;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.ozone.container.replication.GrpcReplicationService;
 import org.apache.hadoop.ozone.container.replication.OnDemandContainerReplicationSource;
 import org.apache.hadoop.test.GenericTestUtils;
@@ -75,7 +75,7 @@ public class TestContainerMetrics {
         .createSingleNodePipeline();
     OzoneConfiguration conf = new OzoneConfiguration();
     conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
-        pipeline.getLeader()
+        pipeline.getFirstNode()
            .getPort(DatanodeDetails.Port.Name.STANDALONE).getValue());
     conf.setInt(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY,
        interval);
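As in TestContainerMetrics above and TestOzoneContainer below, the standalone container IPC port is now taken from the first pipeline node rather than from the removed getLeader(). A small sketch of that configuration step; the wrapper class and the broad throws clause are assumptions, while the config key and port accessor come straight from the hunks:

    import org.apache.hadoop.hdds.conf.OzoneConfiguration;
    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
    import org.apache.hadoop.ozone.OzoneConfigKeys;

    // Illustrative helper, not from the patch: points the container IPC port
    // at the first pipeline node, mirroring the test changes above.
    final class PortConfigSketch {
      static void setContainerIpcPort(OzoneConfiguration conf,
          Pipeline pipeline) throws Exception { // getFirstNode() may throw
        int port = pipeline.getFirstNode()
            .getPort(DatanodeDetails.Port.Name.STANDALONE).getValue();
        conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT, port);
      }
    }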
http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
index a3c92fb..f7ba979 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.ozone.container.ContainerTestHelper;
 import org.apache.hadoop.hdds.scm.TestUtils;
 import org.apache.hadoop.hdds.scm.XceiverClientGrpc;
 import org.apache.hadoop.hdds.scm.XceiverClientSpi;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -66,7 +66,8 @@ public class TestOzoneContainer {
       // independently in our test path.
       Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline();
       conf.set(HDDS_DATANODE_DIR_KEY, tempFolder.getRoot().getPath());
-      conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT, pipeline.getLeader()
+      conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+          pipeline.getFirstNode()
           .getPort(DatanodeDetails.Port.Name.STANDALONE).getValue());
       conf.setBoolean(
           OzoneConfigKeys.DFS_CONTAINER_IPC_RANDOM_PORT, false);
@@ -108,7 +109,7 @@ public class TestOzoneContainer {
       Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline();
       conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
-          pipeline.getLeader()
+          pipeline.getFirstNode()
               .getPort(DatanodeDetails.Port.Name.STANDALONE).getValue());
       cluster = MiniOzoneCluster.newBuilder(conf)
@@ -514,7 +515,7 @@ public class TestOzoneContainer {
       Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline();
       conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
-          pipeline.getLeader()
+          pipeline.getFirstNode()
              .getPort(DatanodeDetails.Port.Name.STANDALONE).getValue());
       // This client talks to ozone container via datanode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainerRatis.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainerRatis.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainerRatis.java
index 0f5d21f..13e41e2 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainerRatis.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainerRatis.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.ozone.RatisTestHelper;
 import org.apache.hadoop.ozone.container.ContainerTestHelper;
 import org.apache.hadoop.ozone.web.utils.OzoneUtils;
 import org.apache.hadoop.hdds.scm.XceiverClientSpi;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.ratis.rpc.RpcType;
 import org.apache.ratis.rpc.SupportedRpcType;
 import org.apache.ratis.util.CheckedBiConsumer;
@@ -88,12 +88,12 @@ public class TestOzoneContainerRatis {
       // Create Ratis cluster
 //      final String ratisId = "ratis1";
 //      final PipelineManager manager = RatisManagerImpl.newRatisManager(conf);
-//      manager.createPipeline(ratisId, pipeline.getMachines());
+//      manager.createPipeline(ratisId, pipeline.getNodes());
 //      LOG.info("Created RatisCluster " + ratisId);
 //
 //      // check Ratis cluster members
 //      final List dns = manager.getMembers(ratisId);
-//      Assert.assertEquals(pipeline.getMachines(), dns);
+//      Assert.assertEquals(pipeline.getNodes(), dns);
 //
 //      // run test
 //      final XceiverClientSpi client = XceiverClientRatis

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
index e6ebbf1..33e3e1a 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
@@ -44,7 +44,7 @@ import org.apache.hadoop.ozone.web.utils.OzoneUtils;
 import org.apache.hadoop.hdds.scm.XceiverClientGrpc;
 import org.apache.hadoop.hdds.scm.XceiverClientRatis;
 import org.apache.hadoop.hdds.scm.XceiverClientSpi;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.ratis.rpc.RpcType;
 import org.apache.ratis.util.CheckedBiConsumer;
@@ -56,7 +56,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.function.BiConsumer;
 import static org.apache.ratis.rpc.SupportedRpcType.GRPC;
 import static org.apache.ratis.rpc.SupportedRpcType.NETTY;
@@ -80,15 +79,15 @@ public class TestContainerServer {
   public void testClientServer() throws Exception {
     DatanodeDetails datanodeDetails = TestUtils.randomDatanodeDetails();
     ContainerSet containerSet = new ContainerSet();
-    runTestClientServer(1,
-        (pipeline, conf) -> conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
-            pipeline.getLeader()
-                .getPort(DatanodeDetails.Port.Name.STANDALONE).getValue()),
+    runTestClientServer(1, (pipeline, conf) -> conf
+        .setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
+            pipeline.getFirstNode()
+                .getPort(DatanodeDetails.Port.Name.STANDALONE).getValue()),
         XceiverClientGrpc::new,
         (dn, conf) -> new XceiverServerGrpc(datanodeDetails, conf,
             new TestContainerDispatcher(),
-            createReplicationService(containerSet)),
-        (dn, p) -> {});
+            createReplicationService(containerSet)), (dn, p) -> {
+        });
   }
   @FunctionalInterface
@@ -131,7 +130,7 @@ public class TestContainerServer {
   static void runTestClientServer(
       int numDatanodes,
-      BiConsumer initConf,
+      CheckedBiConsumer initConf,
       CheckedBiFunction createClient,
       CheckedBiFunction
       dns = new ArrayList<>();
+    dns.add(datanode);
+    Pipeline pipeline = Pipeline.newBuilder()
+        .setState(Pipeline.PipelineState.OPEN)
+        .setId(PipelineID.randomId())
+        .setType(HddsProtos.ReplicationType.STAND_ALONE)
+        .setFactor(HddsProtos.ReplicationFactor.ONE)
+        .setNodes(dns)
+        .build();
     return pipeline;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java
index f29a5e6..9c0b541 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/genesis/BenchMarkContainerStateMap.java
@@ -19,12 +19,13 @@ package org.apache.hadoop.ozone.genesis;
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.scm.container.ContainerInfo;
-import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineID;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
+import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
 import org.apache.hadoop.hdds.scm.container.states.ContainerStateMap;
 import org.apache.hadoop.hdds.scm.exceptions.SCMException;
 import org.apache.hadoop.util.Time;
@@ -39,7 +40,6 @@ import java.io.IOException;
 import java.util.UUID;
 import java.util.List;
 import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.Objects;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -149,17 +149,16 @@ public class BenchMarkContainerStateMap {
   public static Pipeline createPipeline(String containerName,
       Iterable<DatanodeDetails> ids) throws IOException {
     Objects.requireNonNull(ids, "ids == null");
-    final Iterator<DatanodeDetails> i = ids.iterator();
-    Preconditions.checkArgument(i.hasNext());
-    final DatanodeDetails leader = i.next();
-    final Pipeline pipeline =
-        new Pipeline(leader.getUuidString(), OPEN,
-            ReplicationType.STAND_ALONE, ReplicationFactor.ONE,
-            PipelineID.randomId());
-    pipeline.addMember(leader);
-    for (; i.hasNext();) {
-      pipeline.addMember(i.next());
-    }
+    Preconditions.checkArgument(ids.iterator().hasNext());
+    List<DatanodeDetails> dns = new ArrayList<>();
+    ids.forEach(dns::add);
+    final Pipeline pipeline = Pipeline.newBuilder()
+        .setState(Pipeline.PipelineState.OPEN)
+        .setId(PipelineID.randomId())
+        .setType(HddsProtos.ReplicationType.STAND_ALONE)
+        .setFactor(HddsProtos.ReplicationFactor.ONE)
+        .setNodes(dns)
+        .build();
     return pipeline;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce4ebe8/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
index 922856b..9117838 100644
--- a/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
+++ b/hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.hdds.scm.container.ContainerManager;
 import org.apache.hadoop.hdds.scm.container.SCMContainerManager;
 import org.apache.hadoop.hdds.scm.events.SCMEvents;
 import org.apache.hadoop.hdds.scm.node.NodeManager;
+import org.apache.hadoop.hdds.scm.pipeline.PipelineManager;
 import org.apache.hadoop.hdds.server.events.EventQueue;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
@@ -120,7 +121,10 @@ public class TestContainerSQLCli {
     cluster.getStorageContainerManager().stop();
     eventQueue = new EventQueue();
     nodeManager = cluster.getStorageContainerManager().getScmNodeManager();
-    containerManager = new SCMContainerManager(conf, nodeManager, eventQueue);
+    PipelineManager pipelineManager =
+        cluster.getStorageContainerManager().getPipelineManager();
+    containerManager = new SCMContainerManager(conf, nodeManager,
+        pipelineManager, eventQueue);
     blockManager = new BlockManagerImpl(
         conf, nodeManager, containerManager, eventQueue);
     eventQueue.addHandler(SCMEvents.CHILL_MODE_STATUS, blockManager);
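The TestContainerSQLCli hunk above passes the SCM's PipelineManager into the SCMContainerManager constructor alongside the configuration, node manager and event queue. A sketch of that wiring; the wrapper class and the broad throws clause are illustrative, while the constructor arguments follow the hunk:

    import org.apache.hadoop.hdds.conf.OzoneConfiguration;
    import org.apache.hadoop.hdds.scm.container.ContainerManager;
    import org.apache.hadoop.hdds.scm.container.SCMContainerManager;
    import org.apache.hadoop.hdds.scm.node.NodeManager;
    import org.apache.hadoop.hdds.scm.pipeline.PipelineManager;
    import org.apache.hadoop.hdds.server.events.EventQueue;
    import org.apache.hadoop.ozone.MiniOzoneCluster;

    // Illustrative helper, not from the patch: mirrors the new wiring in
    // TestContainerSQLCli, where SCMContainerManager also needs the
    // PipelineManager owned by the StorageContainerManager.
    final class ContainerManagerWiringSketch {
      static ContainerManager wire(MiniOzoneCluster cluster,
          OzoneConfiguration conf, NodeManager nodeManager, EventQueue queue)
          throws Exception {
        PipelineManager pipelineManager =
            cluster.getStorageContainerManager().getPipelineManager();
        return new SCMContainerManager(conf, nodeManager, pipelineManager,
            queue);
      }
    }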
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org