hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aengin...@apache.org
Subject [1/2] hadoop git commit: HDFS-11196. Ozone: Improve logging and error handling in the container layer. Contributed by Anu Engineer.
Date Wed, 08 Mar 2017 17:41:47 GMT
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7240 dac2b9e7d -> 407412363


http://git-wip-us.apache.org/repos/asf/hadoop/blob/40741236/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
index b063085..f3ae105 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
 import org.apache.hadoop.hdfs.util.RwLock;
+import org.apache.hadoop.scm.container.common.helpers.StorageContainerException;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
 import org.apache.hadoop.ozone.protocol.proto
     .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
@@ -43,7 +44,7 @@ public interface ContainerManager extends RwLock {
    *
    * @param config        - Configuration.
    * @param containerDirs - List of Metadata Container locations.
-   * @throws IOException
+   * @throws StorageContainerException
    */
   void init(Configuration config, List<StorageLocation> containerDirs)
       throws IOException;
@@ -53,20 +54,20 @@ public interface ContainerManager extends RwLock {
    *
    * @param pipeline      -- Nodes which make up this container.
    * @param containerData - Container Name and metadata.
-   * @throws IOException
+   * @throws StorageContainerException
    */
   void createContainer(Pipeline pipeline, ContainerData containerData)
-      throws IOException;
+      throws StorageContainerException;
 
   /**
    * Deletes an existing container.
    *
    * @param pipeline      - nodes that make this container.
    * @param containerName - name of the container.
-   * @throws IOException
+   * @throws StorageContainerException
    */
   void deleteContainer(Pipeline pipeline, String containerName)
-      throws IOException;
+      throws StorageContainerException;
 
   /**
    * As simple interface for container Iterations.
@@ -75,25 +76,26 @@ public interface ContainerManager extends RwLock {
    * @param count   - how many to return
    * @param prevKey - Previous key - Server returns results from this point.
    * @param data    - Actual containerData
-   * @throws IOException
+   * @throws StorageContainerException
    */
   void listContainer(String prefix, long count, String prevKey,
                      List<ContainerData> data)
-      throws IOException;
+      throws StorageContainerException;
 
   /**
    * Get metadata about a specific container.
    *
    * @param containerName - Name of the container
    * @return ContainerData - Container Data.
-   * @throws IOException
+   * @throws StorageContainerException
    */
-  ContainerData readContainer(String containerName) throws IOException;
+  ContainerData readContainer(String containerName)
+      throws StorageContainerException;
 
   /**
    * Supports clean shutdown of container.
    *
-   * @throws IOException
+   * @throws StorageContainerException
    */
   void shutdown() throws IOException;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40741236/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java
index c6d51e3..c8cc182 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/KeyManager.java
@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.ozone.container.common.interfaces;
 
+import org.apache.hadoop.scm.container.common.helpers.StorageContainerException;
 import org.apache.hadoop.ozone.container.common.helpers.KeyData;
 import org.apache.hadoop.scm.container.common.helpers.Pipeline;
 
-import java.io.IOException;
 import java.util.List;
 
 /**
@@ -32,24 +32,28 @@ public interface KeyManager {
    *
    * @param pipeline - Pipeline.
    * @param data     - Key Data.
+   * @throws StorageContainerException
    */
-  void putKey(Pipeline pipeline, KeyData data) throws IOException;
+  void putKey(Pipeline pipeline, KeyData data) throws StorageContainerException;
 
   /**
    * Gets an existing key.
    *
    * @param data - Key Data.
    * @return Key Data.
+   * @throws StorageContainerException
    */
-  KeyData getKey(KeyData data) throws IOException;
+  KeyData getKey(KeyData data) throws StorageContainerException;
 
   /**
    * Deletes an existing Key.
    *
    * @param pipeline - Pipeline.
    * @param keyName  Key Data.
+   * @throws StorageContainerException
    */
-  void deleteKey(Pipeline pipeline, String keyName) throws IOException;
+  void deleteKey(Pipeline pipeline, String keyName)
+      throws StorageContainerException;
 
   /**
    * List keys in a container.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40741236/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
index 0a6ea02..213494f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
@@ -44,10 +44,16 @@ import java.util.UUID;
 /**
  * Helpers for container tests.
  */
-public class ContainerTestHelper {
+public final class ContainerTestHelper {
   private static Random r = new Random();
 
   /**
+   * Never constructed.
+   */
+  private ContainerTestHelper() {
+  }
+
+  /**
    * Create a pipeline with single node replica.
    *
    * @return Pipeline with single node in it.
@@ -291,8 +297,7 @@ public class ContainerTestHelper {
    * @return ContainerCommandRequestProto.
    */
   public static ContainerCommandResponseProto
-  getCreateContainerResponse(ContainerCommandRequestProto request) throws
-      IOException {
+      getCreateContainerResponse(ContainerCommandRequestProto request) {
     ContainerProtos.CreateContainerResponseProto.Builder createResponse =
         ContainerProtos.CreateContainerResponseProto.newBuilder();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40741236/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
index 9dfee3d..36f5426 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.ozone.OzoneConfiguration;
 import org.apache.hadoop.ozone.OzoneConsts;
+import org.apache.hadoop.scm.container.common.helpers.StorageContainerException;
 import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerData;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
@@ -121,8 +122,8 @@ public class TestContainerPersistence {
 
   @Before
   public void setupPaths() throws IOException {
-    if (!new File(path).exists()) {
-      new File(path).mkdirs();
+    if (!new File(path).exists() && !new File(path).mkdirs()) {
+      throw new IOException("Unable to create paths. " + path);
     }
     pathLists.clear();
     containerManager.getContainerMap().clear();
@@ -157,11 +158,8 @@ public class TestContainerPersistence {
     Assert.assertTrue(new File(status.getContainer().getContainerPath())
         .exists());
 
-    String containerPathString = ContainerUtils.getContainerNameFromFile(new
-        File(status.getContainer().getContainerPath()));
-
     Path meta = Paths.get(status.getContainer().getDBPath()).getParent();
-    Assert.assertTrue(Files.exists(meta));
+    Assert.assertTrue(meta != null && Files.exists(meta));
 
 
     String dbPath = status.getContainer().getDBPath();
@@ -363,8 +361,7 @@ public class TestContainerPersistence {
         sha.update(FileUtils.readFileToByteArray(fname.toFile()));
         String val = Hex.encodeHexString(sha.digest());
         Assert.assertEquals(fileHashMap.get(fname.getFileName().toString())
-                .getChecksum(),
-            val);
+                .getChecksum(), val);
         count++;
         sha.reset();
       }
@@ -486,7 +483,7 @@ public class TestContainerPersistence {
     setDataChecksum(info, data);
     chunkManager.writeChunk(pipeline, keyName, info, data);
     chunkManager.deleteChunk(pipeline, keyName, info);
-    exception.expect(IOException.class);
+    exception.expect(StorageContainerException.class);
     exception.expectMessage("Unable to find the chunk file.");
     chunkManager.readChunk(pipeline, keyName, info);
   }
@@ -572,7 +569,7 @@ public class TestContainerPersistence {
     keyData.setChunks(chunkList);
     keyManager.putKey(pipeline, keyData);
     keyManager.deleteKey(pipeline, keyName);
-    exception.expect(IOException.class);
+    exception.expect(StorageContainerException.class);
     exception.expectMessage("Unable to find the key.");
     keyManager.getKey(keyData);
   }
@@ -596,7 +593,7 @@ public class TestContainerPersistence {
     keyData.setChunks(chunkList);
     keyManager.putKey(pipeline, keyData);
     keyManager.deleteKey(pipeline, keyName);
-    exception.expect(IOException.class);
+    exception.expect(StorageContainerException.class);
     exception.expectMessage("Unable to find the key.");
     keyManager.deleteKey(pipeline, keyName);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40741236/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/transport/server/TestContainerServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/transport/server/TestContainerServer.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/transport/server/TestContainerServer.java
index 6b9e266..8606915 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/transport/server/TestContainerServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/container/transport/server/TestContainerServer.java
@@ -41,6 +41,9 @@ import java.io.IOException;
 
 import static org.mockito.Mockito.mock;
 
+/**
+ * Test Containers.
+ */
 public class TestContainerServer {
 
   @Test
@@ -69,8 +72,8 @@ public class TestContainerServer {
     XceiverClient client = null;
     String containerName = OzoneUtils.getRequestID();
     try {
-      Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline
-          (containerName);
+      Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline(
+          containerName);
       OzoneConfiguration conf = new OzoneConfiguration();
       conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
           pipeline.getLeader().getContainerPort());
@@ -102,8 +105,8 @@ public class TestContainerServer {
     String containerName = OzoneUtils.getRequestID();
 
     try {
-      Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline
-          (containerName);
+      Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline(
+          containerName);
       OzoneConfiguration conf = new OzoneConfiguration();
       conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
           pipeline.getLeader().getContainerPort());
@@ -136,17 +139,16 @@ public class TestContainerServer {
     }
   }
 
-  private class TestContainerDispatcher implements ContainerDispatcher {
+  private static class TestContainerDispatcher implements ContainerDispatcher {
     /**
      * Dispatches commands to container layer.
      *
      * @param msg - Command Request
      * @return Command Response
-     * @throws IOException
      */
     @Override
     public ContainerCommandResponseProto
-    dispatch(ContainerCommandRequestProto msg) throws IOException {
+        dispatch(ContainerCommandRequestProto msg)  {
       return ContainerTestHelper.getCreateContainerResponse(msg);
     }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40741236/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/scm/TestAllocateContainer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/scm/TestAllocateContainer.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/scm/TestAllocateContainer.java
index c310647..58d51a2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/scm/TestAllocateContainer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/scm/TestAllocateContainer.java
@@ -49,6 +49,8 @@ public class TestAllocateContainer {
     conf = new OzoneConfiguration();
     cluster = new MiniOzoneCluster.Builder(conf).numDataNodes(1)
         .setHandlerType("distributed").build();
+    storageContainerLocationClient =
+        cluster.createStorageContainerLocationClient();
   }
 
   @AfterClass
@@ -61,8 +63,6 @@ public class TestAllocateContainer {
 
   @Test
   public void testAllocate() throws Exception {
-    storageContainerLocationClient =
-        cluster.createStorageContainerLocationClient();
     Pipeline pipeline = storageContainerLocationClient.allocateContainer(
         "container0");
     Assert.assertNotNull(pipeline);
@@ -72,8 +72,6 @@ public class TestAllocateContainer {
 
   @Test
   public void testAllocateNull() throws Exception {
-    storageContainerLocationClient =
-        cluster.createStorageContainerLocationClient();
     thrown.expect(NullPointerException.class);
     storageContainerLocationClient.allocateContainer(null);
   }
@@ -81,12 +79,9 @@ public class TestAllocateContainer {
   @Test
   public void testAllocateDuplicate() throws Exception {
     String containerName = RandomStringUtils.randomAlphanumeric(10);
-    storageContainerLocationClient =
-        cluster.createStorageContainerLocationClient();
     thrown.expect(IOException.class);
     thrown.expectMessage("Specified container already exists");
     storageContainerLocationClient.allocateContainer(containerName);
     storageContainerLocationClient.allocateContainer(containerName);
-
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40741236/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSmallFile.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSmallFile.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSmallFile.java
new file mode 100644
index 0000000..f5871cd
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSmallFile.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.scm;
+
+import org.apache.hadoop.hdfs.ozone.protocol.proto.ContainerProtos;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.ozone.MiniOzoneCluster;
+import org.apache.hadoop.ozone.OzoneConfiguration;
+import org.apache.hadoop.scm.protocolPB
+    .StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.scm.XceiverClient;
+import org.apache.hadoop.scm.XceiverClientManager;
+import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.scm.container.common.helpers.StorageContainerException;
+import org.apache.hadoop.scm.storage.ContainerProtocolCalls;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.util.UUID;
+
+/**
+ * Test Container calls.
+ */
+public class TestContainerSmallFile {
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  private static MiniOzoneCluster cluster;
+  private static OzoneConfiguration ozoneConfig;
+  private static StorageContainerLocationProtocolClientSideTranslatorPB
+      storageContainerLocationClient;
+  private static XceiverClientManager xceiverClientManager;
+
+  @BeforeClass
+  public static void init() throws IOException {
+    ozoneConfig = new OzoneConfiguration();
+    cluster = new MiniOzoneCluster.Builder(ozoneConfig)
+        .numDataNodes(1).setHandlerType("distributed").build();
+    storageContainerLocationClient = cluster
+        .createStorageContainerLocationClient();
+    xceiverClientManager = new XceiverClientManager(ozoneConfig);
+  }
+
+  @AfterClass
+  public static void shutdown() throws InterruptedException {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+    IOUtils.cleanup(null, storageContainerLocationClient, cluster);
+  }
+
+  @Test
+  public void testAllocateWrite() throws Exception {
+    String traceID = UUID.randomUUID().toString();
+    String containerName = "container0";
+    Pipeline pipeline =
+        storageContainerLocationClient.allocateContainer(containerName);
+    XceiverClient client = xceiverClientManager.acquireClient(pipeline);
+    ContainerProtocolCalls.createContainer(client, traceID);
+
+    ContainerProtocolCalls.writeSmallFile(client, containerName,
+        "key", "data123".getBytes(), traceID);
+    ContainerProtos.GetSmallFileResponseProto response =
+        ContainerProtocolCalls.readSmallFile(client, containerName, "key",
+            traceID);
+    String readData = response.getData().getData().toStringUtf8();
+    Assert.assertEquals("data123", readData);
+  }
+
+  @Test
+  public void testInvalidKeyRead() throws Exception {
+    String traceID = UUID.randomUUID().toString();
+    String containerName = "container1";
+    Pipeline pipeline =
+        storageContainerLocationClient.allocateContainer(containerName);
+    XceiverClient client = xceiverClientManager.acquireClient(pipeline);
+    ContainerProtocolCalls.createContainer(client, traceID);
+
+    thrown.expect(StorageContainerException.class);
+    thrown.expectMessage("Unable to find the key");
+
+    // Try to read a key that was never written to the container
+    ContainerProtos.GetSmallFileResponseProto response =
+        ContainerProtocolCalls.readSmallFile(client, containerName, "key",
+            traceID);
+  }
+
+  @Test
+  public void testInvalidContainerRead() throws Exception {
+    String traceID = UUID.randomUUID().toString();
+    String invalidName = "invalidName";
+    String containerName = "container2";
+    Pipeline pipeline =
+        storageContainerLocationClient.allocateContainer(containerName);
+    XceiverClient client = xceiverClientManager.acquireClient(pipeline);
+    ContainerProtocolCalls.createContainer(client, traceID);
+    ContainerProtocolCalls.writeSmallFile(client, containerName,
+        "key", "data123".getBytes(), traceID);
+
+
+    thrown.expect(StorageContainerException.class);
+    thrown.expectMessage("Unable to find the container");
+
+    // Try to read the key from an invalid (nonexistent) container name
+    ContainerProtos.GetSmallFileResponseProto response =
+        ContainerProtocolCalls.readSmallFile(client, invalidName, "key",
+            traceID);
+  }
+}
+
+


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message