hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From asur...@apache.org
Subject [30/50] [abbrv] hadoop git commit: HDFS-8807. dfs.datanode.data.dir does not handle spaces between storageType and URI correctly. Contributed by Anu Engineer
Date Thu, 26 Nov 2015 19:52:40 GMT
HDFS-8807.  dfs.datanode.data.dir does not handle spaces between storageType and URI correctly.
 Contributed by Anu Engineer


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/78ec38b2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/78ec38b2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/78ec38b2

Branch: refs/heads/yarn-2877
Commit: 78ec38b2ede8bdf3874b2ae051af9580007a9ba1
Parents: f5acf94
Author: Tsz-Wo Nicholas Sze <szetszwo@hortonworks.com>
Authored: Tue Nov 24 16:01:55 2015 -0800
Committer: Tsz-Wo Nicholas Sze <szetszwo@hortonworks.com>
Committed: Tue Nov 24 16:01:55 2015 -0800

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 ++
 .../hdfs/server/datanode/StorageLocation.java   |  2 +-
 .../hdfs/server/datanode/TestDataDirs.java      | 29 ++++++++++++++------
 3 files changed, 25 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/78ec38b2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 92897b9..db49e54 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1681,6 +1681,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9314. Improve BlockPlacementPolicyDefault's picking of excess
     replicas. (Xiao Chen via mingma)
 
+    HDFS-8807.  dfs.datanode.data.dir does not handle spaces between
+    storageType and URI correctly.  (Anu Engineer via szetszwo)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/78ec38b2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
index 7873459..46e8e8a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
@@ -87,7 +87,7 @@ public class StorageLocation {
 
     if (matcher.matches()) {
       String classString = matcher.group(1);
-      location = matcher.group(2);
+      location = matcher.group(2).trim();
       if (!classString.isEmpty()) {
         storageType =
             StorageType.valueOf(StringUtils.toUpperCase(classString));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/78ec38b2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
index 396945e..d41c13e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode.DataNodeDiskChecker;
 
 public class TestDataDirs {
 
-  @Test (timeout = 30000)
+  @Test(timeout = 30000)
   public void testDataDirParsing() throws Throwable {
     Configuration conf = new Configuration();
     List<StorageLocation> locations;
@@ -46,12 +46,16 @@ public class TestDataDirs {
     File dir3 = new File("/dir3");
     File dir4 = new File("/dir4");
 
+    File dir5 = new File("/dir5");
+    File dir6 = new File("/dir6");
     // Verify that a valid string is correctly parsed, and that storage
-    // type is not case-sensitive
-    String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3,[ram_disk]/dir4";
+    // type is not case-sensitive and we are able to handle white-space between
+    // storage type and URI.
+    String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3," +
+            "[ram_disk]/dir4,[disk]/dir5, [disk] /dir6, [disk] ";
     conf.set(DFS_DATANODE_DATA_DIR_KEY, locations1);
     locations = DataNode.getStorageLocations(conf);
-    assertThat(locations.size(), is(5));
+    assertThat(locations.size(), is(8));
     assertThat(locations.get(0).getStorageType(), is(StorageType.DISK));
     assertThat(locations.get(0).getUri(), is(dir0.toURI()));
     assertThat(locations.get(1).getStorageType(), is(StorageType.DISK));
@@ -62,6 +66,14 @@ public class TestDataDirs {
     assertThat(locations.get(3).getUri(), is(dir3.toURI()));
     assertThat(locations.get(4).getStorageType(), is(StorageType.RAM_DISK));
     assertThat(locations.get(4).getUri(), is(dir4.toURI()));
+    assertThat(locations.get(5).getStorageType(), is(StorageType.DISK));
+    assertThat(locations.get(5).getUri(), is(dir5.toURI()));
+    assertThat(locations.get(6).getStorageType(), is(StorageType.DISK));
+    assertThat(locations.get(6).getUri(), is(dir6.toURI()));
+
+    // not asserting the 8th URI since it is incomplete and it is in the
+    // test set to make sure that we don't fail if we get URIs like that.
+    assertThat(locations.get(7).getStorageType(), is(StorageType.DISK));
 
     // Verify that an unrecognized storage type result in an exception.
     String locations2 = "[BadMediaType]/dir0,[ssd]/dir1,[disk]/dir2";
@@ -69,7 +81,7 @@ public class TestDataDirs {
     try {
       locations = DataNode.getStorageLocations(conf);
       fail();
-    } catch(IllegalArgumentException iae) {
+    } catch (IllegalArgumentException iae) {
       DataNode.LOG.info("The exception is expected.", iae);
     }
 
@@ -85,12 +97,13 @@ public class TestDataDirs {
     assertThat(locations.get(1).getUri(), is(dir1.toURI()));
   }
 
-  @Test (timeout = 30000)
+  @Test(timeout = 30000)
   public void testDataDirValidation() throws Throwable {
-    
+
     DataNodeDiskChecker diskChecker = mock(DataNodeDiskChecker.class);
     doThrow(new IOException()).doThrow(new IOException()).doNothing()
-      .when(diskChecker).checkDir(any(LocalFileSystem.class), any(Path.class));
+        .when(diskChecker)
+        .checkDir(any(LocalFileSystem.class), any(Path.class));
     LocalFileSystem fs = mock(LocalFileSystem.class);
     AbstractList<StorageLocation> locations = new ArrayList<StorageLocation>();
 


Mime
View raw message