hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From inigo...@apache.org
Subject [4/5] hadoop git commit: HDFS-13857. RBF: Choose to enable the default nameservice to read/write files. Contributed by yanghuafeng.
Date Tue, 04 Sep 2018 19:19:50 GMT
HDFS-13857. RBF: Choose to enable the default nameservice to read/write files. Contributed
by yanghuafeng.

(cherry picked from commit 54f2044595206455484284b43e5976c8a1982aaf)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2e744bd3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2e744bd3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2e744bd3

Branch: refs/heads/branch-2
Commit: 2e744bd31d68a8d7aec258a18873f220502d9f54
Parents: 5902c06
Author: Inigo Goiri <inigoiri@apache.org>
Authored: Tue Sep 4 12:17:17 2018 -0700
Committer: Inigo Goiri <inigoiri@apache.org>
Committed: Tue Sep 4 12:18:38 2018 -0700

----------------------------------------------------------------------
 .../federation/resolver/MountTableResolver.java | 39 +++++++++++++++++++-
 .../server/federation/router/RBFConfigKeys.java |  4 ++
 .../federation/router/RouterRpcServer.java      |  2 +-
 .../src/main/resources/hdfs-rbf-default.xml     |  8 ++++
 .../resolver/TestMountTableResolver.java        | 26 +++++++++++++
 5 files changed, 76 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2e744bd3/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/MountTableResolver.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/MountTableResolver.java
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/MountTableResolver.java
index d45441f..bdd75c7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/MountTableResolver.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/MountTableResolver.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.hdfs.server.federation.resolver;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_NAMESERVICES;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DeprecatedKeys.DFS_NAMESERVICE_ID;
 import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_DEFAULT_NAMESERVICE;
+import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_DEFAULT_NAMESERVICE_ENABLE;
+import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_DEFAULT_NAMESERVICE_ENABLE_DEFAULT;
 import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.FEDERATION_MOUNT_TABLE_MAX_CACHE_SIZE;
 import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.FEDERATION_MOUNT_TABLE_MAX_CACHE_SIZE_DEFAULT;
 import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.FEDERATION_MOUNT_TABLE_CACHE_ENABLE;
@@ -95,6 +97,8 @@ public class MountTableResolver
 
 　  /** Default nameservice when no mount matches the path. */
   private String defaultNameService = "";
+  /** Whether to use the default nameservice to read and write files. */
+  private boolean defaultNSEnable = true;
 
   /** Synchronization for both the tree and the cache. */
   private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
@@ -163,6 +167,10 @@ public class MountTableResolver
         DFS_ROUTER_DEFAULT_NAMESERVICE,
         DFSUtil.getNamenodeNameServiceId(conf));
 
+    this.defaultNSEnable = conf.getBoolean(
+        DFS_ROUTER_DEFAULT_NAMESERVICE_ENABLE,
+        DFS_ROUTER_DEFAULT_NAMESERVICE_ENABLE_DEFAULT);
+
     if (defaultNameService == null) {
       LOG.warn(
           "{} and {} is not set. Fallback to {} as the default name service.",
@@ -176,9 +184,12 @@ public class MountTableResolver
     }
 
     if (this.defaultNameService.equals("")) {
+      this.defaultNSEnable = false;
       LOG.warn("Default name service is not set.");
     } else {
-      LOG.info("Default name service: {}", this.defaultNameService);
+      String enable = this.defaultNSEnable ? "enabled" : "disabled";
+      LOG.info("Default name service: {}, {} to read or write",
+          this.defaultNameService, enable);
     }
   }
 
@@ -406,13 +417,17 @@ public class MountTableResolver
    * @param path Path to check/insert.
    * @return New remote location.
    */
-  public PathLocation lookupLocation(final String path) {
+  public PathLocation lookupLocation(final String path) throws IOException {
     PathLocation ret = null;
     MountTable entry = findDeepest(path);
     if (entry != null) {
       ret = buildLocation(path, entry);
     } else {
       // Not found, use default location
+      if (!defaultNSEnable) {
+        throw new IOException("Cannot find locations for " + path + ", " +
+            "because the default nameservice is disabled to read or write");
+      }
       RemoteLocation remoteLocation =
           new RemoteLocation(defaultNameService, path, path);
       List<RemoteLocation> locations =
@@ -623,4 +638,24 @@ public class MountTableResolver
     }
     throw new IOException("localCache is null");
   }
+
+  @VisibleForTesting
+  public String getDefaultNameService() {
+    return defaultNameService;
+  }
+
+  @VisibleForTesting
+  public void setDefaultNameService(String defaultNameService) {
+    this.defaultNameService = defaultNameService;
+  }
+
+  @VisibleForTesting
+  public boolean isDefaultNSEnable() {
+    return defaultNSEnable;
+  }
+
+  @VisibleForTesting
+  public void setDefaultNSEnable(boolean defaultNSRWEnable) {
+    this.defaultNSEnable = defaultNSRWEnable;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2e744bd3/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RBFConfigKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RBFConfigKeys.java
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RBFConfigKeys.java
index 997e1dd..bbd4250 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RBFConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RBFConfigKeys.java
@@ -42,6 +42,10 @@ public class RBFConfigKeys extends CommonConfigurationKeysPublic {
       "dfs.federation.router.";
   public static final String DFS_ROUTER_DEFAULT_NAMESERVICE =
       FEDERATION_ROUTER_PREFIX + "default.nameserviceId";
+  public static final String DFS_ROUTER_DEFAULT_NAMESERVICE_ENABLE =
+      FEDERATION_ROUTER_PREFIX + "default.nameservice.enable";
+  public static final boolean DFS_ROUTER_DEFAULT_NAMESERVICE_ENABLE_DEFAULT =
+      true;
   public static final String DFS_ROUTER_HANDLER_COUNT_KEY =
       FEDERATION_ROUTER_PREFIX + "handler.count";
   public static final int DFS_ROUTER_HANDLER_COUNT_DEFAULT = 10;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2e744bd3/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
index 9c07a9a..3766921 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java
@@ -2096,7 +2096,7 @@ public class RouterRpcServer extends AbstractService
           this.subclusterResolver.getDestinationForPath(path);
       if (location == null) {
         throw new IOException("Cannot find locations for " + path + " in " +
-            this.subclusterResolver);
+            this.subclusterResolver.getClass().getSimpleName());
       }
 
       // We may block some write operations

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2e744bd3/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/resources/hdfs-rbf-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/resources/hdfs-rbf-default.xml b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/resources/hdfs-rbf-default.xml
index ed39d4b..3f56043 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/resources/hdfs-rbf-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/resources/hdfs-rbf-default.xml
@@ -32,6 +32,14 @@
   </property>
 
   <property>
+    <name>dfs.federation.router.default.nameservice.enable</name>
+    <value>true</value>
+    <description>
      If true, the default subcluster is enabled to read and write files.
+    </description>
+  </property>
+
+  <property>
     <name>dfs.federation.router.rpc.enable</name>
     <value>true</value>
     <description>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2e744bd3/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/resolver/TestMountTableResolver.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/resolver/TestMountTableResolver.java
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/resolver/TestMountTableResolver.java
index b19a973..5e3b861 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/resolver/TestMountTableResolver.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/resolver/TestMountTableResolver.java
@@ -21,6 +21,7 @@ import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.FEDE
 import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.FEDERATION_MOUNT_TABLE_MAX_CACHE_SIZE;
 import static org.apache.hadoop.hdfs.server.federation.router.RBFConfigKeys.DFS_ROUTER_DEFAULT_NAMESERVICE;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -175,6 +176,31 @@ public class TestMountTableResolver {
 
   }
 
+  @Test
+  public void testDefaultNameServiceEnable() throws IOException {
+    assertTrue(mountTable.isDefaultNSEnable());
+    mountTable.setDefaultNameService("3");
+    mountTable.removeEntry("/");
+
+    assertEquals("3->/unknown",
+        mountTable.getDestinationForPath("/unknown").toString());
+
+    Map<String, String> map = getMountTableEntry("4", "/unknown");
+    mountTable.addEntry(MountTable.newInstance("/unknown", map));
+    mountTable.setDefaultNSEnable(false);
+    assertFalse(mountTable.isDefaultNSEnable());
+
+    assertEquals("4->/unknown",
+        mountTable.getDestinationForPath("/unknown").toString());
+    try {
+      mountTable.getDestinationForPath("/");
+      fail("The getDestinationForPath call should fail.");
+    } catch (IOException ioe) {
+      GenericTestUtils.assertExceptionContains(
+          "the default nameservice is disabled to read or write", ioe);
+    }
+  }
+
   private void compareLists(List<String> list1, String[] list2) {
     assertEquals(list1.size(), list2.length);
     for (String item : list2) {


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message