hadoop-hdfs-commits mailing list archives

From: kih...@apache.org
Subject: svn commit: r1556927 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: CHANGES.txt src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
Date: Thu, 09 Jan 2014 19:24:07 GMT
Author: kihwal
Date: Thu Jan  9 19:24:07 2014
New Revision: 1556927

URL: http://svn.apache.org/r1556927
Log:
HDFS-5449. WebHdfs compatibility broken between 2.2 and 1.x / 23.x. Contributed by Kihwal Lee.
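
For context, the incompatibility is in the DatanodeInfo JSON encoding used by
WebHdfs: 1.x and 0.23.x exchange a single "name" field of the form
ipAddr:xferPort, while 2.2 replaced it with separate "ipAddr" and "xferPort"
fields, so neither generation could decode the other's responses. A minimal
Java sketch of the two wire shapes (values borrowed from the new test below;
the class and constants are illustrative only, not part of the patch):

    // Hypothetical illustration: the same datanode as serialized by each
    // generation, trimmed to the fields that matter for this fix.
    class WireShapeSketch {
      // 1.x / 0.23.x fuse host and transfer port into a single "name" field.
      static final String LEGACY =
          "{\"name\":\"127.0.0.1:1004\",\"hostName\":\"localhost\"}";
      // 2.2 splits them into "ipAddr" and "xferPort"; old peers cannot read this.
      static final String CURRENT =
          "{\"ipAddr\":\"127.0.0.1\",\"xferPort\":1004,\"hostName\":\"localhost\"}";
    }

The change below makes the encoder emit "name" alongside the two new fields and
teaches the decoder to fall back to "name" when "ipAddr" is absent, so either
generation can decode the other.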

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1556927&r1=1556926&r2=1556927&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Thu Jan  9 19:24:07 2014
@@ -739,6 +739,8 @@ Release 2.4.0 - UNRELEASED
     HDFS-5690. DataNode fails to start in secure mode when dfs.http.policy equals to 
     HTTP_ONLY. (Haohui Mai via jing9)
 
+    HDFS-5449. WebHdfs compatibility broken between 2.2 and 1.x / 23.x (kihwal)
+
   BREAKDOWN OF HDFS-2832 SUBTASKS AND RELATED JIRAS
 
     HDFS-4985. Add storage type to the protocol and expose it in block report

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java?rev=1556927&r1=1556926&r2=1556927&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java Thu Jan  9 19:24:07 2014
@@ -271,7 +271,7 @@ public class JsonUtil {
   }
   
   /** Convert a DatanodeInfo to a Json map. */
-  private static Map<String, Object> toJsonMap(final DatanodeInfo datanodeinfo) {
+  static Map<String, Object> toJsonMap(final DatanodeInfo datanodeinfo) {
     if (datanodeinfo == null) {
       return null;
     }
@@ -279,6 +279,9 @@ public class JsonUtil {
     // TODO: Fix storageID
     final Map<String, Object> m = new TreeMap<String, Object>();
     m.put("ipAddr", datanodeinfo.getIpAddr());
+    // 'name' is equivalent to ipAddr:xferPort. Older clients (1.x, 0.23.x)
+    // expect this instead of the two fields.
+    m.put("name", datanodeinfo.getXferAddr());
     m.put("hostName", datanodeinfo.getHostName());
     m.put("storageID", datanodeinfo.getDatanodeUuid());
     m.put("xferPort", datanodeinfo.getXferPort());
@@ -325,17 +328,49 @@ public class JsonUtil {
   }
 
   /** Convert a Json map to a DatanodeInfo object. */
-  static DatanodeInfo toDatanodeInfo(final Map<?, ?> m) {
+  static DatanodeInfo toDatanodeInfo(final Map<?, ?> m) 
+    throws IOException {
     if (m == null) {
       return null;
     }
 
+    // ipAddr and xferPort are the critical fields for accessing data.
+    // If either one is missing, an exception needs to be thrown.
+
+    // Handle the case of old servers (1.x, 0.23.x) sending 'name' instead
+    // of ipAddr and xferPort.
+    String ipAddr = getString(m, "ipAddr", null);
+    int xferPort = getInt(m, "xferPort", -1);
+    if (ipAddr == null) {
+      String name = getString(m, "name", null);
+      if (name != null) {
+        int colonIdx = name.indexOf(':');
+        if (colonIdx > 0) {
+          ipAddr = name.substring(0, colonIdx);
+          xferPort = Integer.parseInt(name.substring(colonIdx + 1));
+        } else {
+          throw new IOException(
+              "Invalid value in server response: name=[" + name + "]");
+        }
+      } else {
+        throw new IOException(
+            "Missing both 'ipAddr' and 'name' in server response.");
+      }
+      // ipAddr is a non-null, non-empty string at this point.
+    }
+
+    // Check the validity of xferPort.
+    if (xferPort == -1) {
+      throw new IOException(
+          "Invalid or missing 'xferPort' in server response.");
+    }
+
     // TODO: Fix storageID
     return new DatanodeInfo(
-        (String)m.get("ipAddr"),
+        ipAddr,
         (String)m.get("hostName"),
         (String)m.get("storageID"),
-        (int)(long)(Long)m.get("xferPort"),
+        xferPort,
         (int)(long)(Long)m.get("infoPort"),
         getInt(m, "infoSecurePort", 0),
         (int)(long)(Long)m.get("ipcPort"),
@@ -368,7 +403,8 @@ public class JsonUtil {
   }
 
   /** Convert an Object[] to a DatanodeInfo[]. */
-  private static DatanodeInfo[] toDatanodeInfoArray(final Object[] objects) {
+  private static DatanodeInfo[] toDatanodeInfoArray(final Object[] objects) 
+      throws IOException {
     if (objects == null) {
       return null;
     } else if (objects.length == 0) {

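Taken in isolation, the new fallback amounts to the following self-contained
sketch (a hypothetical helper mirroring the logic above; the real code lives
inline in toDatanodeInfo and goes through JsonUtil's getString/getInt helpers):

    import java.io.IOException;

    class NameFallbackSketch {
      /** Split a legacy "name" (ipAddr:xferPort) the way the patched decoder does. */
      static String[] splitName(String name) throws IOException {
        int colonIdx = name.indexOf(':');
        if (colonIdx <= 0) {
          // No colon at all ("127.0.0.1"), or an empty host (":", ":123").
          throw new IOException(
              "Invalid value in server response: name=[" + name + "]");
        }
        String ipAddr = name.substring(0, colonIdx);
        // "127.0.0.1:" and "127.0.0.1:sweet" fail here with NumberFormatException.
        int xferPort = Integer.parseInt(name.substring(colonIdx + 1));
        return new String[] { ipAddr, Integer.toString(xferPort) };
      }
    }

splitName("127.0.0.1:1004") yields {"127.0.0.1", "1004"}, while every entry in
the badNames array exercised by the new test below ends in an exception, which
is exactly what its checkDecodeFailure helper asserts.
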
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java?rev=1556927&r1=1556926&r2=1556927&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java Thu Jan  9 19:24:07 2014
@@ -24,6 +24,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
 import org.apache.hadoop.util.Time;
@@ -61,7 +62,7 @@ public class TestJsonUtil {
   }
   
   @Test
-  public void testToDatanodeInfoWithoutSecurePort() {
+  public void testToDatanodeInfoWithoutSecurePort() throws Exception {
     Map<String, Object> response = new HashMap<String, Object>();
     
     response.put("ipAddr", "127.0.0.1");
@@ -84,4 +85,63 @@ public class TestJsonUtil {
     
     JsonUtil.toDatanodeInfo(response);
   }
+
+  @Test
+  public void testToDatanodeInfoWithName() throws Exception {
+    Map<String, Object> response = new HashMap<String, Object>();
+
+    // Older servers (1.x, 0.23, etc.) send 'name' instead of ipAddr
+    // and xferPort.
+    String name = "127.0.0.1:1004";
+    response.put("name", name);
+    response.put("hostName", "localhost");
+    response.put("storageID", "fake-id");
+    response.put("infoPort", 1338l);
+    response.put("ipcPort", 1339l);
+    response.put("capacity", 1024l);
+    response.put("dfsUsed", 512l);
+    response.put("remaining", 512l);
+    response.put("blockPoolUsed", 512l);
+    response.put("lastUpdate", 0l);
+    response.put("xceiverCount", 4096l);
+    response.put("networkLocation", "foo.bar.baz");
+    response.put("adminState", "NORMAL");
+    response.put("cacheCapacity", 123l);
+    response.put("cacheUsed", 321l);
+
+    DatanodeInfo di = JsonUtil.toDatanodeInfo(response);
+    Assert.assertEquals(name, di.getXferAddr());
+
+    // The encoded result should contain name, ipAddr and xferPort.
+    Map<String, Object> r = JsonUtil.toJsonMap(di);
+    Assert.assertEquals(name, (String)r.get("name"));
+    Assert.assertEquals("127.0.0.1", (String)r.get("ipAddr"));
+    // In this test, it is Integer instead of Long since json was not actually
+    // involved in constructing the map.
+    Assert.assertEquals(1004, (int)(Integer)r.get("xferPort"));
+
+    // Invalid names: no colon, empty host, or empty/non-numeric port.
+    String[] badNames = {"127.0.0.1", "127.0.0.1:", ":", "127.0.0.1:sweet", ":123"};
+    for (String badName : badNames) {
+      response.put("name", badName);
+      checkDecodeFailure(response);
+    }
+
+    // Missing both name and ipAddr
+    response.remove("name");
+    checkDecodeFailure(response);
+
+    // Only missing xferPort
+    response.put("ipAddr", "127.0.0.1");
+    checkDecodeFailure(response);
+  }
+
+  private void checkDecodeFailure(Map<String, Object> map) {
+    try {
+      JsonUtil.toDatanodeInfo(map);
+      Assert.fail("Exception not thrown for bad input.");
+    } catch (Exception e) {
+      // expected
+    }
+  }
 }


