hadoop-hdfs-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From whe...@apache.org
Subject svn commit: r1585802 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/
Date Tue, 08 Apr 2014 18:14:19 GMT
Author: wheat9
Date: Tue Apr  8 18:14:19 2014
New Revision: 1585802

URL: http://svn.apache.org/r1585802
Log:
HDFS-6169. Move the address in WebImageViewer. Contributed by Akira Ajisaka.

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1585802&r1=1585801&r2=1585802&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Apr  8 18:14:19 2014
@@ -320,6 +320,8 @@ Release 2.5.0 - UNRELEASED
     HDFS-6143. WebHdfsFileSystem open should throw FileNotFoundException for
     non-existing paths. (Gera Shegalov via wheat9)
 
+    HDFS-6169. Move the address in WebImageViewer. (Akira Ajisaka via wheat9)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java?rev=1585802&r1=1585801&r2=1585802&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
(original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
Tue Apr  8 18:14:19 2014
@@ -17,7 +17,10 @@
  */
 package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -48,48 +51,81 @@ public class FSImageHandler extends Simp
   @Override
   public void messageReceived(
       ChannelHandlerContext ctx, MessageEvent e) throws Exception {
+    String op = getOp(e);
+    try {
+      String path = getPath(e);
+      handleOperation(op, path, e);
+    } catch (Exception ex) {
+      notFoundResponse(e);
+      LOG.warn(ex.getMessage());
+    } finally {
+      e.getFuture().addListener(ChannelFutureListener.CLOSE);
+    }
+  }
+
+  /** return the op parameter in upper case */
+  private String getOp(MessageEvent e) {
+    Map<String, List<String>> parameters = getDecoder(e).getParameters();
+    if (parameters.containsKey("op")) {
+      return parameters.get("op").get(0).toUpperCase();
+    } else {
+      // return "" to avoid NPE
+      return "";
+    }
+  }
+
+  private String getPath(MessageEvent e) throws FileNotFoundException {
+    String path = getDecoder(e).getPath();
+    // trim "/webhdfs/v1" to keep compatibility with WebHDFS API
+    if (path.startsWith("/webhdfs/v1/")) {
+      return path.replaceFirst("/webhdfs/v1", "");
+    } else {
+      throw new FileNotFoundException("Path: " + path + " should " +
+          "start with \"/webhdfs/v1/\"");
+    }
+  }
+
+  private QueryStringDecoder getDecoder(MessageEvent e) {
     HttpRequest request = (HttpRequest) e.getMessage();
-    if (request.getMethod() == HttpMethod.GET){
-      String uri = request.getUri();
-      QueryStringDecoder decoder = new QueryStringDecoder(uri);
+    return new QueryStringDecoder(request.getUri());
+  }
 
-      String op = "null";
-      if (decoder.getParameters().containsKey("op")) {
-        op = decoder.getParameters().get("op").get(0).toUpperCase();
-      }
-      HttpResponse response = new DefaultHttpResponse(
-          HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
-      String json = null;
+  private void handleOperation(String op, String path, MessageEvent e)
+      throws IOException {
+    HttpRequest request = (HttpRequest) e.getMessage();
+    HttpResponse response = new DefaultHttpResponse(
+        HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
+    response.setHeader(HttpHeaders.Names.CONTENT_TYPE,
+        "application/json");
+    String content = null;
 
+    if (request.getMethod() == HttpMethod.GET){
       if (op.equals("LISTSTATUS")) {
-        try {
-          json = loader.listStatus(decoder.getPath());
-          response.setStatus(HttpResponseStatus.OK);
-          response.setHeader(HttpHeaders.Names.CONTENT_TYPE,
-              "application/json");
-          HttpHeaders.setContentLength(response, json.length());
-        } catch (Exception ex) {
-          LOG.warn(ex.getMessage());
-          response.setStatus(HttpResponseStatus.NOT_FOUND);
-        }
+        content = loader.listStatus(path);
       } else {
         response.setStatus(HttpResponseStatus.BAD_REQUEST);
       }
-
-      e.getChannel().write(response);
-      if (json != null) {
-        e.getChannel().write(json);
-      }
-      LOG.info(response.getStatus().getCode() + " method=GET op=" + op
-          + " target=" + decoder.getPath());
     } else {
       // only HTTP GET is allowed since fsimage is read-only.
-      HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1,
-          HttpResponseStatus.METHOD_NOT_ALLOWED);
-      e.getChannel().write(response);
-      LOG.info(response.getStatus().getCode() + " method="
-          + request.getMethod().getName());
+      response.setStatus(HttpResponseStatus.METHOD_NOT_ALLOWED);
+    }
+
+    if (content != null) {
+      HttpHeaders.setContentLength(response, content.length());
+    }
+    e.getChannel().write(response);
+
+    if (content != null) {
+      e.getChannel().write(content);
     }
-    e.getFuture().addListener(ChannelFutureListener.CLOSE);
+
+    LOG.info(response.getStatus().getCode() + " method="
+        + request.getMethod().getName() + " op=" + op + " target=" + path);
+  }
+
+  private void notFoundResponse(MessageEvent e) {
+    HttpResponse response = new DefaultHttpResponse(
+        HttpVersion.HTTP_1_1, HttpResponseStatus.NOT_FOUND);
+    e.getChannel().write(response);
   }
 }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java?rev=1585802&r1=1585801&r2=1585802&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
(original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
Tue Apr  8 18:14:19 2014
@@ -271,7 +271,7 @@ public class FSImageLoader {
     long id = INodeId.ROOT_INODE_ID;
     for (int i = 1; i < nameList.length; i++) {
       long[] children = dirmap.get(id);
-      Preconditions.checkNotNull(children, "The specified path: " +
+      Preconditions.checkNotNull(children, "File: " +
           strPath + " is not found in the fsimage.");
       String cName = nameList[i];
       boolean findChildren = false;
@@ -282,7 +282,7 @@ public class FSImageLoader {
           break;
         }
       }
-      Preconditions.checkArgument(findChildren, "The specified path: " +
+      Preconditions.checkArgument(findChildren, "File: " +
           strPath + " is not found in the fsimage.");
     }
     return id;

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java?rev=1585802&r1=1585801&r2=1585802&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
(original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
Tue Apr  8 18:14:19 2014
@@ -29,12 +29,12 @@ import java.io.RandomAccessFile;
 import java.io.StringReader;
 import java.io.StringWriter;
 import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.net.URL;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -55,13 +55,11 @@ import org.apache.hadoop.hdfs.Distribute
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
@@ -246,56 +244,68 @@ public class TestOfflineImageViewer {
   }
 
   @Test
-  public void testWebImageViewer() throws IOException, InterruptedException {
+  public void testWebImageViewer() throws IOException, InterruptedException,
+      URISyntaxException {
     WebImageViewer viewer = new WebImageViewer(
         NetUtils.createSocketAddr("localhost:0"));
     try {
       viewer.initServer(originalFsimage.getAbsolutePath());
       int port = viewer.getPort();
 
-      // 1. LISTSTATUS operation to a valid path
-      URL url = new URL("http://localhost:" + port + "/?op=LISTSTATUS");
+      // create a WebHdfsFileSystem instance
+      URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
+      Configuration conf = new Configuration();
+      WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.get(uri, conf);
+
+      // verify the number of directories
+      FileStatus[] statuses = webhdfs.listStatus(new Path("/"));
+      assertEquals(NUM_DIRS, statuses.length);
+
+      // verify the number of files in the directory
+      statuses = webhdfs.listStatus(new Path("/dir0"));
+      assertEquals(FILES_PER_DIR, statuses.length);
+
+      // compare a file
+      FileStatus status = webhdfs.listStatus(new Path("/dir0/file0"))[0];
+      FileStatus expected = writtenFiles.get("/dir0/file0");
+      assertEquals(expected.getAccessTime(), status.getAccessTime());
+      assertEquals(expected.getBlockSize(), status.getBlockSize());
+      assertEquals(expected.getGroup(), status.getGroup());
+      assertEquals(expected.getLen(), status.getLen());
+      assertEquals(expected.getModificationTime(),
+                   status.getModificationTime());
+      assertEquals(expected.getOwner(), status.getOwner());
+      assertEquals(expected.getPermission(), status.getPermission());
+      assertEquals(expected.getReplication(), status.getReplication());
+      assertEquals(expected.isDirectory(), status.isDirectory());
+
+      // LISTSTATUS operation to an invalid path
+      URL url = new URL("http://localhost:" + port +
+                    "/webhdfs/v1/invalid/?op=LISTSTATUS");
       HttpURLConnection connection = (HttpURLConnection) url.openConnection();
       connection.setRequestMethod("GET");
       connection.connect();
-      assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
-      assertEquals("application/json", connection.getContentType());
-
-      String content = org.apache.commons.io.IOUtils.toString(
-          connection.getInputStream());
-      LOG.info("content: " + content);
-
-      // verify the number of directories listed
-      ObjectMapper mapper = new ObjectMapper();
-      Map<String, Map<String, List<Map<String, Object>>>> fileStatuses
=
-          mapper.readValue(content, new TypeReference
-          <Map<String, Map<String, List<Map<String, Object>>>>>(){});
-      List<Map<String, Object>> fileStatusList = fileStatuses
-          .get("FileStatuses").get("FileStatus");
-      assertEquals(NUM_DIRS, fileStatusList.size());
-
-      // verify the number of files in a directory
-      Map<String, Object> fileStatusMap = fileStatusList.get(0);
-      assertEquals(FILES_PER_DIR, fileStatusMap.get("childrenNum"));
+      assertEquals(HttpURLConnection.HTTP_NOT_FOUND,
+                   connection.getResponseCode());
 
-      // 2. LISTSTATUS operation to a invalid path
-      url = new URL("http://localhost:" + port + "/invalid/?op=LISTSTATUS");
+      // LISTSTATUS operation to an invalid prefix
+      url = new URL("http://localhost:" + port + "/webhdfs/v1?op=LISTSTATUS");
       connection = (HttpURLConnection) url.openConnection();
       connection.setRequestMethod("GET");
       connection.connect();
       assertEquals(HttpURLConnection.HTTP_NOT_FOUND,
                    connection.getResponseCode());
 
-      // 3. invalid operation
-      url = new URL("http://localhost:" + port + "/?op=INVALID");
+      // invalid operation
+      url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=INVALID");
       connection = (HttpURLConnection) url.openConnection();
       connection.setRequestMethod("GET");
       connection.connect();
       assertEquals(HttpURLConnection.HTTP_BAD_REQUEST,
           connection.getResponseCode());
 
-      // 4. invalid method
-      url = new URL("http://localhost:" + port + "/?op=LISTSTATUS");
+      // invalid method
+      url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=LISTSTATUS");
       connection = (HttpURLConnection) url.openConnection();
       connection.setRequestMethod("POST");
       connection.connect();



Mime
View raw message