Subject: svn commit: r1585804 - in /hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/
Date: Tue, 08 Apr 2014 18:15:53 -0000
To: hdfs-commits@hadoop.apache.org
From: wheat9@apache.org
Reply-To: hdfs-dev@hadoop.apache.org
Message-Id: <20140408181553.90F3323888FE@eris.apache.org>

Author: wheat9
Date: Tue Apr 8 18:15:52 2014
New Revision: 1585804

URL: http://svn.apache.org/r1585804
Log:
HDFS-6169. Merge r1585802 from trunk.

Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1585804&r1=1585803&r2=1585804&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Apr 8 18:15:52 2014
@@ -72,6 +72,8 @@ Release 2.5.0 - UNRELEASED
     HDFS-6143. WebHdfsFileSystem open should throw FileNotFoundException for
     non-existing paths. (Gera Shegalov via wheat9)
 
+    HDFS-6169. Move the address in WebImageViewer. (Akira Ajisaka via wheat9)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java?rev=1585804&r1=1585803&r2=1585804&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java Tue Apr 8 18:15:52 2014
@@ -17,7 +17,10 @@
  */
 package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -48,48 +51,81 @@ public class FSImageHandler extends Simp
   @Override
   public void messageReceived(
       ChannelHandlerContext ctx, MessageEvent e) throws Exception {
+    String op = getOp(e);
+    try {
+      String path = getPath(e);
+      handleOperation(op, path, e);
+    } catch (Exception ex) {
+      notFoundResponse(e);
+      LOG.warn(ex.getMessage());
+    } finally {
+      e.getFuture().addListener(ChannelFutureListener.CLOSE);
+    }
+  }
+
+  /** return the op parameter in upper case */
+  private String getOp(MessageEvent e) {
+    Map<String, List<String>> parameters = getDecoder(e).getParameters();
+    if (parameters.containsKey("op")) {
+      return parameters.get("op").get(0).toUpperCase();
+    } else {
+      // return "" to avoid NPE
+      return "";
+    }
+  }
+
+  private String getPath(MessageEvent e) throws FileNotFoundException {
+    String path = getDecoder(e).getPath();
+    // trim "/webhdfs/v1" to keep compatibility with WebHDFS API
+    if (path.startsWith("/webhdfs/v1/")) {
+      return path.replaceFirst("/webhdfs/v1", "");
+    } else {
+      throw new FileNotFoundException("Path: " + path + " should " +
+          "start with \"/webhdfs/v1/\"");
+    }
+  }
+
+  private QueryStringDecoder getDecoder(MessageEvent e) {
     HttpRequest request = (HttpRequest) e.getMessage();
-    if (request.getMethod() == HttpMethod.GET){
-      String uri = request.getUri();
-      QueryStringDecoder decoder = new QueryStringDecoder(uri);
+    return new QueryStringDecoder(request.getUri());
+  }
 
-      String op = "null";
-      if (decoder.getParameters().containsKey("op")) {
-        op = decoder.getParameters().get("op").get(0).toUpperCase();
-      }
-      HttpResponse response = new DefaultHttpResponse(
-          HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
-      String json = null;
+  private void handleOperation(String op, String path, MessageEvent e)
+      throws IOException {
+    HttpRequest request = (HttpRequest) e.getMessage();
+    HttpResponse response = new DefaultHttpResponse(
+        HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
+    response.setHeader(HttpHeaders.Names.CONTENT_TYPE,
+        "application/json");
+    String content = null;
+    if (request.getMethod() == HttpMethod.GET){
       if (op.equals("LISTSTATUS")) {
-        try {
-          json = loader.listStatus(decoder.getPath());
-          response.setStatus(HttpResponseStatus.OK);
-          response.setHeader(HttpHeaders.Names.CONTENT_TYPE,
-              "application/json");
-          HttpHeaders.setContentLength(response, json.length());
-        } catch (Exception ex) {
-          LOG.warn(ex.getMessage());
-          response.setStatus(HttpResponseStatus.NOT_FOUND);
-        }
+        content = loader.listStatus(path);
       } else {
         response.setStatus(HttpResponseStatus.BAD_REQUEST);
       }
-
-      e.getChannel().write(response);
-      if (json != null) {
-        e.getChannel().write(json);
-      }
-      LOG.info(response.getStatus().getCode() + " method=GET op=" + op
-          + " target=" + decoder.getPath());
     } else {
       // only HTTP GET is allowed since fsimage is read-only.
-      HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1,
-          HttpResponseStatus.METHOD_NOT_ALLOWED);
-      e.getChannel().write(response);
-      LOG.info(response.getStatus().getCode() + " method="
-          + request.getMethod().getName());
+      response.setStatus(HttpResponseStatus.METHOD_NOT_ALLOWED);
+    }
+
+    if (content != null) {
+      HttpHeaders.setContentLength(response, content.length());
+    }
+    e.getChannel().write(response);
+
+    if (content != null) {
+      e.getChannel().write(content);
     }
-    e.getFuture().addListener(ChannelFutureListener.CLOSE);
+
+    LOG.info(response.getStatus().getCode() + " method="
+        + request.getMethod().getName() + " op=" + op + " target=" + path);
+  }
+
+  private void notFoundResponse(MessageEvent e) {
+    HttpResponse response = new DefaultHttpResponse(
+        HttpVersion.HTTP_1_1, HttpResponseStatus.NOT_FOUND);
+    e.getChannel().write(response);
   }
 }
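After this refactoring, the viewer answers the same LISTSTATUS call that a WebHDFS client would issue, and rejects everything else: 404 for a path outside /webhdfs/v1/ or one missing from the image, 400 for an unknown op, and 405 for any method other than GET. As a rough illustration (not part of this commit; the port below is a placeholder, in practice it comes from viewer.getPort()), a raw probe of the new endpoint could look like:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ListStatusProbe {
      public static void main(String[] args) throws Exception {
        // Assumes a WebImageViewer is already listening on localhost:5978.
        URL url = new URL("http://localhost:5978/webhdfs/v1/?op=LISTSTATUS");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.connect();
        // A valid path returns 200 with an application/json body.
        System.out.println(conn.getResponseCode() + " " + conn.getContentType());
        BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), "UTF-8"));
        String line;
        while ((line = in.readLine()) != null) {
          System.out.println(line); // the JSON FileStatuses payload
        }
        in.close();
      }
    }

Note that getPath() strips the /webhdfs/v1 prefix before the lookup, so the path handed to FSImageLoader.listStatus() is the plain HDFS path.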
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java?rev=1585804&r1=1585803&r2=1585804&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java Tue Apr 8 18:15:52 2014
@@ -271,7 +271,7 @@ public class FSImageLoader {
     long id = INodeId.ROOT_INODE_ID;
     for (int i = 1; i < nameList.length; i++) {
       long[] children = dirmap.get(id);
-      Preconditions.checkNotNull(children, "The specified path: " +
+      Preconditions.checkNotNull(children, "File: " +
           strPath + " is not found in the fsimage.");
       String cName = nameList[i];
       boolean findChildren = false;
@@ -282,7 +282,7 @@ public class FSImageLoader {
           break;
         }
       }
-      Preconditions.checkArgument(findChildren, "The specified path: " +
+      Preconditions.checkArgument(findChildren, "File: " +
          strPath + " is not found in the fsimage.");
     }
     return id;
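The two reworded Preconditions messages guard the walk that resolves a slash-separated path to an inode id through the loader's in-memory directory map. A simplified, self-contained sketch of that walk, assuming a toy dirmap and names map in place of the loader's real indexes, and plain IllegalArgumentException in place of Guava's Preconditions:

    import java.util.HashMap;
    import java.util.Map;

    public class PathWalkSketch {
      static final long ROOT = 16385; // stand-in for INodeId.ROOT_INODE_ID

      // directory inode id -> ids of its children, like the loader's dirmap
      static final Map<Long, long[]> dirmap = new HashMap<Long, long[]>();
      // child inode id -> local name, a stand-in for the image's inode records
      static final Map<Long, String> names = new HashMap<Long, String>();

      static long lookup(String strPath) {
        String[] nameList = strPath.split("/");
        long id = ROOT;
        for (int i = 1; i < nameList.length; i++) {
          long[] children = dirmap.get(id);
          if (children == null) { // current inode is not a known directory
            throw new IllegalArgumentException(
                "File: " + strPath + " is not found in the fsimage.");
          }
          boolean found = false;
          for (long child : children) {
            if (nameList[i].equals(names.get(child))) {
              id = child;
              found = true;
              break;
            }
          }
          if (!found) { // no child carries the requested name
            throw new IllegalArgumentException(
                "File: " + strPath + " is not found in the fsimage.");
          }
        }
        return id;
      }

      public static void main(String[] args) {
        dirmap.put(ROOT, new long[] { 16386 });
        names.put(16386L, "dir0");
        System.out.println(lookup("/dir0")); // 16386
        try {
          lookup("/nope");
        } catch (IllegalArgumentException e) {
          System.out.println(e.getMessage()); // File: /nope is not found in the fsimage.
        }
      }
    }

Either failure mode now reports the path the caller actually asked for, which is what the handler above logs and turns into a 404.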
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java?rev=1585804&r1=1585803&r2=1585804&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java Tue Apr 8 18:15:52 2014
@@ -29,12 +29,12 @@ import java.io.RandomAccessFile;
 import java.io.StringReader;
 import java.io.StringWriter;
 import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.net.URL;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -55,13 +55,11 @@ import org.apache.hadoop.hdfs.Distribute
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
@@ -246,56 +244,68 @@ public class TestOfflineImageViewer {
   }
 
   @Test
-  public void testWebImageViewer() throws IOException, InterruptedException {
+  public void testWebImageViewer() throws IOException, InterruptedException,
+      URISyntaxException {
     WebImageViewer viewer = new WebImageViewer(
         NetUtils.createSocketAddr("localhost:0"));
     try {
       viewer.initServer(originalFsimage.getAbsolutePath());
       int port = viewer.getPort();
 
-      // 1. LISTSTATUS operation to a valid path
-      URL url = new URL("http://localhost:" + port + "/?op=LISTSTATUS");
+      // create a WebHdfsFileSystem instance
+      URI uri = new URI("webhdfs://localhost:" + String.valueOf(port));
+      Configuration conf = new Configuration();
+      WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.get(uri, conf);
+
+      // verify the number of directories
+      FileStatus[] statuses = webhdfs.listStatus(new Path("/"));
+      assertEquals(NUM_DIRS, statuses.length);
+
+      // verify the number of files in the directory
+      statuses = webhdfs.listStatus(new Path("/dir0"));
+      assertEquals(FILES_PER_DIR, statuses.length);
+
+      // compare a file
+      FileStatus status = webhdfs.listStatus(new Path("/dir0/file0"))[0];
+      FileStatus expected = writtenFiles.get("/dir0/file0");
+      assertEquals(expected.getAccessTime(), status.getAccessTime());
+      assertEquals(expected.getBlockSize(), status.getBlockSize());
+      assertEquals(expected.getGroup(), status.getGroup());
+      assertEquals(expected.getLen(), status.getLen());
+      assertEquals(expected.getModificationTime(),
+          status.getModificationTime());
+      assertEquals(expected.getOwner(), status.getOwner());
+      assertEquals(expected.getPermission(), status.getPermission());
+      assertEquals(expected.getReplication(), status.getReplication());
+      assertEquals(expected.isDirectory(), status.isDirectory());
+
+      // LISTSTATUS operation to a invalid path
+      URL url = new URL("http://localhost:" + port +
+          "/webhdfs/v1/invalid/?op=LISTSTATUS");
       HttpURLConnection connection = (HttpURLConnection) url.openConnection();
       connection.setRequestMethod("GET");
       connection.connect();
-      assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
-      assertEquals("application/json", connection.getContentType());
-
-      String content = org.apache.commons.io.IOUtils.toString(
-          connection.getInputStream());
-      LOG.info("content: " + content);
-
-      // verify the number of directories listed
-      ObjectMapper mapper = new ObjectMapper();
-      Map<String, Map<String, List<Map<String, Object>>>> fileStatuses =
-          mapper.readValue(content, new TypeReference
-            <Map<String, Map<String, List<Map<String, Object>>>>>(){});
-      List<Map<String, Object>> fileStatusList = fileStatuses
fileStatuses - .get("FileStatuses").get("FileStatus"); - assertEquals(NUM_DIRS, fileStatusList.size()); - - // verify the number of files in a directory - Map fileStatusMap = fileStatusList.get(0); - assertEquals(FILES_PER_DIR, fileStatusMap.get("childrenNum")); + assertEquals(HttpURLConnection.HTTP_NOT_FOUND, + connection.getResponseCode()); - // 2. LISTSTATUS operation to a invalid path - url = new URL("http://localhost:" + port + "/invalid/?op=LISTSTATUS"); + // LISTSTATUS operation to a invalid prefix + url = new URL("http://localhost:" + port + "/webhdfs/v1?op=LISTSTATUS"); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.connect(); assertEquals(HttpURLConnection.HTTP_NOT_FOUND, connection.getResponseCode()); - // 3. invalid operation - url = new URL("http://localhost:" + port + "/?op=INVALID"); + // invalid operation + url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=INVALID"); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.connect(); assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, connection.getResponseCode()); - // 4. invalid method - url = new URL("http://localhost:" + port + "/?op=LISTSTATUS"); + // invalid method + url = new URL("http://localhost:" + port + "/webhdfs/v1/?op=LISTSTATUS"); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("POST"); connection.connect();